hadoop-hdfs-commits mailing list archives

From cnaur...@apache.org
Subject svn commit: r1515989 - in /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/ src/test/java/org/apache/hadoop/hdfs/web/
Date Tue, 20 Aug 2013 22:15:23 GMT
Author: cnauroth
Date: Tue Aug 20 22:15:22 2013
New Revision: 1515989

URL: http://svn.apache.org/r1515989
Log:
HDFS-4594. WebHDFS open sets Content-Length header to what is specified by length parameter
rather than how much data is actually returned. Contributed by Chris Nauroth.
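
A minimal sketch of the corrected length computation applied by the patch below; the method name and parameter names here are illustrative only, not part of DatanodeWebHdfsMethods:

    // Clamp the advertised Content-Length to the bytes actually available
    // past the requested offset. requestedLength is null when the client
    // omits the length parameter.
    static long responseLength(Long requestedLength, long visibleLength, long offset) {
      final long remaining = visibleLength - offset;
      return requestedLength != null
          ? Math.min(requestedLength, remaining)
          : remaining;
    }

For example, a 29-byte file requested with length=30 and offset=0 now yields Content-Length: 29, matching the data actually returned, as exercised by the new tests added to TestWebHdfsFileSystemContract.java.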

Modified:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1515989&r1=1515988&r2=1515989&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Tue Aug 20 22:15:22 2013
@@ -372,6 +372,9 @@ Release 2.1.1-beta - UNRELEASED
     HDFS-5106. TestDatanodeBlockScanner fails on Windows due to incorrect path
     format. (Chuan Liu via cnauroth)
 
+    HDFS-4594. WebHDFS open sets Content-Length header to what is specified by
+    length parameter rather than how much data is actually returned. (cnauroth)
+
 Release 2.1.0-beta - 2013-08-22
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java?rev=1515989&r1=1515988&r2=1515989&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java Tue Aug 20 22:15:22 2013
@@ -410,8 +410,9 @@ public class DatanodeWebHdfsMethods {
         throw ioe;
       }
       
-      final long n = length.getValue() != null? length.getValue()
-        : in.getVisibleLength() - offset.getValue();
+      final long n = length.getValue() != null ?
+        Math.min(length.getValue(), in.getVisibleLength() - offset.getValue()) :
+        in.getVisibleLength() - offset.getValue();
       return Response.ok(new OpenEntity(in, n, dfsclient)).type(
           MediaType.APPLICATION_OCTET_STREAM).build();
     }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java?rev=1515989&r1=1515988&r2=1515989&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java Tue Aug 20 22:15:22 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.hdfs.web;
 import java.io.BufferedReader;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.net.HttpURLConnection;
 import java.net.URL;
@@ -45,8 +46,11 @@ import org.apache.hadoop.hdfs.MiniDFSClu
 import org.apache.hadoop.hdfs.web.resources.DoAsParam;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
 import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
+import org.apache.hadoop.hdfs.web.resources.LengthParam;
 import org.apache.hadoop.hdfs.web.resources.NamenodeRpcAddressParam;
+import org.apache.hadoop.hdfs.web.resources.OffsetParam;
 import org.apache.hadoop.hdfs.web.resources.PutOpParam;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.Assert;
@@ -288,6 +292,104 @@ public class TestWebHdfsFileSystemContra
     }
   }
 
+  /**
+   * Test get with length parameter greater than actual file length.
+   */
+  public void testLengthParamLongerThanFile() throws IOException {
+    WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs;
+    Path dir = new Path("/test");
+    assertTrue(webhdfs.mkdirs(dir));
+
+    // Create a file with some content.
+    Path testFile = new Path("/test/testLengthParamLongerThanFile");
+    String content = "testLengthParamLongerThanFile";
+    FSDataOutputStream testFileOut = webhdfs.create(testFile);
+    try {
+      testFileOut.write(content.getBytes("US-ASCII"));
+    } finally {
+      IOUtils.closeStream(testFileOut);
+    }
+
+    // Open the file, but request length longer than actual file length by 1.
+    HttpOpParam.Op op = GetOpParam.Op.OPEN;
+    URL url = webhdfs.toUrl(op, testFile, new LengthParam(Long.valueOf(
+      content.length() + 1)));
+    HttpURLConnection conn = null;
+    InputStream is = null;
+    try {
+      conn = (HttpURLConnection)url.openConnection();
+      conn.setRequestMethod(op.getType().toString());
+      conn.setDoOutput(op.getDoOutput());
+      conn.setInstanceFollowRedirects(true);
+
+      // Expect OK response and Content-Length header equal to actual length.
+      assertEquals(HttpServletResponse.SC_OK, conn.getResponseCode());
+      assertEquals(String.valueOf(content.length()), conn.getHeaderField(
+        "Content-Length"));
+
+      // Check content matches.
+      byte[] respBody = new byte[content.length()];
+      is = conn.getInputStream();
+      IOUtils.readFully(is, respBody, 0, content.length());
+      assertEquals(content, new String(respBody, "US-ASCII"));
+    } finally {
+      IOUtils.closeStream(is);
+      if (conn != null) {
+        conn.disconnect();
+      }
+    }
+  }
+
+  /**
+   * Test get with offset and length parameters that combine to request a length
+   * greater than actual file length.
+   */
+  public void testOffsetPlusLengthParamsLongerThanFile() throws IOException {
+    WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs;
+    Path dir = new Path("/test");
+    assertTrue(webhdfs.mkdirs(dir));
+
+    // Create a file with some content.
+    Path testFile = new Path("/test/testOffsetPlusLengthParamsLongerThanFile");
+    String content = "testOffsetPlusLengthParamsLongerThanFile";
+    FSDataOutputStream testFileOut = webhdfs.create(testFile);
+    try {
+      testFileOut.write(content.getBytes("US-ASCII"));
+    } finally {
+      IOUtils.closeStream(testFileOut);
+    }
+
+    // Open the file, but request offset starting at 1 and length equal to file
+    // length.  Considering the offset, this is longer than the actual content.
+    HttpOpParam.Op op = GetOpParam.Op.OPEN;
+    URL url = webhdfs.toUrl(op, testFile, new LengthParam(Long.valueOf(
+      content.length())), new OffsetParam(1L));
+    HttpURLConnection conn = null;
+    InputStream is = null;
+    try {
+      conn = (HttpURLConnection)url.openConnection();
+      conn.setRequestMethod(op.getType().toString());
+      conn.setDoOutput(op.getDoOutput());
+      conn.setInstanceFollowRedirects(true);
+
+      // Expect OK response and Content-Length header equal to actual length.
+      assertEquals(HttpServletResponse.SC_OK, conn.getResponseCode());
+      assertEquals(String.valueOf(content.length() - 1), conn.getHeaderField(
+        "Content-Length"));
+
+      // Check content matches.
+      byte[] respBody = new byte[content.length() - 1];
+      is = conn.getInputStream();
+      IOUtils.readFully(is, respBody, 0, content.length() - 1);
+      assertEquals(content.substring(1), new String(respBody, "US-ASCII"));
+    } finally {
+      IOUtils.closeStream(is);
+      if (conn != null) {
+        conn.disconnect();
+      }
+    }
+  }
+
   public void testResponseCode() throws IOException {
     final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs;
     final Path root = new Path("/");


