Subject: svn commit: r1199422 - in /hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/java/ src/main/java/org/apache/hadoop/hdfs/ src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/ src/main/java/org/apache/hadoop...
Date: Tue, 08 Nov 2011 20:02:19 -0000
To: hdfs-commits@hadoop.apache.org
From: szetszwo@apache.org
Message-Id: <20111108200219.9FE2523888FD@eris.apache.org>

Author: szetszwo
Date: Tue Nov  8 20:02:18 2011
New Revision: 1199422

URL: http://svn.apache.org/viewvc?rev=1199422&view=rev
Log:
svn merge -c 1197335 from 0.23 for HDFS-2527.
Added:
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestOffsetUrlInputStream.java
      - copied unchanged from r1197335, hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestOffsetUrlInputStream.java
Modified:
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/   (props changed)
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/   (props changed)
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/ByteRangeInputStream.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestByteRangeInputStream.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java

Propchange: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov  8 20:02:18 2011
@@ -1,5 +1,5 @@
-/hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs:1196812
-/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs:1161777,1161781,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163081,1163490,1163768,1164255,1164301,1164339,1166402,1167383,1167662,1170085,1170379,1170459,1170996,1171136,1171297,1171379,1171611,1172916,1173402,1173468,1175113,1176178,1176550,1176719,1176729,1176733,1177100,1177161,1177487,1177531,1177757,1177859,1177864,1177905,1179169,1179856,1179861,1180757,1183081,1183098,1183175,1183554,1186508,1187140,1189028,1189355,1189360,1189546,1189932,1189982,1190077,1190708,1195575,1195656,1195731,1195754,1196113,1196129
+/hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs:1196812,1197335
+/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs:1161777,1161781,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163081,1163490,1163768,1164255,1164301,1164339,1166402,1167383,1167662,1170085,1170379,1170459,1170996,1171136,1171297,1171379,1171611,1172916,1173402,1173468,1175113,1176178,1176550,1176719,1176729,1176733,1177100,1177161,1177487,1177531,1177757,1177859,1177864,1177905,1179169,1179856,1179861,1180757,1183081,1183098,1183175,1183554,1186508,1187140,1189028,1189355,1189360,1189546,1189932,1189982,1190077,1190708,1195575,1195656,1195731,1195754,1196113,1196129,1197329
 /hadoop/core/branches/branch-0.19/hdfs:713112
 /hadoop/hdfs/branches/HDFS-1052:987665-1095512
 /hadoop/hdfs/branches/HDFS-265:796829-820463

Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1199422&r1=1199421&r2=1199422&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Tue Nov  8 20:02:18 2011
@@ -1178,6 +1178,13 @@ Release 0.23.0 - 2011-11-01
 
     HDFS-2416. distcp with a webhdfs uri on a secure cluster fails. (jitendra)
 
+    HDFS-2527. WebHdfs: remove the use of "Range" header in Open; use ugi
+    username if renewer parameter is null in GetDelegationToken; response OK
+    when setting replication for non-files; rename GETFILEBLOCKLOCATIONS to
+    GET_BLOCK_LOCATIONS and state that it is a private unstable API; replace
+    isDirectory and isSymlink with enum {FILE, DIRECTORY, SYMLINK} in
+    HdfsFileStatus JSON object.  (szetszwo)
+
   BREAKDOWN OF HDFS-1073 SUBTASKS
 
     HDFS-1521. Persist transaction ID on disk between NN restarts.

Propchange: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov  8 20:02:18 2011
@@ -1,5 +1,5 @@
-/hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java:1196812
-/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java:1161777,1161781,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163081,1163490,1163768,1164255,1164301,1164339,1166402,1167383,1167662,1170085,1170379,1170459,1170996,1171136,1171297,1171379,1171611,1172916,1173402,1173468,1175113,1176178,1176550,1176719,1176729,1176733,1177100,1177161,1177487,1177531,1177757,1177859,1177864,1177905,1179169,1179856,1179861,1180757,1183081,1183098,1183175,1183554,1186508,1187140,1189028,1189355,1189360,1189546,1189932,1189982,1190077,1190708,1195575,1195656,1195731,1195754,1196113,1196129
+/hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java:1196812,1197335
+/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java:1161777,1161781,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163081,1163490,1163768,1164255,1164301,1164339,1166402,1167383,1167662,1170085,1170379,1170459,1170996,1171136,1171297,1171379,1171611,1172916,1173402,1173468,1175113,1176178,1176550,1176719,1176729,1176733,1177100,1177161,1177487,1177531,1177757,1177859,1177864,1177905,1179169,1179856,1179861,1180757,1183081,1183098,1183175,1183554,1186508,1187140,1189028,1189355,1189360,1189546,1189932,1189982,1190077,1190708,1195575,1195656,1195731,1195754,1196113,1196129,1197329
 /hadoop/core/branches/branch-0.19/hdfs/src/java:713112
 /hadoop/core/trunk/src/hdfs:776175-785643,785929-786278
 /hadoop/hdfs/branches/HDFS-1052/src/java:987665-1095512
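Note: the heart of HDFS-2527 is how a read at an offset travels over HTTP. WebHdfs OPEN now carries the offset as a query parameter and expects a plain 200, while Hftp keeps the Range header and expects 206 for a non-zero offset. A minimal client-side sketch of the two styles (illustrative only, not part of this commit; the base URL is assumed to already carry a query string, as in the diffs below):

import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class OffsetReadSketch {
  /** WebHdfs style: offset as a query parameter, expect 200 OK. */
  static InputStream openWithOffsetParam(URL base, long offset) throws IOException {
    final URL u = offset == 0L ? base : new URL(base + "&offset=" + offset);
    final HttpURLConnection conn = (HttpURLConnection) u.openConnection();
    conn.setRequestMethod("GET");
    if (conn.getResponseCode() != HttpURLConnection.HTTP_OK) {
      throw new IOException("HTTP_OK expected, received " + conn.getResponseCode());
    }
    return conn.getInputStream();
  }

  /** Hftp style: offset as an HTTP Range header, expect 206 Partial Content. */
  static InputStream openWithRangeHeader(URL u, long offset) throws IOException {
    final HttpURLConnection conn = (HttpURLConnection) u.openConnection();
    conn.setRequestMethod("GET");
    if (offset != 0L) {
      conn.setRequestProperty("Range", "bytes=" + offset + "-");
    }
    final int expected = offset == 0L
        ? HttpURLConnection.HTTP_OK : HttpURLConnection.HTTP_PARTIAL;
    if (conn.getResponseCode() != expected) {
      throw new IOException(expected + " expected, received " + conn.getResponseCode());
    }
    return conn.getInputStream();
  }
}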
Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/ByteRangeInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/ByteRangeInputStream.java?rev=1199422&r1=1199421&r2=1199422&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/ByteRangeInputStream.java (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/ByteRangeInputStream.java Tue Nov  8 20:02:18 2011
@@ -18,17 +18,13 @@ package org.apache.hadoop.hdfs;
 
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.HttpURLConnection;
-import java.net.MalformedURLException;
 import java.net.URL;
-import java.util.StringTokenizer;
 
 import org.apache.hadoop.fs.FSInputStream;
 import org.apache.hadoop.hdfs.server.namenode.StreamFile;
-import org.apache.hadoop.hdfs.web.resources.OffsetParam;
 
 /**
  * To support HTTP byte streams, a new connection to an HTTP server needs to be
@@ -37,16 +33,14 @@ import org.apache.hadoop.hdfs.web.resour
  * is made on the successive read(). The normal input stream functions are
  * connected to the currently active input stream.
  */
-public class ByteRangeInputStream extends FSInputStream {
+public abstract class ByteRangeInputStream extends FSInputStream {
   
   /**
    * This class wraps a URL and provides method to open connection.
    * It can be overridden to change how a connection is opened.
    */
-  public static class URLOpener {
+  public static abstract class URLOpener {
     protected URL url;
-    /** The url with offset parameter */
-    protected URL offsetUrl;
 
     public URLOpener(URL u) {
       url = u;
@@ -60,52 +54,9 @@ public class ByteRangeInputStream extend
       return url;
     }
 
-    protected HttpURLConnection openConnection() throws IOException {
-      return (HttpURLConnection)offsetUrl.openConnection();
-    }
+    protected abstract HttpURLConnection openConnection() throws IOException;
 
-    private HttpURLConnection openConnection(final long offset) throws IOException {
-      offsetUrl = offset == 0L? url: new URL(url + "&" + new OffsetParam(offset));
-      final HttpURLConnection conn = openConnection();
-      conn.setRequestMethod("GET");
-      if (offset != 0L) {
-        conn.setRequestProperty("Range", "bytes=" + offset + "-");
-      }
-      return conn;
-    }
-  }
-
-  static private final String OFFSET_PARAM_PREFIX = OffsetParam.NAME + "=";
-
-  /** Remove offset parameter, if there is any, from the url */
-  static URL removeOffsetParam(final URL url) throws MalformedURLException {
-    String query = url.getQuery();
-    if (query == null) {
-      return url;
-    }
-    final String lower = query.toLowerCase();
-    if (!lower.startsWith(OFFSET_PARAM_PREFIX)
-        && !lower.contains("&" + OFFSET_PARAM_PREFIX)) {
-      return url;
-    }
-
-    //rebuild query
-    StringBuilder b = null;
-    for(final StringTokenizer st = new StringTokenizer(query, "&");
-        st.hasMoreTokens();) {
-      final String token = st.nextToken();
-      if (!token.toLowerCase().startsWith(OFFSET_PARAM_PREFIX)) {
-        if (b == null) {
-          b = new StringBuilder("?").append(token);
-        } else {
-          b.append('&').append(token);
-        }
-      }
-    }
-    query = b == null? "": b.toString();
-
-    final String urlStr = url.toString();
-    return new URL(urlStr.substring(0, urlStr.indexOf('?')) + query);
+    protected abstract HttpURLConnection openConnection(final long offset
+        ) throws IOException;
   }
 
   enum StreamStatus {
@@ -120,11 +71,6 @@ public class ByteRangeInputStream extend
 
   StreamStatus status = StreamStatus.SEEK;
 
-  /** Create an input stream with the URL. */
-  public ByteRangeInputStream(final URL url) {
-    this(new URLOpener(url), new URLOpener(null));
-  }
-
   /**
    * Create with the specified URLOpeners. Original url is used to open the
    * stream for the first time. Resolved url is used in subsequent requests.
@@ -136,6 +82,12 @@ public class ByteRangeInputStream extend
     this.resolvedURL = r;
   }
 
+  protected abstract void checkResponseCode(final HttpURLConnection connection
+      ) throws IOException;
+
+  protected abstract URL getResolvedUrl(final HttpURLConnection connection
+      ) throws IOException;
+
   private InputStream getInputStream() throws IOException {
     if (status != StreamStatus.NORMAL) {
@@ -150,32 +102,14 @@ public class ByteRangeInputStream extend
           (resolvedURL.getURL() == null) ? originalURL : resolvedURL;
 
       final HttpURLConnection connection = opener.openConnection(startPos);
-      try {
-        connection.connect();
-        final String cl = connection.getHeaderField(StreamFile.CONTENT_LENGTH);
-        filelength = (cl == null) ? -1 : Long.parseLong(cl);
-        if (HftpFileSystem.LOG.isDebugEnabled()) {
-          HftpFileSystem.LOG.debug("filelength = " + filelength);
-        }
-        in = connection.getInputStream();
-      } catch (FileNotFoundException fnfe) {
-        throw fnfe;
-      } catch (IOException ioe) {
-        HftpFileSystem.throwIOExceptionFromConnection(connection, ioe);
-      }
-
-      int respCode = connection.getResponseCode();
-      if (startPos != 0 && respCode != HttpURLConnection.HTTP_PARTIAL) {
-        // We asked for a byte range but did not receive a partial content
-        // response...
-        throw new IOException("HTTP_PARTIAL expected, received " + respCode);
-      } else if (startPos == 0 && respCode != HttpURLConnection.HTTP_OK) {
-        // We asked for all bytes from the beginning but didn't receive a 200
-        // response (none of the other 2xx codes are valid here)
-        throw new IOException("HTTP_OK expected, received " + respCode);
-      }
+      connection.connect();
+      checkResponseCode(connection);
+
+      final String cl = connection.getHeaderField(StreamFile.CONTENT_LENGTH);
+      filelength = (cl == null) ? -1 : Long.parseLong(cl);
+      in = connection.getInputStream();
 
-      resolvedURL.setURL(removeOffsetParam(connection.getURL()));
+      resolvedURL.setURL(getResolvedUrl(connection));
       status = StreamStatus.NORMAL;
     }
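Note: ByteRangeInputStream is now a template. A subclass decides how the offset is encoded and which response codes are acceptable, while the base class keeps the seek/reconnect state machine. A toy subclass under the new contract (SimpleRangeOpener and SimpleRangeStream are hypothetical names, shown only to make the four abstract methods concrete; the real implementations are added to HftpFileSystem and WebHdfsFileSystem below):

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

import org.apache.hadoop.hdfs.ByteRangeInputStream;

class SimpleRangeOpener extends ByteRangeInputStream.URLOpener {
  SimpleRangeOpener(URL url) {
    super(url);
  }

  @Override
  protected HttpURLConnection openConnection() throws IOException {
    return (HttpURLConnection) url.openConnection();
  }

  /** Encode the offset as a Range header. */
  @Override
  protected HttpURLConnection openConnection(long offset) throws IOException {
    final HttpURLConnection conn = openConnection();
    conn.setRequestMethod("GET");
    if (offset != 0L) {
      conn.setRequestProperty("Range", "bytes=" + offset + "-");
    }
    return conn;
  }
}

class SimpleRangeStream extends ByteRangeInputStream {
  SimpleRangeStream(URL url) {
    super(new SimpleRangeOpener(url), new SimpleRangeOpener(null));
  }

  /** Loose policy, for illustration only: accept any 2xx response. */
  @Override
  protected void checkResponseCode(HttpURLConnection connection) throws IOException {
    final int code = connection.getResponseCode();
    if (code / 100 != 2) {
      throw new IOException("2xx expected, received " + code);
    }
  }

  @Override
  protected URL getResolvedUrl(HttpURLConnection connection) {
    return connection.getURL();
  }
}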
Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java?rev=1199422&r1=1199421&r2=1199422&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java Tue Nov  8 20:02:18 2011
@@ -372,13 +372,66 @@ public class HftpFileSystem extends File
     return query;
   }
 
+  static class RangeHeaderUrlOpener extends ByteRangeInputStream.URLOpener {
+    RangeHeaderUrlOpener(final URL url) {
+      super(url);
+    }
+
+    @Override
+    protected HttpURLConnection openConnection() throws IOException {
+      return (HttpURLConnection)url.openConnection();
+    }
+
+    /** Use HTTP Range header for specifying offset. */
+    @Override
+    protected HttpURLConnection openConnection(final long offset) throws IOException {
+      final HttpURLConnection conn = openConnection();
+      conn.setRequestMethod("GET");
+      if (offset != 0L) {
+        conn.setRequestProperty("Range", "bytes=" + offset + "-");
+      }
+      return conn;
+    }
+  }
+
+  static class RangeHeaderInputStream extends ByteRangeInputStream {
+    RangeHeaderInputStream(RangeHeaderUrlOpener o, RangeHeaderUrlOpener r) {
+      super(o, r);
+    }
+
+    RangeHeaderInputStream(final URL url) {
+      this(new RangeHeaderUrlOpener(url), new RangeHeaderUrlOpener(null));
+    }
+
+    /** Expects HTTP_OK and HTTP_PARTIAL response codes. */
+    @Override
+    protected void checkResponseCode(final HttpURLConnection connection
+        ) throws IOException {
+      final int code = connection.getResponseCode();
+      if (startPos != 0 && code != HttpURLConnection.HTTP_PARTIAL) {
+        // We asked for a byte range but did not receive a partial content
+        // response...
+        throw new IOException("HTTP_PARTIAL expected, received " + code);
+      } else if (startPos == 0 && code != HttpURLConnection.HTTP_OK) {
+        // We asked for all bytes from the beginning but didn't receive a 200
+        // response (none of the other 2xx codes are valid here)
+        throw new IOException("HTTP_OK expected, received " + code);
+      }
+    }
+
+    @Override
+    protected URL getResolvedUrl(final HttpURLConnection connection) {
+      return connection.getURL();
+    }
+  }
+
   @Override
   public FSDataInputStream open(Path f, int buffersize) throws IOException {
     f = f.makeQualified(getUri(), getWorkingDirectory());
     String path = "/data" + ServletUtil.encodePath(f.toUri().getPath());
     String query = addDelegationTokenParam("ugi=" + getEncodedUgiParameter());
     URL u = getNamenodeURL(path, query);
-    return new FSDataInputStream(new ByteRangeInputStream(u));
+    return new FSDataInputStream(new RangeHeaderInputStream(u));
  }
 
   /** Class to parse and store a listing reply from the server. */
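Note: per the class javadoc, seek() on these streams is lazy; it only records the new position, and the reconnect happens on the next read(), via the opener's openConnection(offset). A usage sketch (placed in the same package because RangeHeaderInputStream is package-private; the URL is hypothetical):

package org.apache.hadoop.hdfs;

import java.net.URL;

public class RangeReadSketch {
  public static void main(String[] args) throws Exception {
    final URL u = new URL("http://namenode:50070/data/foo.txt?ugi=alice");
    final ByteRangeInputStream in = new HftpFileSystem.RangeHeaderInputStream(u);
    in.seek(1024);          // no HTTP traffic yet; only the offset is recorded
    final int b =;   // reconnects with "Range: bytes=1024-", expects 206
    in.close();
  }
}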
Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java?rev=1199422&r1=1199421&r2=1199422&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java Tue Nov  8 20:02:18 2011
@@ -350,9 +350,7 @@ public class DatanodeWebHdfsMethods {
         }
       };
 
-      final int status = offset.getValue() == 0?
-          HttpServletResponse.SC_OK: HttpServletResponse.SC_PARTIAL_CONTENT;
-      return Response.status(status).entity(streaming).type(
+      return Response.ok(streaming).type(
           MediaType.APPLICATION_OCTET_STREAM).build();
     }
     case GETFILECHECKSUM:
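Note: with the Range header gone from WebHdfs OPEN, the datanode no longer signals offset reads with 206; every successful OPEN is a plain 200. A client-visible sketch (hypothetical host and path; /webhdfs/v1 is the WebHdfs URI prefix):

import java.net.HttpURLConnection;
import java.net.URL;

public class OpenStatusSketch {
  public static void main(String[] args) throws Exception {
    final URL u = new URL(
        "http://datanode:50075/webhdfs/v1/user/alice/foo?op=OPEN&offset=1024");
    final HttpURLConnection conn = (HttpURLConnection) u.openConnection();
    conn.setRequestMethod("GET");
    // before this change: 206 (SC_PARTIAL_CONTENT) for offset != 0; now: 200
    System.out.println(conn.getResponseCode());
  }
}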
Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java?rev=1199422&r1=1199421&r2=1199422&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java Tue Nov  8 20:02:18 2011
@@ -42,8 +42,6 @@ import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import javax.ws.rs.core.Response.Status;
 import javax.ws.rs.core.StreamingOutput;
 
 import org.apache.commons.logging.Log;
@@ -68,7 +66,6 @@ import org.apache.hadoop.hdfs.web.resour
 import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
 import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
 import org.apache.hadoop.hdfs.web.resources.DelegationParam;
-import org.apache.hadoop.hdfs.web.resources.TokenArgumentParam;
 import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
 import org.apache.hadoop.hdfs.web.resources.DestinationParam;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
@@ -87,6 +84,7 @@ import org.apache.hadoop.hdfs.web.resour
 import org.apache.hadoop.hdfs.web.resources.RenameOptionSetParam;
 import org.apache.hadoop.hdfs.web.resources.RenewerParam;
 import org.apache.hadoop.hdfs.web.resources.ReplicationParam;
+import org.apache.hadoop.hdfs.web.resources.TokenArgumentParam;
 import org.apache.hadoop.hdfs.web.resources.UriFsPathParam;
 import org.apache.hadoop.hdfs.web.resources.UserParam;
 import org.apache.hadoop.net.NodeBase;
@@ -153,8 +151,7 @@ public class NamenodeWebHdfsMethods {
       final NameNode namenode, final UserGroupInformation ugi,
       final String renewer) throws IOException {
     final Credentials c = DelegationTokenSecretManager.createCredentials(
-        namenode, ugi,
-        renewer != null? renewer: request.getUserPrincipal().getName());
+        namenode, ugi, renewer != null? renewer: ugi.getShortUserName());
     final Token t = c.getAllTokens().iterator().next();
     t.setKind(WebHdfsFileSystem.TOKEN_KIND);
     SecurityUtil.setTokenService(t, namenode.getNameNodeAddress());
@@ -324,8 +321,7 @@ public class NamenodeWebHdfsMethods {
     {
       final boolean b = np.setReplication(fullpath, replication.getValue(conf));
       final String js = JsonUtil.toJsonString("boolean", b);
-      final ResponseBuilder r = b? Response.ok(): Response.status(Status.FORBIDDEN);
-      return r.entity(js).type(MediaType.APPLICATION_JSON).build();
+      return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
     }
     case SETOWNER:
     {
@@ -508,7 +504,7 @@ public class NamenodeWebHdfsMethods {
           op.getValue(), offset.getValue(), offset, length, bufferSize);
       return Response.temporaryRedirect(uri).build();
     }
-    case GETFILEBLOCKLOCATIONS:
+    case GET_BLOCK_LOCATIONS:
     {
       final long offsetValue = offset.getValue();
       final Long lengthValue = length.getValue();
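Note: two namenode-side behavior changes above are easy to miss. GETDELEGATIONTOKEN with a null renewer now falls back to the caller's short user name rather than the request principal, and SETREPLICATION always answers 200, carrying the boolean result in the JSON body instead of signalling failure with 403. A sketch restating the defaulting rule from the diff:

import org.apache.hadoop.security.UserGroupInformation;

public class RenewerDefaultSketch {
  static String effectiveRenewer(String renewer, UserGroupInformation ugi) {
    // null renewer now means "the caller", e.g. "alice" for alice@EXAMPLE.COM
    return renewer != null ? renewer : ugi.getShortUserName();
  }
}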
Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java?rev=1199422&r1=1199421&r2=1199422&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java Tue Nov  8 20:02:18 2011
@@ -134,6 +134,14 @@ public class JsonUtil {
     return new FsPermission(Short.parseShort(s, 8));
   }
 
+  static enum PathType {
+    FILE, DIRECTORY, SYMLINK;
+
+    static PathType valueOf(HdfsFileStatus status) {
+      return status.isDir()? DIRECTORY: status.isSymlink()? SYMLINK: FILE;
+    }
+  }
+
   /** Convert a HdfsFileStatus object to a Json string. */
   public static String toJsonString(final HdfsFileStatus status,
       boolean includeType) {
@@ -142,13 +150,12 @@ public class JsonUtil {
     }
     final Map m = new TreeMap();
     m.put("localName", status.getLocalName());
-    m.put("isDir", status.isDir());
-    m.put("isSymlink", status.isSymlink());
+    m.put("type", PathType.valueOf(status));
     if (status.isSymlink()) {
       m.put("symlink", status.getSymlink());
     }
 
-    m.put("len", status.getLen());
+    m.put("length", status.getLen());
     m.put("owner", status.getOwner());
     m.put("group", status.getGroup());
     m.put("permission", toString(status.getPermission()));
@@ -169,12 +176,11 @@ public class JsonUtil {
     final Map m = includesType ?
         (Map)json.get(HdfsFileStatus.class.getSimpleName()) : json;
     final String localName = (String) m.get("localName");
-    final boolean isDir = (Boolean) m.get("isDir");
-    final boolean isSymlink = (Boolean) m.get("isSymlink");
-    final byte[] symlink = isSymlink?
-        DFSUtil.string2Bytes((String)m.get("symlink")): null;
+    final PathType type = PathType.valueOf((String) m.get("type"));
+    final byte[] symlink = type != PathType.SYMLINK? null
+        : DFSUtil.string2Bytes((String)m.get("symlink"));
 
-    final long len = (Long) m.get("len");
+    final long len = (Long) m.get("length");
     final String owner = (String) m.get("owner");
     final String group = (String) m.get("group");
     final FsPermission permission = toFsPermission((String) m.get("permission"));
@@ -182,8 +188,8 @@ public class JsonUtil {
     final long mTime = (Long) m.get("modificationTime");
     final long blockSize = (Long) m.get("blockSize");
     final short replication = (short) (long) (Long) m.get("replication");
-    return new HdfsFileStatus(len, isDir, replication, blockSize, mTime, aTime,
-        permission, owner, group,
+    return new HdfsFileStatus(len, type == PathType.DIRECTORY, replication,
+        blockSize, mTime, aTime, permission, owner, group,
         symlink, DFSUtil.string2Bytes(localName));
   }
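Note: on the wire, the HdfsFileStatus JSON object changes shape; the two booleans become a single enum-valued "type" field and "len" is renamed "length". For example (sketch, field subset only):

  before: {"localName":"foo", "isDir":false, "isSymlink":false, "len":1024, ...}
  after:  {"localName":"foo", "type":"FILE", "length":1024, ...}

A minimal decoder for the new shape (SimpleStatus is a hypothetical stand-in for HdfsFileStatus; field names follow the diff above):

import java.util.Map;

public class StatusJsonSketch {
  enum PathType { FILE, DIRECTORY, SYMLINK }

  static final class SimpleStatus {
    final PathType type;
    final long length;
    final String symlink;   // null unless type == SYMLINK

    SimpleStatus(PathType type, long length, String symlink) {
      this.type = type;
      this.length = length;
      this.symlink = symlink;
    }
  }

  static SimpleStatus decode(Map<String, Object> m) {
    final PathType type = PathType.valueOf((String) m.get("type"));
    final String symlink =
        type == PathType.SYMLINK ? (String) m.get("symlink") : null;
    final long length = (Long) m.get("length");   // was "len"
    return new SimpleStatus(type, length, symlink);
  }
}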
Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java?rev=1199422&r1=1199421&r2=1199422&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java Tue Nov  8 20:02:18 2011
@@ -25,12 +25,14 @@ import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.net.HttpURLConnection;
 import java.net.InetSocketAddress;
+import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
+import java.util.StringTokenizer;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -62,7 +64,6 @@ import org.apache.hadoop.hdfs.server.nam
 import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
 import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
 import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
-import org.apache.hadoop.hdfs.web.resources.TokenArgumentParam;
 import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
 import org.apache.hadoop.hdfs.web.resources.DestinationParam;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
@@ -81,6 +82,7 @@ import org.apache.hadoop.hdfs.web.resour
 import org.apache.hadoop.hdfs.web.resources.RenameOptionSetParam;
 import org.apache.hadoop.hdfs.web.resources.RenewerParam;
 import org.apache.hadoop.hdfs.web.resources.ReplicationParam;
+import org.apache.hadoop.hdfs.web.resources.TokenArgumentParam;
 import org.apache.hadoop.hdfs.web.resources.UserParam;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.RemoteException;
@@ -388,9 +390,9 @@ public class WebHdfsFileSystem extends F
   private FileStatus makeQualified(HdfsFileStatus f, Path parent) {
     return new FileStatus(f.getLen(), f.isDir(), f.getReplication(),
-        f.getBlockSize(), f.getModificationTime(),
-        f.getAccessTime(),
+        f.getBlockSize(), f.getModificationTime(), f.getAccessTime(),
         f.getPermission(), f.getOwner(), f.getGroup(),
+        f.isSymlink() ? new Path(f.getSymlink()) : null,
         f.getFullPath(parent).makeQualified(getUri(), getWorkingDirectory()));
   }
@@ -532,24 +534,84 @@ public class WebHdfsFileSystem extends F
     statistics.incrementReadOps(1);
     final HttpOpParam.Op op = GetOpParam.Op.OPEN;
     final URL url = toUrl(op, f, new BufferSizeParam(buffersize));
-    ByteRangeInputStream str = getByteRangeInputStream(url);
-    return new FSDataInputStream(str);
+    return new FSDataInputStream(new OffsetUrlInputStream(
+        new OffsetUrlOpener(url), new OffsetUrlOpener(null)));
   }
 
-  private class URLOpener extends ByteRangeInputStream.URLOpener {
-
-    public URLOpener(URL u) {
-      super(u);
+  class OffsetUrlOpener extends ByteRangeInputStream.URLOpener {
+    /** The url with offset parameter */
+    private URL offsetUrl;
+
+    OffsetUrlOpener(final URL url) {
+      super(url);
     }
 
+    /** Open connection with offset url. */
     @Override
-    public HttpURLConnection openConnection() throws IOException {
+    protected HttpURLConnection openConnection() throws IOException {
       return getHttpUrlConnection(offsetUrl);
     }
+
+    /** Setup offset url before open connection. */
+    @Override
+    protected HttpURLConnection openConnection(final long offset) throws IOException {
+      offsetUrl = offset == 0L? url: new URL(url + "&" + new OffsetParam(offset));
+      final HttpURLConnection conn = openConnection();
+      conn.setRequestMethod("GET");
+      return conn;
+    }
   }
-
-  private ByteRangeInputStream getByteRangeInputStream(URL url) {
-    return new ByteRangeInputStream(new URLOpener(url), new URLOpener(null));
+
+  private static final String OFFSET_PARAM_PREFIX = OffsetParam.NAME + "=";
+
+  /** Remove offset parameter, if there is any, from the url */
+  static URL removeOffsetParam(final URL url) throws MalformedURLException {
+    String query = url.getQuery();
+    if (query == null) {
+      return url;
+    }
+    final String lower = query.toLowerCase();
+    if (!lower.startsWith(OFFSET_PARAM_PREFIX)
+        && !lower.contains("&" + OFFSET_PARAM_PREFIX)) {
+      return url;
+    }
+
+    //rebuild query
+    StringBuilder b = null;
+    for(final StringTokenizer st = new StringTokenizer(query, "&");
+        st.hasMoreTokens();) {
+      final String token = st.nextToken();
+      if (!token.toLowerCase().startsWith(OFFSET_PARAM_PREFIX)) {
+        if (b == null) {
+          b = new StringBuilder("?").append(token);
+        } else {
+          b.append('&').append(token);
+        }
+      }
+    }
+    query = b == null? "": b.toString();
+
+    final String urlStr = url.toString();
+    return new URL(urlStr.substring(0, urlStr.indexOf('?')) + query);
+  }
+
+  static class OffsetUrlInputStream extends ByteRangeInputStream {
+    OffsetUrlInputStream(URLOpener o, URLOpener r) {
+      super(o, r);
+    }
+
+    @Override
+    protected void checkResponseCode(final HttpURLConnection connection
+        ) throws IOException {
+      validateResponse(GetOpParam.Op.OPEN, connection);
+    }
+
+    /** Remove offset parameter before returning the resolved url. */
+    @Override
+    protected URL getResolvedUrl(final HttpURLConnection connection
+        ) throws MalformedURLException {
+      return removeOffsetParam(connection.getURL());
+    }
   }
 
   @Override
@@ -641,7 +703,7 @@ public class WebHdfsFileSystem extends F
       final long offset, final long length) throws IOException {
     statistics.incrementReadOps(1);
 
-    final HttpOpParam.Op op = GetOpParam.Op.GETFILEBLOCKLOCATIONS;
+    final HttpOpParam.Op op = GetOpParam.Op.GET_BLOCK_LOCATIONS;
     final Map m = run(op, p, new OffsetParam(offset),
         new LengthParam(length));
     return DFSUtil.locatedBlocks2Locations(JsonUtil.toLocatedBlocks(m));
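Note: removeOffsetParam (moved from ByteRangeInputStream into WebHdfsFileSystem above) strips the offset query parameter case-insensitively wherever it sits in the query. The expected behavior, mirroring the removeOffset test cases deleted from TestByteRangeInputStream below (presumably now exercised by the new TestOffsetUrlInputStream); the sketch sits in the same package because removeOffsetParam is package-private:

package org.apache.hadoop.hdfs.web;

import java.net.URL;

public class RemoveOffsetSketch {
  public static void main(String[] args) throws Exception {
    // "http://test/Abc?offset=10&Length=99"          -> "http://test/Abc?Length=99"
    // "http://test/Abc?op=read&OFFset=10&Length=99"  -> "http://test/Abc?op=read&Length=99"
    // "http://test/Abc?Length=99&offset=10"          -> "http://test/Abc?Length=99"
    // "http://test/Abc?offset=10"                    -> "http://test/Abc"
    System.out.println(WebHdfsFileSystem.removeOffsetParam(
        new URL("http://test/Abc?offset=10&Length=99")));
  }
}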
Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java?rev=1199422&r1=1199421&r2=1199422&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java Tue Nov  8 20:02:18 2011
@@ -24,7 +24,6 @@ public class GetOpParam extends HttpOpPa
   /** Get operations. */
   public static enum Op implements HttpOpParam.Op {
     OPEN(HttpURLConnection.HTTP_OK),
-    GETFILEBLOCKLOCATIONS(HttpURLConnection.HTTP_OK),
 
     GETFILESTATUS(HttpURLConnection.HTTP_OK),
     LISTSTATUS(HttpURLConnection.HTTP_OK),
@@ -33,6 +32,9 @@ public class GetOpParam extends HttpOpPa
 
     GETDELEGATIONTOKEN(HttpURLConnection.HTTP_OK),
 
+    /** GET_BLOCK_LOCATIONS is a private unstable op. */
+    GET_BLOCK_LOCATIONS(HttpURLConnection.HTTP_OK),
+
     NULL(HttpURLConnection.HTTP_NOT_IMPLEMENTED);
 
     final int expectedHttpResponseCode;

Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestByteRangeInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestByteRangeInputStream.java?rev=1199422&r1=1199421&r2=1199422&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestByteRangeInputStream.java (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestByteRangeInputStream.java Tue Nov  8 20:02:18 2011
@@ -31,10 +31,10 @@ import java.io.InputStream;
 import java.net.HttpURLConnection;
 import java.net.URL;
 
-import org.apache.hadoop.hdfs.ByteRangeInputStream.URLOpener;
 import org.junit.Test;
 
-class MockHttpURLConnection extends HttpURLConnection {
+public class TestByteRangeInputStream {
+public static class MockHttpURLConnection extends HttpURLConnection {
   public MockHttpURLConnection(URL u) {
     super(u);
   }
@@ -85,54 +85,18 @@ class MockHttpURLConnection extends Http
     responseCode = resCode;
   }
 }
-
-public class TestByteRangeInputStream {
-  @Test
-  public void testRemoveOffset() throws IOException {
-    { //no offset
-      String s = "http://test/Abc?Length=99";
-      assertEquals(s, ByteRangeInputStream.removeOffsetParam(new URL(s)).toString());
-    }
-
-    { //no parameters
-      String s = "http://test/Abc";
-      assertEquals(s, ByteRangeInputStream.removeOffsetParam(new URL(s)).toString());
-    }
-
-    { //offset as first parameter
-      String s = "http://test/Abc?offset=10&Length=99";
-      assertEquals("http://test/Abc?Length=99",
-          ByteRangeInputStream.removeOffsetParam(new URL(s)).toString());
-    }
-
-    { //offset as second parameter
-      String s = "http://test/Abc?op=read&OFFset=10&Length=99";
-      assertEquals("http://test/Abc?op=read&Length=99",
-          ByteRangeInputStream.removeOffsetParam(new URL(s)).toString());
-    }
-
-    { //offset as last parameter
-      String s = "http://test/Abc?Length=99&offset=10";
-      assertEquals("http://test/Abc?Length=99",
-          ByteRangeInputStream.removeOffsetParam(new URL(s)).toString());
-    }
-
-    { //offset as the only parameter
-      String s = "http://test/Abc?offset=10";
-      assertEquals("http://test/Abc",
-          ByteRangeInputStream.removeOffsetParam(new URL(s)).toString());
-    }
-  }
 
   @Test
   public void testByteRange() throws IOException {
-    URLOpener ospy = spy(new URLOpener(new URL("http://test/")));
+    HftpFileSystem.RangeHeaderUrlOpener ospy = spy(
+        new HftpFileSystem.RangeHeaderUrlOpener(new URL("http://test/")));
     doReturn(new MockHttpURLConnection(ospy.getURL())).when(ospy)
         .openConnection();
-    URLOpener rspy = spy(new URLOpener((URL) null));
+    HftpFileSystem.RangeHeaderUrlOpener rspy = spy(
+        new HftpFileSystem.RangeHeaderUrlOpener((URL) null));
     doReturn(new MockHttpURLConnection(rspy.getURL())).when(rspy)
         .openConnection();
-    ByteRangeInputStream is = new ByteRangeInputStream(ospy, rspy);
+    ByteRangeInputStream is = new HftpFileSystem.RangeHeaderInputStream(ospy, rspy);
 
     assertEquals("getPos wrong", 0, is.getPos());
Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java?rev=1199422&r1=1199421&r2=1199422&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java Tue Nov  8 20:02:18 2011
@@ -280,7 +280,7 @@ public class TestWebHdfsFileSystemContra
       final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
       conn.setRequestMethod(op.getType().toString());
       conn.connect();
-      assertEquals(HttpServletResponse.SC_FORBIDDEN, conn.getResponseCode());
+      assertEquals(HttpServletResponse.SC_OK, conn.getResponseCode());
       assertFalse(webhdfs.setReplication(dir, (short)1));
       conn.disconnect();
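Note: the contract-test change above captures the new SETREPLICATION semantics end to end: setting replication on a directory is no longer an HTTP error, just a false result. A condensed sketch of what a caller sees (webhdfs and dir as in the test):

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SetReplicationSketch {
  /** Returns false for non-files; the server replies 200 with {"boolean": false}. */
  static boolean trySetReplication(FileSystem webhdfs, Path dir) throws Exception {
    return webhdfs.setReplication(dir, (short) 1);
  }
}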