Subject: svn commit: r1200736 - in /hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/java/ src/main/java/org/apache/hadoop/hdfs/server/common/ src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/ src/main/java/org...
Date: Fri, 11 Nov 2011 04:31:44 -0000
To: hdfs-commits@hadoop.apache.org
From: szetszwo@apache.org

Author: szetszwo
Date: Fri Nov 11 04:31:43 2011
New Revision: 1200736

URL: http://svn.apache.org/viewvc?rev=1200736&view=rev
Log:
svn merge -c 1200734 from 0.23 for HDFS-2539.
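For context: per the CHANGES.txt entry below, HDFS-2539 adds doAs and GETHOMEDIRECTORY support to webhdfs, i.e. an authenticated caller may act on behalf of another user via a doas query parameter, and a user's home directory can be fetched as JSON. A minimal client-side sketch of how the new parameters combine in a request URL, using only JDK classes; the host, port, and both user names are placeholders, and security is assumed to be off (so user.name is taken from the query string):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class WebHdfsDoAsExample {
      public static void main(String[] args) throws Exception {
        // GETHOMEDIRECTORY on the root path, authenticated as "oozie" but
        // acting on behalf of "alice". Host, port, and user names are
        // placeholders, not values from this commit.
        final URL url = new URL("http://namenode.example.com:50070"
            + "/webhdfs/v1/?op=GETHOMEDIRECTORY&user.name=oozie&doas=alice");
        final HttpURLConnection conn = (HttpURLConnection)url.openConnection();
        try {
          // On success the namenode replies 200 with {"Path":"/user/alice"}.
          final BufferedReader in = new BufferedReader(
              new InputStreamReader(conn.getInputStream()));
          for(String line; (line = in.readLine()) != null; ) {
            System.out.println(line);
          }
        } finally {
          conn.disconnect();
        }
      }
    }

A doas from a caller without proxy-user authorization is mapped to HTTP 401, as exercised by the TestWebHdfsFileSystemContract change at the end of this diff.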
Added:
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DoAsParam.java
      - copied unchanged from r1200734, hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DoAsParam.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java
      - copied unchanged from r1200734, hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java
Modified:
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/   (props changed)
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/   (props changed)
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/UserProvider.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java
    hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java

Propchange: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Nov 11 04:31:43 2011
@@ -1,5 +1,5 @@
-/hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs:1196812,1197335,1198905,1199403
-/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs:1161777,1161781,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163081,1163490,1163768,1164255,1164301,1164339,1166402,1167383,1167662,1170085,1170379,1170459,1170996,1171136,1171297,1171379,1171611,1172916,1173402,1173468,1175113,1176178,1176550,1176719,1176729,1176733,1177100,1177161,1177487,1177531,1177757,1177859,1177864,1177905,1179169,1179856,1179861,1180757,1183081,1183098,1183175,1183554,1186508,1187140,1189028,1189355,1189360,1189546,1189932,1189982,1190077,1190708,1195575,1195656,1195731,1195754,1196113,1196129,1197329,1198903,1199396
+/hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs:1196812,1197335,1198905,1199403,1200734
+/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs:1161777,1161781,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163081,1163490,1163768,1164255,1164301,1164339,1166402,1167383,1167662,1170085,1170379,1170459,1170996,1171136,1171297,1171379,1171611,1172916,1173402,1173468,1175113,1176178,1176550,1176719,1176729,1176733,1177100,1177161,1177487,1177531,1177757,1177859,1177864,1177905,1179169,1179856,1179861,1180757,1183081,1183098,1183175,1183554,1186508,1187140,1189028,1189355,1189360,1189546,1189932,1189982,1190077,1190708,1195575,1195656,1195731,1195754,1196113,1196129,1197329,1198903,1199396,1200731
 /hadoop/core/branches/branch-0.19/hdfs:713112
 /hadoop/hdfs/branches/HDFS-1052:987665-1095512
 /hadoop/hdfs/branches/HDFS-265:796829-820463

Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1200736&r1=1200735&r2=1200736&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Fri Nov 11 04:31:43 2011
@@ -326,6 +326,9 @@ Release 0.23.0 - 2011-11-01
     HDFS-2385. Support renew and cancel delegation tokens in webhdfs.
     (szetszwo)
 
+    HDFS-2539. Support doAs and GETHOMEDIRECTORY in webhdfs.
+    (szetszwo)
+
   IMPROVEMENTS
 
     HDFS-1875. MiniDFSCluster hard-codes dfs.datanode.address to localhost

Propchange: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Nov 11 04:31:43 2011
@@ -1,5 +1,5 @@
-/hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java:1196812,1197335,1198905,1199403
-/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java:1161777,1161781,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163081,1163490,1163768,1164255,1164301,1164339,1166402,1167383,1167662,1170085,1170379,1170459,1170996,1171136,1171297,1171379,1171611,1172916,1173402,1173468,1175113,1176178,1176550,1176719,1176729,1176733,1177100,1177161,1177487,1177531,1177757,1177859,1177864,1177905,1179169,1179856,1179861,1180757,1183081,1183098,1183175,1183554,1186508,1187140,1189028,1189355,1189360,1189546,1189932,1189982,1190077,1190708,1195575,1195656,1195731,1195754,1196113,1196129,1197329,1198903,1199396
+/hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java:1196812,1197335,1198905,1199403,1200734
+/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java:1161777,1161781,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163081,1163490,1163768,1164255,1164301,1164339,1166402,1167383,1167662,1170085,1170379,1170459,1170996,1171136,1171297,1171379,1171611,1172916,1173402,1173468,1175113,1176178,1176550,1176719,1176729,1176733,1177100,1177161,1177487,1177531,1177757,1177859,1177864,1177905,1179169,1179856,1179861,1180757,1183081,1183098,1183175,1183554,1186508,1187140,1189028,1189355,1189360,1189546,1189932,1189982,1190077,1190708,1195575,1195656,1195731,1195754,1196113,1196129,1197329,1198903,1199396,1200731
 /hadoop/core/branches/branch-0.19/hdfs/src/java:713112
 /hadoop/core/trunk/src/hdfs:776175-785643,785929-786278
 /hadoop/hdfs/branches/HDFS-1052/src/java:987665-1095512

Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java?rev=1200736&r1=1200735&r2=1200736&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java Fri Nov 11 04:31:43 2011
@@ -56,6 +56,7 @@ import org.apache.hadoop.hdfs.server.blo
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
 import org.apache.hadoop.hdfs.web.resources.DelegationParam;
+import org.apache.hadoop.hdfs.web.resources.DoAsParam;
 import org.apache.hadoop.hdfs.web.resources.UserParam;
 import org.apache.hadoop.http.HtmlQuoting;
 import org.apache.hadoop.io.Text;
@@ -64,6 +65,8 @@ import org.apache.hadoop.security.Access
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authentication.util.KerberosName;
+import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.VersionInfo;
@@ -534,9 +537,10 @@ public class JspHelper {
       final boolean tryUgiParameter) throws IOException {
     final UserGroupInformation ugi;
     final String usernameFromQuery = getUsernameFromQuery(request, tryUgiParameter);
+    final String doAsUserFromQuery = request.getParameter(DoAsParam.NAME);
 
     if(UserGroupInformation.isSecurityEnabled()) {
-      final String user = request.getRemoteUser();
+      final String remoteUser = request.getRemoteUser();
       String tokenString = request.getParameter(DELEGATION_PARAMETER_NAME);
       if (tokenString != null) {
         Token<DelegationTokenIdentifier> token = 
@@ -560,26 +564,36 @@ public class JspHelper {
           }
         }
         ugi = id.getUser();
-        checkUsername(ugi.getShortUserName(), usernameFromQuery);
-        checkUsername(ugi.getShortUserName(), user);
+        if (ugi.getRealUser() == null) {
+          //non-proxy case
+          checkUsername(ugi.getShortUserName(), usernameFromQuery);
+          checkUsername(null, doAsUserFromQuery);
+        } else {
+          //proxy case
+          checkUsername(ugi.getRealUser().getShortUserName(), usernameFromQuery);
+          checkUsername(ugi.getShortUserName(), doAsUserFromQuery);
+          ProxyUsers.authorize(ugi, request.getRemoteAddr(), conf);
+        }
         ugi.addToken(token);
         ugi.setAuthenticationMethod(AuthenticationMethod.TOKEN);
       } else {
-        if(user == null) {
+        if(remoteUser == null) {
           throw new IOException("Security enabled but user not " +
                                 "authenticated by filter");
         }
-        ugi = UserGroupInformation.createRemoteUser(user);
-        checkUsername(ugi.getShortUserName(), usernameFromQuery);
+        final UserGroupInformation realUgi = UserGroupInformation.createRemoteUser(remoteUser);
+        checkUsername(realUgi.getShortUserName(), usernameFromQuery);
         // This is not necessarily true, could have been auth'ed by user-facing
         // filter
-        ugi.setAuthenticationMethod(secureAuthMethod);
+        realUgi.setAuthenticationMethod(secureAuthMethod);
+        ugi = initUGI(realUgi, doAsUserFromQuery, request, true, conf);
       }
     } else { // Security's not on, pull from url
-      ugi = usernameFromQuery == null?
+      final UserGroupInformation realUgi = usernameFromQuery == null?
           getDefaultWebUser(conf) // not specified in request
           : UserGroupInformation.createRemoteUser(usernameFromQuery);
-      ugi.setAuthenticationMethod(AuthenticationMethod.SIMPLE);
+      realUgi.setAuthenticationMethod(AuthenticationMethod.SIMPLE);
+      ugi = initUGI(realUgi, doAsUserFromQuery, request, false, conf);
     }
 
     if(LOG.isDebugEnabled())
@@ -587,12 +601,34 @@ public class JspHelper {
     return ugi;
   }
 
+  private static UserGroupInformation initUGI(final UserGroupInformation realUgi,
+      final String doAsUserFromQuery, final HttpServletRequest request,
+      final boolean isSecurityEnabled, final Configuration conf
+      ) throws AuthorizationException {
+    final UserGroupInformation ugi;
+    if (doAsUserFromQuery == null) {
+      //non-proxy case
+      ugi = realUgi;
+    } else {
+      //proxy case
+      ugi = UserGroupInformation.createProxyUser(doAsUserFromQuery, realUgi);
+      ugi.setAuthenticationMethod(
+          isSecurityEnabled? AuthenticationMethod.PROXY: AuthenticationMethod.SIMPLE);
+      ProxyUsers.authorize(ugi, request.getRemoteAddr(), conf);
+    }
+    return ugi;
+  }
+
   /**
    * Expected user name should be a short name.
    */
   private static void checkUsername(final String expected, final String name
       ) throws IOException {
-    if (name == null) {
+    if (expected == null && name != null) {
+      throw new IOException("Usernames not matched: expecting null but name="
+          + name);
+    }
+    if (name == null) { //name is optional, null is okay
       return;
     }
     KerberosName u = new KerberosName(name);

Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java?rev=1200736&r1=1200735&r2=1200736&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java Fri Nov 11 04:31:43 2011
@@ -69,6 +69,7 @@ import org.apache.hadoop.hdfs.web.resour
 import org.apache.hadoop.hdfs.web.resources.DelegationParam;
 import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
 import org.apache.hadoop.hdfs.web.resources.DestinationParam;
+import org.apache.hadoop.hdfs.web.resources.DoAsParam;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
 import org.apache.hadoop.hdfs.web.resources.GroupParam;
 import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
@@ -116,6 +117,21 @@ public class NamenodeWebHdfsMethods {
   private @Context HttpServletRequest request;
   private @Context HttpServletResponse response;
 
+  private void init(final UserGroupInformation ugi,
+      final DelegationParam delegation,
+      final UserParam username, final DoAsParam doAsUser,
+      final UriFsPathParam path, final HttpOpParam<?> op,
+      final Param<?,?>... parameters) throws IOException {
+    if (LOG.isTraceEnabled()) {
+      LOG.trace("HTTP " + op.getValue().getType() + ": " + op + ", " + path
+          + ", ugi=" + ugi + ", " + username + ", " + doAsUser
+          + Param.toSortedString(", ", parameters));
+    }
+
+    //clear content type
+    response.setContentType(null);
+  }
+
   private static DatanodeInfo chooseDatanode(final NameNode namenode,
       final String path, final HttpOpParam.Op op, final long openOffset
       ) throws IOException {
@@ -161,6 +177,7 @@ public class NamenodeWebHdfsMethods {
   private URI redirectURI(final NameNode namenode,
       final UserGroupInformation ugi, final DelegationParam delegation,
+      final UserParam username, final DoAsParam doAsUser,
       final String path, final HttpOpParam.Op op, final long openOffset,
       final Param<?,?>... parameters) throws URISyntaxException, IOException {
     final DatanodeInfo dn = chooseDatanode(namenode, path, op, openOffset);
@@ -168,7 +185,7 @@ public class NamenodeWebHdfsMethods {
     final String delegationQuery;
     if (!UserGroupInformation.isSecurityEnabled()) {
       //security disabled
-      delegationQuery = "";
+      delegationQuery = Param.toSortedString("&", doAsUser, username);
     } else if (delegation.getValue() != null) {
       //client has provided a token
       delegationQuery = "&" + delegation;
@@ -178,8 +195,7 @@ public class NamenodeWebHdfsMethods {
           namenode, ugi, request.getUserPrincipal().getName());
       delegationQuery = "&" + new DelegationParam(t.encodeToUrlString());
     }
-    final String query = op.toQueryString()
-        + '&' + new UserParam(ugi) + delegationQuery
+    final String query = op.toQueryString() + delegationQuery
         + Param.toSortedString("&", parameters);
     final String uripath = WebHdfsFileSystem.PATH_PREFIX + path;
@@ -200,6 +216,10 @@ public class NamenodeWebHdfsMethods {
       @Context final UserGroupInformation ugi,
       @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
           final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
           final PutOpParam op,
       @QueryParam(DestinationParam.NAME) @DefaultValue(DestinationParam.DEFAULT)
@@ -224,12 +244,13 @@ public class NamenodeWebHdfsMethods {
           final AccessTimeParam accessTime,
       @QueryParam(RenameOptionSetParam.NAME) @DefaultValue(RenameOptionSetParam.DEFAULT)
           final RenameOptionSetParam renameOptions,
-      @QueryParam(TokenArgumentParam.NAME) @DefaultValue(TokenArgumentParam.DEFAULT)
+      @QueryParam(TokenArgumentParam.NAME) @DefaultValue(TokenArgumentParam.DEFAULT) 
           final TokenArgumentParam delegationTokenArgument
       ) throws IOException, InterruptedException {
-    return put(ugi, delegation, ROOT, op, destination, owner, group,
-        permission, overwrite, bufferSize, replication, blockSize,
-        modificationTime, accessTime, renameOptions, delegationTokenArgument);
+    return put(ugi, delegation, username, doAsUser, ROOT, op, destination,
+        owner, group, permission, overwrite, bufferSize, replication,
+        blockSize, modificationTime, accessTime, renameOptions,
+        delegationTokenArgument);
   }
 
   /** Handle HTTP PUT request. */
@@ -241,6 +262,10 @@ public class NamenodeWebHdfsMethods {
       @Context final UserGroupInformation ugi,
       @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
          final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
       @QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
           final PutOpParam op,
@@ -266,19 +291,13 @@ public class NamenodeWebHdfsMethods {
           final AccessTimeParam accessTime,
       @QueryParam(RenameOptionSetParam.NAME) @DefaultValue(RenameOptionSetParam.DEFAULT)
           final RenameOptionSetParam renameOptions,
-      @QueryParam(TokenArgumentParam.NAME) @DefaultValue(TokenArgumentParam.DEFAULT)
+      @QueryParam(TokenArgumentParam.NAME) @DefaultValue(TokenArgumentParam.DEFAULT) 
          final TokenArgumentParam delegationTokenArgument
       ) throws IOException, InterruptedException {
 
-    if (LOG.isTraceEnabled()) {
-      LOG.trace(op + ": " + path + ", ugi=" + ugi
-          + Param.toSortedString(", ", destination, owner, group, permission,
-              overwrite, bufferSize, replication, blockSize,
-              modificationTime, accessTime, renameOptions));
-    }
-
-    //clear content type
-    response.setContentType(null);
+    init(ugi, delegation, username, doAsUser, path, op, destination, owner,
+        group, permission, overwrite, bufferSize, replication, blockSize,
+        modificationTime, accessTime, renameOptions, delegationTokenArgument);
 
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override
@@ -294,8 +313,8 @@ public class NamenodeWebHdfsMethods {
     switch(op.getValue()) {
     case CREATE:
     {
-      final URI uri = redirectURI(namenode, ugi, delegation, fullpath,
-          op.getValue(), -1L,
+      final URI uri = redirectURI(namenode, ugi, delegation, username, doAsUser,
+          fullpath, op.getValue(), -1L,
           permission, overwrite, bufferSize, replication, blockSize);
       return Response.temporaryRedirect(uri).build();
     } 
@@ -378,12 +397,16 @@ public class NamenodeWebHdfsMethods {
       @Context final UserGroupInformation ugi,
       @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
          final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @QueryParam(PostOpParam.NAME) @DefaultValue(PostOpParam.DEFAULT)
           final PostOpParam op,
       @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
           final BufferSizeParam bufferSize
       ) throws IOException, InterruptedException {
-    return post(ugi, delegation, ROOT, op, bufferSize);
+    return post(ugi, delegation, username, doAsUser, ROOT, op, bufferSize);
   }
 
   /** Handle HTTP POST request. */
@@ -395,6 +418,10 @@ public class NamenodeWebHdfsMethods {
       @Context final UserGroupInformation ugi,
       @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
          final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
       @QueryParam(PostOpParam.NAME) @DefaultValue(PostOpParam.DEFAULT)
           final PostOpParam op,
@@ -402,13 +429,7 @@ public class NamenodeWebHdfsMethods {
           final BufferSizeParam bufferSize
       ) throws IOException, InterruptedException {
 
-    if (LOG.isTraceEnabled()) {
-      LOG.trace(op + ": " + path + ", ugi=" + ugi
-          + Param.toSortedString(", ", bufferSize));
-    }
-
-    //clear content type
-    response.setContentType(null);
+    init(ugi, delegation, username, doAsUser, path, op, bufferSize);
 
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override
@@ -422,8 +443,8 @@ public class NamenodeWebHdfsMethods {
     switch(op.getValue()) {
     case APPEND:
     {
-      final URI uri = redirectURI(namenode, ugi, delegation, fullpath,
-          op.getValue(), -1L, bufferSize);
+      final URI uri = redirectURI(namenode, ugi, delegation, username, doAsUser,
+          fullpath, op.getValue(), -1L, bufferSize);
       return Response.temporaryRedirect(uri).build();
     }
     default:
@@ -445,6 +466,10 @@ public class NamenodeWebHdfsMethods {
       @Context final UserGroupInformation ugi,
       @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
          final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @QueryParam(GetOpParam.NAME) @DefaultValue(GetOpParam.DEFAULT)
           final GetOpParam op,
       @QueryParam(OffsetParam.NAME) @DefaultValue(OffsetParam.DEFAULT)
@@ -456,7 +481,8 @@ public class NamenodeWebHdfsMethods {
       @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
           final BufferSizeParam bufferSize
       ) throws IOException, URISyntaxException, InterruptedException {
-    return get(ugi, delegation, ROOT, op, offset, length, renewer, bufferSize);
+    return get(ugi, delegation, username, doAsUser, ROOT, op,
+        offset, length, renewer, bufferSize);
   }
 
   /** Handle HTTP GET request. */
@@ -467,6 +493,10 @@
       @Context final UserGroupInformation ugi,
       @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
          final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
       @QueryParam(GetOpParam.NAME) @DefaultValue(GetOpParam.DEFAULT)
           final GetOpParam op,
@@ -480,13 +510,8 @@
           final BufferSizeParam bufferSize
       ) throws IOException, InterruptedException {
 
-    if (LOG.isTraceEnabled()) {
-      LOG.trace(op + ": " + path + ", ugi=" + ugi
-          + Param.toSortedString(", ", offset, length, renewer, bufferSize));
-    }
-
-    //clear content type
-    response.setContentType(null);
+    init(ugi, delegation, username, doAsUser, path, op,
+        offset, length, renewer, bufferSize);
 
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override
@@ -501,8 +526,8 @@
     switch(op.getValue()) {
     case OPEN:
     {
-      final URI uri = redirectURI(namenode, ugi, delegation, fullpath,
-          op.getValue(), offset.getValue(), offset, length, bufferSize);
+      final URI uri = redirectURI(namenode, ugi, delegation, username, doAsUser,
+          fullpath, op.getValue(), offset.getValue(), offset, length, bufferSize);
       return Response.temporaryRedirect(uri).build();
     }
     case GET_BLOCK_LOCATIONS:
@@ -537,17 +562,28 @@ public class NamenodeWebHdfsMethods {
    }
    case GETFILECHECKSUM:
    {
-      final URI uri = redirectURI(namenode, ugi, delegation, fullpath,
-          op.getValue(), -1L);
+      final URI uri = redirectURI(namenode, ugi, delegation, username, doAsUser,
+          fullpath, op.getValue(), -1L);
      return Response.temporaryRedirect(uri).build();
    }
    case GETDELEGATIONTOKEN:
    {
+      if (delegation.getValue() != null) {
+        throw new IllegalArgumentException(delegation.getName()
+            + " parameter is not null.");
+      }
      final Token<? extends TokenIdentifier> token = generateDelegationToken(
          namenode, ugi, renewer.getValue());
      final String js = JsonUtil.toJsonString(token);
      return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    }
+    case GETHOMEDIRECTORY:
+    {
+      final String js = JsonUtil.toJsonString(
+          org.apache.hadoop.fs.Path.class.getSimpleName(),
+          WebHdfsFileSystem.getHomeDirectoryString(ugi));
+      return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+    }
    default:
      throw new UnsupportedOperationException(op + " is not supported");
    }
@@ -609,12 +645,18 @@ public class NamenodeWebHdfsMethods {
   @Produces(MediaType.APPLICATION_JSON)
   public Response deleteRoot(
       @Context final UserGroupInformation ugi,
+      @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
+          final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @QueryParam(DeleteOpParam.NAME) @DefaultValue(DeleteOpParam.DEFAULT)
           final DeleteOpParam op,
       @QueryParam(RecursiveParam.NAME) @DefaultValue(RecursiveParam.DEFAULT)
           final RecursiveParam recursive
       ) throws IOException, InterruptedException {
-    return delete(ugi, ROOT, op, recursive);
+    return delete(ugi, delegation, username, doAsUser, ROOT, op, recursive);
   }
 
   /** Handle HTTP DELETE request. */
@@ -623,6 +665,12 @@
   @Produces(MediaType.APPLICATION_JSON)
   public Response delete(
       @Context final UserGroupInformation ugi,
+      @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
+          final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
       @QueryParam(DeleteOpParam.NAME) @DefaultValue(DeleteOpParam.DEFAULT)
           final DeleteOpParam op,
@@ -630,13 +678,7 @@
           final RecursiveParam recursive
       ) throws IOException, InterruptedException {
 
-    if (LOG.isTraceEnabled()) {
-      LOG.trace(op + ": " + path + ", ugi=" + ugi
-          + Param.toSortedString(", ", recursive));
-    }
-
-    //clear content type
-    response.setContentType(null);
+    init(ugi, delegation, username, doAsUser, path, op, recursive);
 
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override

Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java?rev=1200736&r1=1200735&r2=1200736&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java Fri Nov 11 04:31:43 2011
@@ -18,6 +18,12 @@ package org.apache.hadoop.hdfs.web;
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 import java.util.Properties;
 
 import javax.servlet.FilterChain;
@@ -26,6 +32,7 @@ import javax.servlet.ServletException;
 import javax.servlet.ServletRequest;
 import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
 
 import org.apache.hadoop.hdfs.web.resources.DelegationParam;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -67,15 +74,77 @@ public class AuthFilter extends Authenti
   @Override
   public void doFilter(ServletRequest request, ServletResponse response,
       FilterChain filterChain) throws IOException, ServletException {
-    HttpServletRequest httpRequest = (HttpServletRequest) request;
-    String tokenString = httpRequest
-        .getParameter(DelegationParam.NAME);
+    final HttpServletRequest httpRequest = toLowerCase((HttpServletRequest)request);
+    final String tokenString = httpRequest.getParameter(DelegationParam.NAME);
     if (tokenString != null) {
       //Token is present in the url, therefore token will be used for
       //authentication, bypass kerberos authentication.
       filterChain.doFilter(httpRequest, response);
       return;
     }
-    super.doFilter(request, response, filterChain);
+    super.doFilter(httpRequest, response, filterChain);
+  }
+
+  private static HttpServletRequest toLowerCase(final HttpServletRequest request) {
+    @SuppressWarnings("unchecked")
+    final Map<String, String[]> original = (Map<String, String[]>)request.getParameterMap();
+    if (!ParamFilter.containsUpperCase(original.keySet())) {
+      return request;
+    }
+
+    final Map<String, List<String>> m = new HashMap<String, List<String>>();
+    for(Map.Entry<String, String[]> entry : original.entrySet()) {
+      final String key = entry.getKey().toLowerCase();
+      List<String> strings = m.get(key);
+      if (strings == null) {
+        strings = new ArrayList<String>();
+        m.put(key, strings);
+      }
+      for(String v : entry.getValue()) {
+        strings.add(v);
+      }
+    }
+
+    return new HttpServletRequestWrapper(request) {
+      private Map<String, String[]> parameters = null;
+
+      @Override
+      public Map<String, String[]> getParameterMap() {
+        if (parameters == null) {
+          parameters = new HashMap<String, String[]>();
+          for(Map.Entry<String, List<String>> entry : m.entrySet()) {
+            final List<String> a = entry.getValue();
+            parameters.put(entry.getKey(), a.toArray(new String[a.size()]));
+          }
+        }
+        return parameters;
+      }
+
+      @Override
+      public String getParameter(String name) {
+        final List<String> a = m.get(name);
+        return a == null? null: a.get(0);
+      }
+
+      @Override
+      public String[] getParameterValues(String name) {
+        return getParameterMap().get(name);
+      }
+
+      @Override
+      public Enumeration<String> getParameterNames() {
+        final Iterator<String> i = m.keySet().iterator();
+        return new Enumeration<String>() {
+          @Override
+          public boolean hasMoreElements() {
+            return i.hasNext();
+          }
+          @Override
+          public String nextElement() {
+            return i.next();
+          }
+        };
+      }
+    };
   }
 }
\ No newline at end of file

Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java?rev=1200736&r1=1200735&r2=1200736&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java Fri Nov 11 04:31:43 2011
@@ -59,7 +59,7 @@ public class ParamFilter implements Reso
   }
 
   /** Do the strings contain upper case letters? */
-  private static boolean containsUpperCase(final Iterable<String> strings) {
+  static boolean containsUpperCase(final Iterable<String> strings) {
     for(String s : strings) {
       for(int i = 0; i < s.length(); i++) {
         if (Character.isUpperCase(s.charAt(i))) {

Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java?rev=1200736&r1=1200735&r2=1200736&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java Fri Nov 11 04:31:43 2011
@@ -92,6 +92,8 @@ import org.apache.hadoop.security.Securi
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.TokenRenewer;
@@ -201,9 +203,14 @@ public class WebHdfsFileSystem extends F
     }
   }
 
+  /** @return the home directory. */
+  public static String getHomeDirectoryString(final UserGroupInformation ugi) {
+    return "/user/" + ugi.getShortUserName();
+  }
+
   @Override
   public Path getHomeDirectory() {
-    return makeQualified(new Path("/user/" + ugi.getShortUserName()));
+    return makeQualified(new Path(getHomeDirectoryString(ugi)));
   }
 
   @Override
@@ -225,7 +232,7 @@ public class WebHdfsFileSystem extends F
     return f.isAbsolute()? f: new Path(workingDir, f);
   }
 
-  private static Map<?, ?> jsonParse(final InputStream in) throws IOException {
+  static Map<?, ?> jsonParse(final InputStream in) throws IOException {
     if (in == null) {
       throw new IOException("The input stream is null.");
     }
@@ -251,13 +258,16 @@ public class WebHdfsFileSystem extends F
       final RemoteException re = JsonUtil.toRemoteException(m);
       throw re.unwrapRemoteException(AccessControlException.class,
-          DSQuotaExceededException.class,
+          InvalidToken.class,
+          AuthenticationException.class,
+          AuthorizationException.class,
           FileAlreadyExistsException.class,
           FileNotFoundException.class,
           ParentNotDirectoryException.class,
+          UnresolvedPathException.class,
           SafeModeException.class,
-          NSQuotaExceededException.class,
-          UnresolvedPathException.class);
+          DSQuotaExceededException.class,
+          NSQuotaExceededException.class);
     }
     return null;
   }
@@ -352,7 +362,7 @@ public class WebHdfsFileSystem extends F
   /**
    * Two-step Create/Append:
    * Step 1) Submit a Http request with neither auto-redirect nor data. 
-   * Step 2) Submit Http PUT with the URL from the Location header with data.
+   * Step 2) Submit another Http request with the URL from the Location header with data.
    * 
    * The reason of having two-step create/append is for preventing clients to
   * send out the data before the redirect. This issue is addressed by the
   * 100-continue". The two-step create/append is a temporary workaround for
   * the software library bugs.
   */
-  private static HttpURLConnection twoStepWrite(HttpURLConnection conn,
+  static HttpURLConnection twoStepWrite(HttpURLConnection conn,
      final HttpOpParam.Op op) throws IOException {
    //Step 1) Submit a Http request with neither auto-redirect nor data. 
    conn.setInstanceFollowRedirects(false);
@@ -372,7 +382,7 @@ public class WebHdfsFileSystem extends F
     final String redirect = conn.getHeaderField("Location");
     conn.disconnect();
 
-    //Step 2) Submit Http PUT with the URL from the Location header with data.
+    //Step 2) Submit another Http request with the URL from the Location header with data.
     conn = (HttpURLConnection)new URL(redirect).openConnection();
     conn.setRequestMethod(op.getType().toString());
     return conn;
@@ -507,7 +517,7 @@ public class WebHdfsFileSystem extends F
         DFSConfigKeys.DFS_REPLICATION_DEFAULT);
   }
 
-  private FSDataOutputStream write(final HttpOpParam.Op op,
+  FSDataOutputStream write(final HttpOpParam.Op op,
       final HttpURLConnection conn, final int bufferSize) throws IOException {
     return new FSDataOutputStream(new BufferedOutputStream(
         conn.getOutputStream(), bufferSize), statistics) {
@@ -516,7 +526,11 @@
         try {
           super.close();
         } finally {
-          validateResponse(op, conn);
+          try {
+            validateResponse(op, conn);
+          } finally {
+            conn.disconnect();
+          }
         }
       }
     };
@@ -630,7 +644,7 @@ public class WebHdfsFileSystem extends F
   }
 
   static class OffsetUrlInputStream extends ByteRangeInputStream {
-    OffsetUrlInputStream(URLOpener o, URLOpener r) {
+    OffsetUrlInputStream(OffsetUrlOpener o, OffsetUrlOpener r) {
       super(o, r);
     }
 
@@ -673,7 +687,7 @@ public class WebHdfsFileSystem extends F
     final HttpOpParam.Op op = GetOpParam.Op.GETDELEGATIONTOKEN;
     final Map<?, ?> m = run(op, null, new RenewerParam(renewer));
     final Token<DelegationTokenIdentifier> token = JsonUtil.toDelegationToken(m); 
-    token.setService(new Text(getCanonicalServiceName()));
+    SecurityUtil.setTokenService(token, nnAddr);
     return token;
   }

Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java?rev=1200736&r1=1200735&r2=1200736&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java Fri Nov 11 04:31:43 2011
@@ -30,14 +30,25 @@ import javax.ws.rs.ext.Provider;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hdfs.web.JsonUtil;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.security.authorize.AuthorizationException;
 
 import com.sun.jersey.api.ParamException;
+import com.sun.jersey.api.container.ContainerException;
 
 /** Handle exceptions. */
 @Provider
 public class ExceptionHandler implements ExceptionMapper<Exception> {
   public static final Log LOG = LogFactory.getLog(ExceptionHandler.class);
 
+  private static Exception toCause(Exception e) {
+    final Throwable t = e.getCause();
+    if (t != null && t instanceof Exception) {
+      e = (Exception)e.getCause();
+    }
+    return e;
+  }
+
   private @Context HttpServletResponse response;
 
   @Override
@@ -55,12 +66,20 @@ public class ExceptionHandler implements
       e = new IllegalArgumentException("Invalid value for webhdfs parameter \""
           + paramexception.getParameterName() + "\": "
           + e.getCause().getMessage(), e);
-    }
+    } 
+    if (e instanceof ContainerException) {
+      e = toCause(e);
+    }
+    if (e instanceof RemoteException) {
+      e = ((RemoteException)e).unwrapRemoteException();
+    }
 
     //Map response status
     final Response.Status s;
     if (e instanceof SecurityException) {
       s = Response.Status.UNAUTHORIZED;
+    } else if (e instanceof AuthorizationException) {
+      s = Response.Status.UNAUTHORIZED;
     } else if (e instanceof FileNotFoundException) {
       s = Response.Status.NOT_FOUND;
     } else if (e instanceof IOException) {

Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java?rev=1200736&r1=1200735&r2=1200736&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java Fri Nov 11 04:31:43 2011
@@ -30,6 +30,7 @@ public class GetOpParam extends HttpOpPa
     GETCONTENTSUMMARY(HttpURLConnection.HTTP_OK),
     GETFILECHECKSUM(HttpURLConnection.HTTP_OK),
 
+    GETHOMEDIRECTORY(HttpURLConnection.HTTP_OK),
     GETDELEGATIONTOKEN(HttpURLConnection.HTTP_OK),
 
     /** GET_BLOCK_LOCATIONS is a private unstable op. */

Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java?rev=1200736&r1=1200735&r2=1200736&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java Fri Nov 11 04:31:43 2011
@@ -58,7 +58,7 @@ public abstract class HttpOpParam
+      final Map<?, ?> m = WebHdfsTestUtil.connectAndGetJson(conn, HttpServletResponse.SC_OK);
+      conn.disconnect();
+
+      final Object responsePath = m.get(Path.class.getSimpleName());
+      WebHdfsTestUtil.LOG.info("responsePath=" + responsePath);
+      Assert.assertEquals("/user/" + PROXY_USER, responsePath);
+    }
+
+    {
+      //test GETHOMEDIRECTORY with DOas
+      final URL url = WebHdfsTestUtil.toUrl(webhdfs,
+          GetOpParam.Op.GETHOMEDIRECTORY, root, new DoAsParam(PROXY_USER) {
+            @Override
+            public String getName() {
+              return "DOas";
+            }
+          });
+      final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      final Map<?, ?> m = WebHdfsTestUtil.connectAndGetJson(conn, HttpServletResponse.SC_OK);
+      conn.disconnect();
+
+      final Object responsePath = m.get(Path.class.getSimpleName());
+      WebHdfsTestUtil.LOG.info("responsePath=" + responsePath);
+      Assert.assertEquals("/user/" + PROXY_USER, responsePath);
+    }
+
+    {
+      //test create file with doAs
+      final Path f = new Path("/testWebHdfsDoAs/a.txt");
+      final PutOpParam.Op op = PutOpParam.Op.CREATE;
+      final URL url = WebHdfsTestUtil.toUrl(webhdfs, op, f, new DoAsParam(PROXY_USER));
+      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      conn = WebHdfsTestUtil.twoStepWrite(conn, op);
+      final FSDataOutputStream out = WebHdfsTestUtil.write(webhdfs, op, conn, 4096);
+      out.write("Hello, webhdfs user!".getBytes());
+      out.close();
+
+      final FileStatus status = webhdfs.getFileStatus(f);
+      WebHdfsTestUtil.LOG.info("status.getOwner()=" + status.getOwner());
+      Assert.assertEquals(PROXY_USER, status.getOwner());
+    }
+  }
 }

Modified: hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java?rev=1200736&r1=1200735&r2=1200736&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java (original)
+++ hadoop/common/branches/branch-0.23.0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java Fri Nov 11 04:31:43 2011
@@ -23,9 +23,8 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.net.HttpURLConnection;
-import java.net.URI;
 import java.net.URL;
-import java.security.PrivilegedExceptionAction;
+import java.util.Map;
 
 import javax.servlet.http.HttpServletResponse;
 
@@ -34,12 +33,12 @@ import org.apache.hadoop.fs.BlockLocatio
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystemContractBaseTest;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.web.resources.DoAsParam;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
 import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
 import org.apache.hadoop.hdfs.web.resources.PutOpParam;
@@ -51,6 +50,8 @@ public class TestWebHdfsFileSystemContra
   private static final Configuration conf = new Configuration();
   private static final MiniDFSCluster cluster;
   private String defaultWorkingDirectory;
+  
+  private UserGroupInformation ugi;
 
   static {
     conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
@@ -68,20 +69,11 @@
   @Override
   protected void setUp() throws Exception {
-    final String uri = WebHdfsFileSystem.SCHEME  + "://"
-        + conf.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
-
     //get file system as a non-superuser
     final UserGroupInformation current = UserGroupInformation.getCurrentUser();
-    final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
+    ugi = UserGroupInformation.createUserForTesting(
         current.getShortUserName() + "x", new String[]{"user"});
-    fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
-      @Override
-      public FileSystem run() throws Exception {
-        return FileSystem.get(new URI(uri), conf);
-      }
-    });
-
+    fs = WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, conf);
     defaultWorkingDirectory = fs.getWorkingDirectory().toUri().getPath();
   }
 
@@ -263,9 +255,29 @@
   public void testResponseCode() throws IOException {
     final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs;
+    final Path root = new Path("/");
     final Path dir = new Path("/test/testUrl");
     assertTrue(webhdfs.mkdirs(dir));
 
+    {//test GETHOMEDIRECTORY
+      final URL url = webhdfs.toUrl(GetOpParam.Op.GETHOMEDIRECTORY, root);
+      final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      final Map<?, ?> m = WebHdfsTestUtil.connectAndGetJson(
+          conn, HttpServletResponse.SC_OK);
+      assertEquals(WebHdfsFileSystem.getHomeDirectoryString(ugi),
+          m.get(Path.class.getSimpleName()));
+      conn.disconnect();
+    }
+
+    {//test GETHOMEDIRECTORY with unauthorized doAs
+      final URL url = webhdfs.toUrl(GetOpParam.Op.GETHOMEDIRECTORY, root,
+          new DoAsParam(ugi.getShortUserName() + "proxy"));
+      final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      conn.connect();
+      assertEquals(HttpServletResponse.SC_UNAUTHORIZED, conn.getResponseCode());
+      conn.disconnect();
+    }
+
     {//test set owner with empty parameters
       final URL url = webhdfs.toUrl(PutOpParam.Op.SETOWNER, dir);
       final HttpURLConnection conn = (HttpURLConnection) url.openConnection();