Subject: svn commit: r1200737 - in /hadoop/common/branches/branch-0.20-security-205: ./ src/hdfs/org/apache/hadoop/hdfs/server/namenode/ src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/ src/hdfs/org/apache/hadoop/hdfs/web/ src/hdfs/org/apache/hado...
Date: Fri, 11 Nov 2011 04:34:17 -0000
To: common-commits@hadoop.apache.org
From: szetszwo@apache.org

Author: szetszwo
Date: Fri Nov 11 04:34:16 2011
New Revision: 1200737

URL: http://svn.apache.org/viewvc?rev=1200737&view=rev
Log:
svn merge -c 1200733 from branch-0.20-security for HDFS-2539.
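For orientation, a minimal client-side sketch of what this change enables (not part of this commit; host, port, and user names are hypothetical, and the JSON shape follows the GETHOMEDIRECTORY handler added below):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    // Hypothetical example: real user "alice" asks for the home directory of
    // the proxied user "bob" using the new doas query parameter (HDFS-2539).
    public class WebHdfsDoAsExample {
      public static void main(String[] args) throws Exception {
        final URL url = new URL("http://namenode.example.com:50070/webhdfs/v1/"
            + "?op=GETHOMEDIRECTORY&user.name=alice&doas=bob");
        final HttpURLConnection conn = (HttpURLConnection)url.openConnection();
        final BufferedReader in = new BufferedReader(
            new InputStreamReader(conn.getInputStream()));
        try {
          System.out.println(in.readLine()); // e.g. {"Path":"/user/bob"}
        } finally {
          in.close();
          conn.disconnect();
        }
      }
    }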
Added:
    hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/DoAsParam.java
      - copied unchanged from r1200733, hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DoAsParam.java
    hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java
      - copied unchanged from r1200733, hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java

Modified:
    hadoop/common/branches/branch-0.20-security-205/   (props changed)
    hadoop/common/branches/branch-0.20-security-205/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
    hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
    hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/AuthFilter.java
    hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/ParamFilter.java
    hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
    hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java
    hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
    hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java
    hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/UserProvider.java
    hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java
    hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java

Propchange: hadoop/common/branches/branch-0.20-security-205/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Nov 11 04:34:16 2011
@@ -1,6 +1,6 @@
 /hadoop/common/branches/branch-0.20:826138,826568,829987,831184,833001,880632,898713,909245,909723,960946,990003,1044225
 /hadoop/common/branches/branch-0.20-append:955380,955398,955448,956329
-/hadoop/common/branches/branch-0.20-security:1170042,1170087,1170997,1171137,1171380,1171613,1171891,1171905,1172184,1172188,1172190,1172192,1173470,1174471,1175114,1176179,1176720,1177907,1179036,1179171,1179519,1179857,1183086,1183101,1183176,1183556,1186509,1187141,1189029,1189361,1190079,1190089,1190100,1195737,1197330,1198906,1199401,1199571
+/hadoop/common/branches/branch-0.20-security:1170042,1170087,1170997,1171137,1171380,1171613,1171891,1171905,1172184,1172188,1172190,1172192,1173470,1174471,1175114,1176179,1176720,1177907,1179036,1179171,1179519,1179857,1183086,1183101,1183176,1183556,1186509,1187141,1189029,1189361,1190079,1190089,1190100,1195737,1197330,1198906,1199401,1199571,1200733
 /hadoop/common/branches/branch-0.20-security-203:1096071,1097011,1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863,1099088,1099191,1099324,1099330,1099333,1102071,1128115
 /hadoop/common/branches/branch-0.20-security-204:1128390,1147228,1148069,1149316,1154413
 /hadoop/core/branches/branch-0.19:713112

Modified: hadoop/common/branches/branch-0.20-security-205/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/CHANGES.txt?rev=1200737&r1=1200736&r2=1200737&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.20-security-205/CHANGES.txt Fri Nov 11 04:34:16 2011
@@ -2,6 +2,11 @@ Hadoop Change Log
 
 Release 0.20.205.1 - unreleased
 
+  NEW FEATURES
+
+    HDFS-2539. Support doAs and GETHOMEDIRECTORY in webhdfs.
+    (szetszwo)
+
   IMPROVEMENTS
 
     HDFS-2427. Change the default permission in webhdfs to 755 and add range

Propchange: hadoop/common/branches/branch-0.20-security-205/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Nov 11 04:34:16 2011
@@ -1,6 +1,6 @@
 /hadoop/common/branches/branch-0.20/CHANGES.txt:826138,826568,829987,831184,833001,880632,898713,909245,909723,960946,990003,1044225
 /hadoop/common/branches/branch-0.20-append/CHANGES.txt:955380,955398,955448,956329
-/hadoop/common/branches/branch-0.20-security/CHANGES.txt:1170042,1170087,1170997,1171137,1171181,1171380,1171613,1171891,1171905,1172184,1172188,1172190,1172192,1173470,1173843,1174326,1174471,1174476,1174482,1175114,1176179,1176182,1176270,1176276,1176675,1176720,1177031,1177036,1177098,1177101,1177907,1178074,1179036,1179171,1179471,1179519,1179713,1179722,1179857,1179919,1183086,1183101,1183176,1183556,1186509,1187141,1189029,1189361,1190079,1190089,1190100,1195737,1197330,1198906,1199401,1199571
+/hadoop/common/branches/branch-0.20-security/CHANGES.txt:1170042,1170087,1170997,1171137,1171181,1171380,1171613,1171891,1171905,1172184,1172188,1172190,1172192,1173470,1173843,1174326,1174471,1174476,1174482,1175114,1176179,1176182,1176270,1176276,1176675,1176720,1177031,1177036,1177098,1177101,1177907,1178074,1179036,1179171,1179471,1179519,1179713,1179722,1179857,1179919,1183086,1183101,1183176,1183556,1186509,1187141,1189029,1189361,1190079,1190089,1190100,1195737,1197330,1198906,1199401,1199571,1200733
 /hadoop/common/branches/branch-0.20-security-203/CHANGES.txt:1096071,1097011,1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863,1099088,1099191,1099324,1099330,1099333,1102071,1128115
 /hadoop/common/branches/branch-0.20-security-204/CHANGES.txt:1128390,1147228,1148069,1149316,1154413,1159730,1161741
 /hadoop/core/branches/branch-0.18/CHANGES.txt:727226

Modified: hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java?rev=1200737&r1=1200736&r2=1200737&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java Fri Nov 11 04:34:16 2011
@@ -50,6 +50,7 @@ import org.apache.hadoop.hdfs.server.com
 import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.web.resources.DelegationParam;
+import org.apache.hadoop.hdfs.web.resources.DoAsParam;
 import org.apache.hadoop.hdfs.web.resources.UserParam;
 import org.apache.hadoop.http.HtmlQuoting;
 import org.apache.hadoop.net.NetUtils;
@@ -58,6 +59,8 @@ import org.apache.hadoop.security.Kerber
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.StringUtils;
@@ -465,9 +468,10 @@ public class JspHelper {
       final boolean tryUgiParameter) throws IOException {
     final UserGroupInformation ugi;
     final String usernameFromQuery = getUsernameFromQuery(request, tryUgiParameter);
+    final String doAsUserFromQuery = request.getParameter(DoAsParam.NAME);
 
     if(UserGroupInformation.isSecurityEnabled()) {
-      final String user = request.getRemoteUser();
+      final String remoteUser = request.getRemoteUser();
       String tokenString = request.getParameter(DELEGATION_PARAMETER_NAME);
       if (tokenString != null) {
         Token<DelegationTokenIdentifier> token =
@@ -490,26 +494,36 @@ public class JspHelper {
           }
         }
         ugi = id.getUser();
-        checkUsername(ugi.getShortUserName(), usernameFromQuery);
-        checkUsername(ugi.getShortUserName(), user);
-        ugi.addToken(token);
+        if (ugi.getRealUser() == null) {
+          //non-proxy case
+          checkUsername(ugi.getShortUserName(), usernameFromQuery);
+          checkUsername(null, doAsUserFromQuery);
+        } else {
+          //proxy case
+          checkUsername(ugi.getRealUser().getShortUserName(), usernameFromQuery);
+          checkUsername(ugi.getShortUserName(), doAsUserFromQuery);
+          ProxyUsers.authorize(ugi, request.getRemoteAddr(), conf);
+        }
+        ugi.addToken(token);
         ugi.setAuthenticationMethod(AuthenticationMethod.TOKEN);
       } else {
-        if(user == null) {
+        if(remoteUser == null) {
           throw new IOException("Security enabled but user not " +
               "authenticated by filter");
         }
-        ugi = UserGroupInformation.createRemoteUser(user);
-        checkUsername(ugi.getShortUserName(), usernameFromQuery);
+        final UserGroupInformation realUgi = UserGroupInformation.createRemoteUser(remoteUser);
+        checkUsername(realUgi.getShortUserName(), usernameFromQuery);
         // This is not necessarily true, could have been auth'ed by user-facing
         // filter
-        ugi.setAuthenticationMethod(secureAuthMethod);
+        realUgi.setAuthenticationMethod(secureAuthMethod);
+        ugi = initUGI(realUgi, doAsUserFromQuery, request, true, conf);
       }
     } else { // Security's not on, pull from url
-      ugi = usernameFromQuery == null?
+      final UserGroupInformation realUgi = usernameFromQuery == null?
           getDefaultWebUser(conf) // not specified in request
           : UserGroupInformation.createRemoteUser(usernameFromQuery);
-      ugi.setAuthenticationMethod(AuthenticationMethod.SIMPLE);
+      realUgi.setAuthenticationMethod(AuthenticationMethod.SIMPLE);
+      ugi = initUGI(realUgi, doAsUserFromQuery, request, false, conf);
     }
 
     if(LOG.isDebugEnabled())
@@ -517,12 +531,34 @@ public class JspHelper {
     return ugi;
   }
 
+  private static UserGroupInformation initUGI(final UserGroupInformation realUgi,
+      final String doAsUserFromQuery, final HttpServletRequest request,
+      final boolean isSecurityEnabled, final Configuration conf
+      ) throws AuthorizationException {
+    final UserGroupInformation ugi;
+    if (doAsUserFromQuery == null) {
+      //non-proxy case
+      ugi = realUgi;
+    } else {
+      //proxy case
+      ugi = UserGroupInformation.createProxyUser(doAsUserFromQuery, realUgi);
+      ugi.setAuthenticationMethod(
+          isSecurityEnabled? AuthenticationMethod.PROXY: AuthenticationMethod.SIMPLE);
+      ProxyUsers.authorize(ugi, request.getRemoteAddr(), conf);
+    }
+    return ugi;
+  }
+
   /**
    * Expected user name should be a short name.
    */
   private static void checkUsername(final String expected, final String name
       ) throws IOException {
-    if (name == null) {
+    if (expected == null && name != null) {
+      throw new IOException("Usernames not matched: expecting null but name="
+          + name);
+    }
+    if (name == null) { //name is optional, null is okay
       return;
     }
     KerberosName u = new KerberosName(name);

Modified: hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java?rev=1200737&r1=1200736&r2=1200737&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java Fri Nov 11 04:34:16 2011
@@ -65,6 +65,7 @@ import org.apache.hadoop.hdfs.web.resour
 import org.apache.hadoop.hdfs.web.resources.DelegationParam;
 import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
 import org.apache.hadoop.hdfs.web.resources.DestinationParam;
+import org.apache.hadoop.hdfs.web.resources.DoAsParam;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
 import org.apache.hadoop.hdfs.web.resources.GroupParam;
 import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
@@ -110,6 +111,21 @@ public class NamenodeWebHdfsMethods {
   private @Context HttpServletRequest request;
   private @Context HttpServletResponse response;
 
+  private void init(final UserGroupInformation ugi,
+      final DelegationParam delegation,
+      final UserParam username, final DoAsParam doAsUser,
+      final UriFsPathParam path, final HttpOpParam<?> op,
+      final Param<?, ?>... parameters) throws IOException {
+    if (LOG.isTraceEnabled()) {
+      LOG.trace("HTTP " + op.getValue().getType() + ": " + op + ", " + path
+          + ", ugi=" + ugi + ", " + username + ", " + doAsUser
+          + Param.toSortedString(", ", parameters));
+    }
+
+    //clear content type
+    response.setContentType(null);
+  }
+
   private static DatanodeInfo chooseDatanode(final NameNode namenode,
       final String path, final HttpOpParam.Op op, final long openOffset
       ) throws IOException {
@@ -153,6 +169,7 @@ public class NamenodeWebHdfsMethods {
   private URI redirectURI(final NameNode namenode,
       final UserGroupInformation ugi, final DelegationParam delegation,
+      final UserParam username, final DoAsParam doAsUser,
       final String path, final HttpOpParam.Op op, final long openOffset,
       final Param<?, ?>... parameters) throws URISyntaxException, IOException {
     final DatanodeInfo dn = chooseDatanode(namenode, path, op, openOffset);
@@ -160,7 +177,7 @@ public class NamenodeWebHdfsMethods {
     final String delegationQuery;
     if (!UserGroupInformation.isSecurityEnabled()) {
       //security disabled
-      delegationQuery = "";
+      delegationQuery = Param.toSortedString("&", doAsUser, username);
     } else if (delegation.getValue() != null) {
       //client has provided a token
       delegationQuery = "&" + delegation;
@@ -170,8 +187,7 @@ public class NamenodeWebHdfsMethods {
           namenode, ugi, request.getUserPrincipal().getName());
       delegationQuery = "&" + new DelegationParam(t.encodeToUrlString());
     }
-    final String query = op.toQueryString()
-        + '&' + new UserParam(ugi) + delegationQuery
+    final String query = op.toQueryString() + delegationQuery
         + Param.toSortedString("&", parameters);
     final String uripath = WebHdfsFileSystem.PATH_PREFIX + path;
 
@@ -192,6 +208,10 @@ public class NamenodeWebHdfsMethods {
       @Context final UserGroupInformation ugi,
       @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
          final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
           final PutOpParam op,
       @QueryParam(DestinationParam.NAME) @DefaultValue(DestinationParam.DEFAULT)
@@ -217,9 +237,9 @@ public class NamenodeWebHdfsMethods {
       @QueryParam(TokenArgumentParam.NAME) @DefaultValue(TokenArgumentParam.DEFAULT)
           final TokenArgumentParam delegationTokenArgument
       ) throws IOException, InterruptedException {
-    return put(ugi, delegation, ROOT, op, destination, owner, group,
-        permission, overwrite, bufferSize, replication, blockSize,
-        modificationTime, accessTime, delegationTokenArgument);
+    return put(ugi, delegation, username, doAsUser, ROOT, op, destination,
+        owner, group, permission, overwrite, bufferSize, replication,
+        blockSize, modificationTime, accessTime, delegationTokenArgument);
   }
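The ProxyUsers.authorize calls above only pass if the real user is configured as a proxy user; a minimal sketch of that server-side setup (not part of this commit), using the standard hadoop.proxyuser.* keys with hypothetical user and group names:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.authorize.ProxyUsers;

    // Hypothetical: allow "alice" to impersonate members of group "users"
    // from any host. Without such entries, ProxyUsers.authorize(...) above
    // rejects the doas request and webhdfs responds 401 Unauthorized.
    public class ProxyUserSetup {
      public static void main(String[] args) {
        final Configuration conf = new Configuration();
        conf.set("hadoop.proxyuser.alice.groups", "users");
        conf.set("hadoop.proxyuser.alice.hosts", "*");
        ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
      }
    }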
  /** Handle HTTP PUT request. */
@@ -231,6 +251,10 @@ public class NamenodeWebHdfsMethods {
       @Context final UserGroupInformation ugi,
       @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
           final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
       @QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
           final PutOpParam op,
@@ -258,15 +282,9 @@ public class NamenodeWebHdfsMethods {
           final TokenArgumentParam delegationTokenArgument
       ) throws IOException, InterruptedException {
 
-    if (LOG.isTraceEnabled()) {
-      LOG.trace(op + ": " + path + ", ugi=" + ugi
-          + Param.toSortedString(", ", destination, owner, group, permission,
-              overwrite, bufferSize, replication, blockSize,
-              modificationTime, accessTime));
-    }
-
-    //clear content type
-    response.setContentType(null);
+    init(ugi, delegation, username, doAsUser, path, op, destination, owner,
+        group, permission, overwrite, bufferSize, replication, blockSize,
+        modificationTime, accessTime, delegationTokenArgument);
 
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override
@@ -281,8 +299,8 @@ public class NamenodeWebHdfsMethods {
     switch(op.getValue()) {
     case CREATE:
     {
-      final URI uri = redirectURI(namenode, ugi, delegation, fullpath,
-          op.getValue(), -1L,
+      final URI uri = redirectURI(namenode, ugi, delegation, username, doAsUser,
+          fullpath, op.getValue(), -1L,
           permission, overwrite, bufferSize, replication, blockSize);
       return Response.temporaryRedirect(uri).build();
     }
@@ -358,12 +376,16 @@ public class NamenodeWebHdfsMethods {
       @Context final UserGroupInformation ugi,
       @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
           final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @QueryParam(PostOpParam.NAME) @DefaultValue(PostOpParam.DEFAULT)
           final PostOpParam op,
       @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
           final BufferSizeParam bufferSize
       ) throws IOException, InterruptedException {
-    return post(ugi, delegation, ROOT, op, bufferSize);
+    return post(ugi, delegation, username, doAsUser, ROOT, op, bufferSize);
   }
 
   /** Handle HTTP POST request. */
@@ -375,6 +397,10 @@ public class NamenodeWebHdfsMethods {
       @Context final UserGroupInformation ugi,
       @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
           final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
       @QueryParam(PostOpParam.NAME) @DefaultValue(PostOpParam.DEFAULT)
           final PostOpParam op,
@@ -382,13 +408,7 @@ public class NamenodeWebHdfsMethods {
           final BufferSizeParam bufferSize
       ) throws IOException, InterruptedException {
 
-    if (LOG.isTraceEnabled()) {
-      LOG.trace(op + ": " + path + ", ugi=" + ugi
-          + Param.toSortedString(", ", bufferSize));
-    }
-
-    //clear content type
-    response.setContentType(null);
+    init(ugi, delegation, username, doAsUser, path, op, bufferSize);
 
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override
@@ -402,8 +422,8 @@ public class NamenodeWebHdfsMethods {
     switch(op.getValue()) {
     case APPEND:
     {
-      final URI uri = redirectURI(namenode, ugi, delegation, fullpath,
-          op.getValue(), -1L, bufferSize);
+      final URI uri = redirectURI(namenode, ugi, delegation, username, doAsUser,
+          fullpath, op.getValue(), -1L, bufferSize);
       return Response.temporaryRedirect(uri).build();
     }
     default:
@@ -425,6 +445,10 @@ public class NamenodeWebHdfsMethods {
       @Context final UserGroupInformation ugi,
       @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
           final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @QueryParam(GetOpParam.NAME) @DefaultValue(GetOpParam.DEFAULT)
           final GetOpParam op,
       @QueryParam(OffsetParam.NAME) @DefaultValue(OffsetParam.DEFAULT)
@@ -436,7 +460,8 @@ public class NamenodeWebHdfsMethods {
       @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
           final BufferSizeParam bufferSize
       ) throws IOException, URISyntaxException, InterruptedException {
-    return get(ugi, delegation, ROOT, op, offset, length, renewer, bufferSize);
+    return get(ugi, delegation, username, doAsUser, ROOT, op,
+        offset, length, renewer, bufferSize);
   }
 
   /** Handle HTTP GET request. */
@@ -447,6 +472,10 @@ public class NamenodeWebHdfsMethods {
       @Context final UserGroupInformation ugi,
       @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
           final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
       @QueryParam(GetOpParam.NAME) @DefaultValue(GetOpParam.DEFAULT)
           final GetOpParam op,
@@ -460,13 +489,8 @@ public class NamenodeWebHdfsMethods {
           final BufferSizeParam bufferSize
       ) throws IOException, InterruptedException {
 
-    if (LOG.isTraceEnabled()) {
-      LOG.trace(op + ": " + path + ", ugi=" + ugi
-          + Param.toSortedString(", ", offset, length, renewer, bufferSize));
-    }
-
-    //clear content type
-    response.setContentType(null);
+    init(ugi, delegation, username, doAsUser, path, op,
+        offset, length, renewer, bufferSize);
 
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override
@@ -480,8 +504,8 @@ public class NamenodeWebHdfsMethods {
     switch(op.getValue()) {
     case OPEN:
     {
-      final URI uri = redirectURI(namenode, ugi, delegation, fullpath,
-          op.getValue(), offset.getValue(), offset, length, bufferSize);
+      final URI uri = redirectURI(namenode, ugi, delegation, username, doAsUser,
+          fullpath, op.getValue(), offset.getValue(), offset, length, bufferSize);
       return Response.temporaryRedirect(uri).build();
     }
     case GET_BLOCK_LOCATIONS:
@@ -516,17 +540,28 @@ public class NamenodeWebHdfsMethods {
     }
     case GETFILECHECKSUM:
     {
-      final URI uri = redirectURI(namenode, ugi, delegation, fullpath,
-          op.getValue(), -1L);
+      final URI uri = redirectURI(namenode, ugi, delegation, username, doAsUser,
+          fullpath, op.getValue(), -1L);
       return Response.temporaryRedirect(uri).build();
     }
     case GETDELEGATIONTOKEN:
     {
+      if (delegation.getValue() != null) {
+        throw new IllegalArgumentException(delegation.getName()
+            + " parameter is not null.");
+      }
       final Token<DelegationTokenIdentifier> token = generateDelegationToken(
           namenode, ugi, renewer.getValue());
       final String js = JsonUtil.toJsonString(token);
       return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
     }
+    case GETHOMEDIRECTORY:
+    {
+      final String js = JsonUtil.toJsonString(
+          org.apache.hadoop.fs.Path.class.getSimpleName(),
+          WebHdfsFileSystem.getHomeDirectoryString(ugi));
+      return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+    }
     default:
       throw new UnsupportedOperationException(op + " is not supported");
     }
@@ -588,12 +623,18 @@ public class NamenodeWebHdfsMethods {
   @Produces(MediaType.APPLICATION_JSON)
   public Response deleteRoot(
       @Context final UserGroupInformation ugi,
+      @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
+          final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @QueryParam(DeleteOpParam.NAME) @DefaultValue(DeleteOpParam.DEFAULT)
           final DeleteOpParam op,
       @QueryParam(RecursiveParam.NAME) @DefaultValue(RecursiveParam.DEFAULT)
           final RecursiveParam recursive
       ) throws IOException, InterruptedException {
-    return delete(ugi, ROOT, op, recursive);
+    return delete(ugi, delegation, username, doAsUser, ROOT, op, recursive);
   }
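The GETHOMEDIRECTORY case above returns a one-entry JSON object; a small sketch of building the same payload (not part of this commit; the user name is hypothetical):

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.web.JsonUtil;

    // Builds the payload the GETHOMEDIRECTORY case serializes: for a
    // hypothetical user "bob", a JSON object of the form {"Path":"/user/bob"}.
    public class HomeDirJson {
      public static void main(String[] args) {
        final String js = JsonUtil.toJsonString(
            Path.class.getSimpleName(), "/user/bob");
        System.out.println(js);
      }
    }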
  /** Handle HTTP DELETE request. */
@@ -602,6 +643,12 @@ public class NamenodeWebHdfsMethods {
   @Produces(MediaType.APPLICATION_JSON)
   public Response delete(
       @Context final UserGroupInformation ugi,
+      @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
+          final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
       @QueryParam(DeleteOpParam.NAME) @DefaultValue(DeleteOpParam.DEFAULT)
           final DeleteOpParam op,
@@ -609,13 +656,7 @@ public class NamenodeWebHdfsMethods {
           final RecursiveParam recursive
       ) throws IOException, InterruptedException {
 
-    if (LOG.isTraceEnabled()) {
-      LOG.trace(op + ": " + path + ", ugi=" + ugi
-          + Param.toSortedString(", ", recursive));
-    }
-
-    //clear content type
-    response.setContentType(null);
+    init(ugi, delegation, username, doAsUser, path, op, recursive);
 
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override

Modified: hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/AuthFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/AuthFilter.java?rev=1200737&r1=1200736&r2=1200737&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/AuthFilter.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/AuthFilter.java Fri Nov 11 04:34:16 2011
@@ -18,6 +18,12 @@ package org.apache.hadoop.hdfs.web;
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 import java.util.Properties;
 
 import javax.servlet.FilterChain;
@@ -26,6 +32,7 @@ import javax.servlet.ServletException;
 import javax.servlet.ServletRequest;
 import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
 
 import org.apache.hadoop.hdfs.web.resources.DelegationParam;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -67,15 +74,77 @@ public class AuthFilter extends Authenti
   @Override
   public void doFilter(ServletRequest request, ServletResponse response,
       FilterChain filterChain) throws IOException, ServletException {
-    HttpServletRequest httpRequest = (HttpServletRequest) request;
-    String tokenString = httpRequest
-        .getParameter(DelegationParam.NAME);
+    final HttpServletRequest httpRequest = toLowerCase((HttpServletRequest)request);
+    final String tokenString = httpRequest.getParameter(DelegationParam.NAME);
     if (tokenString != null) {
       //Token is present in the url, therefore token will be used for
       //authentication, bypass kerberos authentication.
       filterChain.doFilter(httpRequest, response);
       return;
     }
-    super.doFilter(request, response, filterChain);
+    super.doFilter(httpRequest, response, filterChain);
+  }
+
+  private static HttpServletRequest toLowerCase(final HttpServletRequest request) {
+    @SuppressWarnings("unchecked")
+    final Map<String, String[]> original = (Map<String, String[]>)request.getParameterMap();
+    if (!ParamFilter.containsUpperCase(original.keySet())) {
+      return request;
+    }
+
+    final Map<String, List<String>> m = new HashMap<String, List<String>>();
+    for(Map.Entry<String, String[]> entry : original.entrySet()) {
+      final String key = entry.getKey().toLowerCase();
+      List<String> strings = m.get(key);
+      if (strings == null) {
+        strings = new ArrayList<String>();
+        m.put(key, strings);
+      }
+      for(String v : entry.getValue()) {
+        strings.add(v);
+      }
+    }
+
+    return new HttpServletRequestWrapper(request) {
+      private Map<String, String[]> parameters = null;
+
+      @Override
+      public Map<String, String[]> getParameterMap() {
+        if (parameters == null) {
+          parameters = new HashMap<String, String[]>();
+          for(Map.Entry<String, List<String>> entry : m.entrySet()) {
+            final List<String> a = entry.getValue();
+            parameters.put(entry.getKey(), a.toArray(new String[a.size()]));
+          }
+        }
+        return parameters;
+      }
+
+      @Override
+      public String getParameter(String name) {
+        final List<String> a = m.get(name);
+        return a == null? null: a.get(0);
+      }
+
+      @Override
+      public String[] getParameterValues(String name) {
+        return getParameterMap().get(name);
+      }
+
+      @Override
+      public Enumeration<String> getParameterNames() {
+        final Iterator<String> i = m.keySet().iterator();
+        return new Enumeration<String>() {
+          @Override
+          public boolean hasMoreElements() {
+            return i.hasNext();
+          }
+          @Override
+          public String nextElement() {
+            return i.next();
+          }
+        };
+      }
+    };
   }
 }
\ No newline at end of file

Modified: hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/ParamFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/ParamFilter.java?rev=1200737&r1=1200736&r2=1200737&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/ParamFilter.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/ParamFilter.java Fri Nov 11 04:34:16 2011
@@ -59,7 +59,7 @@ public class ParamFilter implements Reso
   }
 
   /** Do the strings contain upper case letters? */
-  private static boolean containsUpperCase(final Iterable<String> strings) {
+  static boolean containsUpperCase(final Iterable<String> strings) {
     for(String s : strings) {
       for(int i = 0; i < s.length(); i++) {
         if (Character.isUpperCase(s.charAt(i))) {

Modified: hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java?rev=1200737&r1=1200736&r2=1200737&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java Fri Nov 11 04:34:16 2011
@@ -39,6 +39,7 @@ import org.apache.hadoop.fs.BlockLocatio
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
@@ -48,7 +49,6 @@ import org.apache.hadoop.hdfs.ByteRangeI
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
-import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
@@ -86,6 +86,8 @@ import org.apache.hadoop.security.Securi
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.TokenRenewer;
@@ -195,9 +197,14 @@ public class WebHdfsFileSystem extends F
     }
   }
 
+  /** @return the home directory. */
+  public static String getHomeDirectoryString(final UserGroupInformation ugi) {
+    return "/user/" + ugi.getShortUserName();
+  }
+
   @Override
   public Path getHomeDirectory() {
-    return makeQualified(new Path("/user/" + ugi.getShortUserName()));
+    return makeQualified(new Path(getHomeDirectoryString(ugi)));
   }
 
   @Override
@@ -219,7 +226,7 @@ public class WebHdfsFileSystem extends F
     return f.isAbsolute()? f: new Path(workingDir, f);
   }
 
-  private static Map<?, ?> jsonParse(final InputStream in) throws IOException {
+  static Map<?, ?> jsonParse(final InputStream in) throws IOException {
     if (in == null) {
       throw new IOException("The input stream is null.");
     }
@@ -245,9 +252,13 @@ public class WebHdfsFileSystem extends F
       final RemoteException re = JsonUtil.toRemoteException(m);
       throw re.unwrapRemoteException(AccessControlException.class,
-          DSQuotaExceededException.class,
+          InvalidToken.class,
+          AuthenticationException.class,
+          AuthorizationException.class,
+          FileAlreadyExistsException.class,
           FileNotFoundException.class,
           SafeModeException.class,
+          DSQuotaExceededException.class,
           NSQuotaExceededException.class);
     }
     return null;
   }
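With the expanded unwrap list above, a client sees typed exceptions instead of bare RemoteExceptions; an illustrative sketch (not part of this commit):

    import java.io.IOException;
    import org.apache.hadoop.fs.FileAlreadyExistsException;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Illustrative only: a CREATE with overwrite=false that collides with an
    // existing file now surfaces as FileAlreadyExistsException on the client.
    public class UnwrapExample {
      static void createOnce(FileSystem webhdfs, Path f) throws IOException {
        try {
          webhdfs.create(f, false).close();
        } catch (FileAlreadyExistsException e) {
          System.err.println("already exists: " + f);
        }
      }
    }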
@@ -343,7 +354,7 @@ public class WebHdfsFileSystem extends F
   /**
    * Two-step Create/Append:
    * Step 1) Submit a Http request with neither auto-redirect nor data.
-   * Step 2) Submit Http PUT with the URL from the Location header with data.
+   * Step 2) Submit another Http request with the URL from the Location header with data.
    *
    * The reason of having two-step create/append is for preventing clients to
    * send out the data before the redirect. This issue is addressed by the
@@ -353,7 +364,7 @@ public class WebHdfsFileSystem extends F
    * 100-continue". The two-step create/append is a temporary workaround for
    * the software library bugs.
    */
-  private static HttpURLConnection twoStepWrite(HttpURLConnection conn,
+  static HttpURLConnection twoStepWrite(HttpURLConnection conn,
       final HttpOpParam.Op op) throws IOException {
     //Step 1) Submit a Http request with neither auto-redirect nor data.
     conn.setInstanceFollowRedirects(false);
@@ -363,7 +374,7 @@ public class WebHdfsFileSystem extends F
     final String redirect = conn.getHeaderField("Location");
     conn.disconnect();
 
-    //Step 2) Submit Http PUT with the URL from the Location header with data.
+    //Step 2) Submit another Http request with the URL from the Location header with data.
     conn = (HttpURLConnection)new URL(redirect).openConnection();
     conn.setRequestMethod(op.getType().toString());
     return conn;
@@ -415,8 +426,7 @@ public class WebHdfsFileSystem extends F
   private FileStatus makeQualified(HdfsFileStatus f, Path parent) {
     return new FileStatus(f.getLen(), f.isDir(), f.getReplication(),
-        f.getBlockSize(), f.getModificationTime(),
-        f.getAccessTime(),
+        f.getBlockSize(), f.getModificationTime(), f.getAccessTime(),
         f.getPermission(), f.getOwner(), f.getGroup(),
         f.getFullPath(parent).makeQualified(this)); // fully-qualify path
   }
@@ -478,7 +488,8 @@ public class WebHdfsFileSystem extends F
 
   @Override
   public long getDefaultBlockSize() {
-    return getConf().getLong("dfs.block.size", FSConstants.DEFAULT_BLOCK_SIZE);
+    return getConf().getLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY,
+        DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT);
   }
 
   @Override
@@ -487,7 +498,7 @@ public class WebHdfsFileSystem extends F
         DFSConfigKeys.DFS_REPLICATION_DEFAULT);
   }
 
-  private FSDataOutputStream write(final HttpOpParam.Op op,
+  FSDataOutputStream write(final HttpOpParam.Op op,
       final HttpURLConnection conn, final int bufferSize) throws IOException {
     return new FSDataOutputStream(new BufferedOutputStream(
         conn.getOutputStream(), bufferSize), statistics) {
@@ -496,7 +507,11 @@ public class WebHdfsFileSystem extends F
         try {
          super.close();
         } finally {
-          validateResponse(op, conn);
+          try {
+            validateResponse(op, conn);
+          } finally {
+            conn.disconnect();
+          }
         }
       }
     };
@@ -661,9 +676,11 @@ public class WebHdfsFileSystem extends F
   }
 
   @Override
-  public synchronized void setDelegationToken(
+  public void setDelegationToken(
       final Token<? extends TokenIdentifier> token) {
-    delegationToken = token;
+    synchronized(this) {
+      delegationToken = token;
+    }
   }
 
   private synchronized long renewDelegationToken(final Token<?> token
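twoStepWrite and write are made package-private above so that WebHdfsTestUtil can drive them; the same two-step protocol from a raw HTTP client's point of view (not part of this commit; host, user, and path are hypothetical):

    import java.net.HttpURLConnection;
    import java.net.URL;

    // Hypothetical two-step CREATE mirroring twoStepWrite above.
    public class TwoStepCreate {
      public static void main(String[] args) throws Exception {
        final URL url = new URL("http://namenode.example.com:50070/webhdfs/v1"
            + "/tmp/a.txt?op=CREATE&user.name=alice");

        // Step 1: no auto-redirect, no data; expect a redirect whose
        // Location header points at a datanode.
        HttpURLConnection conn = (HttpURLConnection)url.openConnection();
        conn.setInstanceFollowRedirects(false);
        conn.setRequestMethod("PUT");
        conn.connect();
        final String redirect = conn.getHeaderField("Location");
        conn.disconnect();

        // Step 2: send the data to the datanode URL from the redirect.
        conn = (HttpURLConnection)new URL(redirect).openConnection();
        conn.setRequestMethod("PUT");
        conn.setDoOutput(true);
        conn.getOutputStream().write("Hello, webhdfs!".getBytes());
        conn.getOutputStream().close();
        System.out.println("HTTP " + conn.getResponseCode());
        conn.disconnect();
      }
    }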
Modified: hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java?rev=1200737&r1=1200736&r2=1200737&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java Fri Nov 11 04:34:16 2011
@@ -30,14 +30,25 @@ import javax.ws.rs.ext.Provider;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hdfs.web.JsonUtil;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.security.authorize.AuthorizationException;
 
 import com.sun.jersey.api.ParamException;
+import com.sun.jersey.api.container.ContainerException;
 
 /** Handle exceptions. */
 @Provider
 public class ExceptionHandler implements ExceptionMapper<Exception> {
   public static final Log LOG = LogFactory.getLog(ExceptionHandler.class);
 
+  private static Exception toCause(Exception e) {
+    final Throwable t = e.getCause();
+    if (t != null && t instanceof Exception) {
+      e = (Exception)e.getCause();
+    }
+    return e;
+  }
+
   private @Context HttpServletResponse response;
 
   @Override
@@ -55,12 +66,20 @@ public class ExceptionHandler implements
       e = new IllegalArgumentException("Invalid value for webhdfs parameter \""
          + paramexception.getParameterName() + "\": "
          + e.getCause().getMessage(), e);
-    }
+    }
+    if (e instanceof ContainerException) {
+      e = toCause(e);
+    }
+    if (e instanceof RemoteException) {
+      e = ((RemoteException)e).unwrapRemoteException();
+    }
 
     //Map response status
     final Response.Status s;
     if (e instanceof SecurityException) {
       s = Response.Status.UNAUTHORIZED;
+    } else if (e instanceof AuthorizationException) {
+      s = Response.Status.UNAUTHORIZED;
     } else if (e instanceof FileNotFoundException) {
       s = Response.Status.NOT_FOUND;
     } else if (e instanceof IOException) {

Modified: hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java?rev=1200737&r1=1200736&r2=1200737&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java Fri Nov 11 04:34:16 2011
@@ -30,6 +30,7 @@ public class GetOpParam extends HttpOpPa
 
     GETCONTENTSUMMARY(HttpURLConnection.HTTP_OK),
     GETFILECHECKSUM(HttpURLConnection.HTTP_OK),
+    GETHOMEDIRECTORY(HttpURLConnection.HTTP_OK),
 
     GETDELEGATIONTOKEN(HttpURLConnection.HTTP_OK),
 
     /** GET_BLOCK_LOCATIONS is a private unstable op. */
Modified: hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java?rev=1200737&r1=1200736&r2=1200737&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java Fri Nov 11 04:34:16 2011
@@ -58,7 +58,7 @@ public abstract class HttpOpParam

Modified: hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java?rev=1200737&r1=1200736&r2=1200737&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java Fri Nov 11 04:34:16 2011
           run() throws IOException {
             DistributedFileSystem dfs = (DistributedFileSystem) cluster
                 .getFileSystem();
-            return dfs.getDelegationToken(new Text("RenewerUser"));
+            return dfs.getDelegationToken("RenewerUser");
           }
         });
     DelegationTokenIdentifier identifier = new DelegationTokenIdentifier();
@@ -136,4 +154,64 @@ public class TestDelegationTokenForProxy
     }
   }
 
+  @Test
+  public void testWebHdfsDoAs() throws Exception {
+    WebHdfsTestUtil.LOG.info("START: testWebHdfsDoAs()");
+    ((Log4JLogger)NamenodeWebHdfsMethods.LOG).getLogger().setLevel(Level.ALL);
+    ((Log4JLogger)ExceptionHandler.LOG).getLogger().setLevel(Level.ALL);
+    final UserGroupInformation ugi = UserGroupInformation.createRemoteUser(REAL_USER);
+    WebHdfsTestUtil.LOG.info("ugi.getShortUserName()=" + ugi.getShortUserName());
+    final WebHdfsFileSystem webhdfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, config);
+
+    final Path root = new Path("/");
+    cluster.getFileSystem().setPermission(root, new FsPermission((short)0777));
+
+    {
+      //test GETHOMEDIRECTORY with doAs
+      final URL url = WebHdfsTestUtil.toUrl(webhdfs,
+          GetOpParam.Op.GETHOMEDIRECTORY, root, new DoAsParam(PROXY_USER));
+      final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      final Map<?, ?> m = WebHdfsTestUtil.connectAndGetJson(conn, HttpServletResponse.SC_OK);
+      conn.disconnect();
+
+      final Object responsePath = m.get(Path.class.getSimpleName());
+      WebHdfsTestUtil.LOG.info("responsePath=" + responsePath);
+      Assert.assertEquals("/user/" + PROXY_USER, responsePath);
+    }
+
+    {
+      //test GETHOMEDIRECTORY with DOas
+      final URL url = WebHdfsTestUtil.toUrl(webhdfs,
+          GetOpParam.Op.GETHOMEDIRECTORY, root, new DoAsParam(PROXY_USER) {
+            @Override
+            public String getName() {
+              return "DOas";
+            }
+          });
+      final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      final Map<?, ?> m = WebHdfsTestUtil.connectAndGetJson(conn, HttpServletResponse.SC_OK);
+      conn.disconnect();
+
+      final Object responsePath = m.get(Path.class.getSimpleName());
+      WebHdfsTestUtil.LOG.info("responsePath=" + responsePath);
+      Assert.assertEquals("/user/" + PROXY_USER, responsePath);
+    }
+
+    {
+      //test create file with doAs
+      final Path f = new Path("/testWebHdfsDoAs/a.txt");
+      final PutOpParam.Op op = PutOpParam.Op.CREATE;
+      final URL url = WebHdfsTestUtil.toUrl(webhdfs, op, f, new DoAsParam(PROXY_USER));
+      WebHdfsTestUtil.LOG.info("url=" + url);
+      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      conn = WebHdfsTestUtil.twoStepWrite(conn, op);
+      final FSDataOutputStream out = WebHdfsTestUtil.write(webhdfs, op, conn, 4096);
+      out.write("Hello, webhdfs user!".getBytes());
+      out.close();
+
+      final FileStatus status = webhdfs.getFileStatus(f);
+      WebHdfsTestUtil.LOG.info("status.getOwner()=" + status.getOwner());
+      Assert.assertEquals(PROXY_USER, status.getOwner());
+    }
+  }
 }

Modified: hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java?rev=1200737&r1=1200736&r2=1200737&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java Fri Nov 11 04:34:16 2011
@@ -23,9 +23,8 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.net.HttpURLConnection;
-import java.net.URI;
 import java.net.URL;
-import java.security.PrivilegedExceptionAction;
+import java.util.Map;
 
 import javax.servlet.http.HttpServletResponse;
 
@@ -40,6 +39,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.web.resources.DoAsParam;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
 import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
 import org.apache.hadoop.hdfs.web.resources.PutOpParam;
@@ -51,6 +51,8 @@ public class TestWebHdfsFileSystemContra
   private static final Configuration conf = new Configuration();
   private static final MiniDFSCluster cluster;
   private String defaultWorkingDirectory;
+
+  private UserGroupInformation ugi;
 
   static {
     conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
@@ -68,20 +70,11 @@ public class TestWebHdfsFileSystemContra
 
   @Override
   protected void setUp() throws Exception {
-    final String uri = WebHdfsFileSystem.SCHEME  + "://"
-        + conf.get("dfs.http.address");
-
     //get file system as a non-superuser
     final UserGroupInformation current = UserGroupInformation.getCurrentUser();
-    final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
+    ugi = UserGroupInformation.createUserForTesting(
         current.getShortUserName() + "x", new String[]{"user"});
-    fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
-      @Override
-      public FileSystem run() throws Exception {
-        return FileSystem.get(new URI(uri), conf);
-      }
-    });
-
+    fs = WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, conf);
     defaultWorkingDirectory = fs.getWorkingDirectory().toUri().getPath();
   }
 
@@ -283,9 +276,29 @@ public class TestWebHdfsFileSystemContra
   public void testResponseCode() throws IOException {
     final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs;
+    final Path root = new Path("/");
     final Path dir = new Path("/test/testUrl");
     assertTrue(webhdfs.mkdirs(dir));
 
+    {//test GETHOMEDIRECTORY
+      final URL url = webhdfs.toUrl(GetOpParam.Op.GETHOMEDIRECTORY, root);
+      final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      final Map<?, ?> m = WebHdfsTestUtil.connectAndGetJson(
+          conn, HttpServletResponse.SC_OK);
+      assertEquals(WebHdfsFileSystem.getHomeDirectoryString(ugi),
+          m.get(Path.class.getSimpleName()));
+      conn.disconnect();
+    }
+
+    {//test GETHOMEDIRECTORY with unauthorized doAs
+      final URL url = webhdfs.toUrl(GetOpParam.Op.GETHOMEDIRECTORY, root,
+          new DoAsParam(ugi.getShortUserName() + "proxy"));
+      final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      conn.connect();
+      assertEquals(HttpServletResponse.SC_UNAUTHORIZED, conn.getResponseCode());
conn.disconnect(); + } + {//test set owner with empty parameters final URL url = webhdfs.toUrl(PutOpParam.Op.SETOWNER, dir); final HttpURLConnection conn = (HttpURLConnection) url.openConnection();