hadoop-common-commits mailing list archives

From: szets...@apache.org
Subject: svn commit: r1200733 - in /hadoop/common/branches/branch-0.20-security: ./ src/hdfs/org/apache/hadoop/hdfs/server/namenode/ src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/ src/hdfs/org/apache/hadoop/hdfs/web/ src/hdfs/org/apache/hadoop/h...
Date: Fri, 11 Nov 2011 04:21:45 GMT
Author: szetszwo
Date: Fri Nov 11 04:21:44 2011
New Revision: 1200733

URL: http://svn.apache.org/viewvc?rev=1200733&view=rev
Log:
HDFS-2539. Support doAs and GETHOMEDIRECTORY in webhdfs.
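
For context: the change lets a client append "doas=<user>" to a webhdfs URL so that an authorized proxy user can act on another user's behalf, and adds a GETHOMEDIRECTORY op that returns the home directory as JSON. A minimal sketch of the new request shape (not part of this commit), assuming security is off, a NameNode web UI at localhost:50070, and a "realuser" configured to impersonate "alice" (host, port, and user names are illustrative):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class GetHomeDirectoryExample {
      public static void main(String[] args) throws Exception {
        // "user.name" names the authenticated (real) user; "doas" is the
        // proxy-user parameter added by this commit.
        final URL url = new URL("http://localhost:50070/webhdfs/v1/"
            + "?op=GETHOMEDIRECTORY&user.name=realuser&doas=alice");
        final HttpURLConnection conn = (HttpURLConnection)url.openConnection();
        conn.connect();
        // A successful response is 200 with a JSON body like
        // {"Path":"/user/alice"}; an unauthorized doas is rejected with 401.
        final BufferedReader in = new BufferedReader(
            new InputStreamReader(conn.getInputStream()));
        for(String line; (line = in.readLine()) != null; ) {
          System.out.println(line);
        }
        in.close();
        conn.disconnect();
      }
    }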

Added:
    hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DoAsParam.java
    hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java
Modified:
    hadoop/common/branches/branch-0.20-security/CHANGES.txt
    hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
    hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
    hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/AuthFilter.java
    hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/ParamFilter.java
    hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
    hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java
    hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
    hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java
    hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/UserProvider.java
    hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java
    hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java

Modified: hadoop/common/branches/branch-0.20-security/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/CHANGES.txt?rev=1200733&r1=1200732&r2=1200733&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.20-security/CHANGES.txt Fri Nov 11 04:21:44 2011
@@ -61,6 +61,11 @@ Release 0.20.206.0 - unreleased
 
 Release 0.20.205.1 - unreleased
 
+  NEW FEATURES
+
+    HDFS-2539. Support doAs and GETHOMEDIRECTORY in webhdfs.
+    (szetszwo)
+
   IMPROVEMENTS
 
     HDFS-2427. Change the default permission in webhdfs to 755 and add range

Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java?rev=1200733&r1=1200732&r2=1200733&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java Fri Nov 11 04:21:44 2011
@@ -50,6 +50,7 @@ import org.apache.hadoop.hdfs.server.com
 import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.web.resources.DelegationParam;
+import org.apache.hadoop.hdfs.web.resources.DoAsParam;
 import org.apache.hadoop.hdfs.web.resources.UserParam;
 import org.apache.hadoop.http.HtmlQuoting;
 import org.apache.hadoop.net.NetUtils;
@@ -58,6 +59,8 @@ import org.apache.hadoop.security.Kerber
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.StringUtils;
 
@@ -465,9 +468,10 @@ public class JspHelper {
       final boolean tryUgiParameter) throws IOException {
     final UserGroupInformation ugi;
     final String usernameFromQuery = getUsernameFromQuery(request, tryUgiParameter);
+    final String doAsUserFromQuery = request.getParameter(DoAsParam.NAME);
 
     if(UserGroupInformation.isSecurityEnabled()) {
-      final String user = request.getRemoteUser();
+      final String remoteUser = request.getRemoteUser();
       String tokenString = request.getParameter(DELEGATION_PARAMETER_NAME);
       if (tokenString != null) {
         Token<DelegationTokenIdentifier> token = 
@@ -490,26 +494,36 @@ public class JspHelper {
           }
         }
         ugi = id.getUser();
-        checkUsername(ugi.getShortUserName(), usernameFromQuery);
-        checkUsername(ugi.getShortUserName(), user);
-        ugi.addToken(token);        
+        if (ugi.getRealUser() == null) {
+          //non-proxy case
+          checkUsername(ugi.getShortUserName(), usernameFromQuery);
+          checkUsername(null, doAsUserFromQuery);
+        } else {
+          //proxy case
+          checkUsername(ugi.getRealUser().getShortUserName(), usernameFromQuery);
+          checkUsername(ugi.getShortUserName(), doAsUserFromQuery);
+          ProxyUsers.authorize(ugi, request.getRemoteAddr(), conf);
+        }
+        ugi.addToken(token);
         ugi.setAuthenticationMethod(AuthenticationMethod.TOKEN);
       } else {
-        if(user == null) {
+        if(remoteUser == null) {
           throw new IOException("Security enabled but user not " +
                                 "authenticated by filter");
         }
-        ugi = UserGroupInformation.createRemoteUser(user);
-        checkUsername(ugi.getShortUserName(), usernameFromQuery);
+        final UserGroupInformation realUgi = UserGroupInformation.createRemoteUser(remoteUser);
+        checkUsername(realUgi.getShortUserName(), usernameFromQuery);
         // This is not necessarily true, could have been auth'ed by user-facing
         // filter
-        ugi.setAuthenticationMethod(secureAuthMethod);
+        realUgi.setAuthenticationMethod(secureAuthMethod);
+        ugi = initUGI(realUgi, doAsUserFromQuery, request, true, conf);
       }
     } else { // Security's not on, pull from url
-      ugi = usernameFromQuery == null?
+      final UserGroupInformation realUgi = usernameFromQuery == null?
           getDefaultWebUser(conf) // not specified in request
           : UserGroupInformation.createRemoteUser(usernameFromQuery);
-      ugi.setAuthenticationMethod(AuthenticationMethod.SIMPLE);
+      realUgi.setAuthenticationMethod(AuthenticationMethod.SIMPLE);
+      ugi = initUGI(realUgi, doAsUserFromQuery, request, false, conf);
     }
     
     if(LOG.isDebugEnabled())
@@ -517,12 +531,34 @@ public class JspHelper {
     return ugi;
   }
 
+  private static UserGroupInformation initUGI(final UserGroupInformation realUgi,
+      final String doAsUserFromQuery, final HttpServletRequest request,
+      final boolean isSecurityEnabled, final Configuration conf
+      ) throws AuthorizationException {
+    final UserGroupInformation ugi;
+    if (doAsUserFromQuery == null) {
+      //non-proxy case
+      ugi = realUgi;
+    } else {
+      //proxy case
+      ugi = UserGroupInformation.createProxyUser(doAsUserFromQuery, realUgi);
+      ugi.setAuthenticationMethod(
+          isSecurityEnabled? AuthenticationMethod.PROXY: AuthenticationMethod.SIMPLE);
+      ProxyUsers.authorize(ugi, request.getRemoteAddr(), conf);
+    }
+    return ugi;
+  }
+
   /**
    * Expected user name should be a short name.
    */
   private static void checkUsername(final String expected, final String name
       ) throws IOException {
-    if (name == null) {
+    if (expected == null && name != null) {
+      throw new IOException("Usernames not matched: expecting null but name="
+          + name);
+    }
+    if (name == null) { //name is optional, null is okay
       return;
     }
     KerberosName u = new KerberosName(name);
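
The new ProxyUsers.authorize calls consult Hadoop's standard proxy-user settings (hadoop.proxyuser.<user>.groups and hadoop.proxyuser.<user>.hosts). A sketch of how a test or deployment would authorize impersonation, assuming the ProxyUsers config-key helpers available on this branch (user and group names illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.authorize.ProxyUsers;

    public class ProxyUserConfigSketch {
      public static void main(String[] args) {
        final Configuration conf = new Configuration();
        // Allow "realuser" to impersonate members of group "users"
        // connecting from 127.0.0.1.
        conf.set(ProxyUsers.getProxySuperuserGroupConfKey("realuser"), "users");
        conf.set(ProxyUsers.getProxySuperuserIpConfKey("realuser"), "127.0.0.1");
        // Reload so that subsequent ProxyUsers.authorize calls see the settings.
        ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
      }
    }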

Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java?rev=1200733&r1=1200732&r2=1200733&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java Fri Nov 11 04:21:44 2011
@@ -65,6 +65,7 @@ import org.apache.hadoop.hdfs.web.resour
 import org.apache.hadoop.hdfs.web.resources.DelegationParam;
 import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
 import org.apache.hadoop.hdfs.web.resources.DestinationParam;
+import org.apache.hadoop.hdfs.web.resources.DoAsParam;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
 import org.apache.hadoop.hdfs.web.resources.GroupParam;
 import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
@@ -110,6 +111,21 @@ public class NamenodeWebHdfsMethods {
   private @Context HttpServletRequest request;
   private @Context HttpServletResponse response;
 
+  private void init(final UserGroupInformation ugi,
+      final DelegationParam delegation,
+      final UserParam username, final DoAsParam doAsUser,
+      final UriFsPathParam path, final HttpOpParam<?> op,
+      final Param<?, ?>... parameters) throws IOException {
+    if (LOG.isTraceEnabled()) {
+      LOG.trace("HTTP " + op.getValue().getType() + ": " + op + ", " + path
+          + ", ugi=" + ugi + ", " + username + ", " + doAsUser
+          + Param.toSortedString(", ", parameters));
+    }
+
+    //clear content type
+    response.setContentType(null);
+  }
+
   private static DatanodeInfo chooseDatanode(final NameNode namenode,
       final String path, final HttpOpParam.Op op, final long openOffset
       ) throws IOException {
@@ -153,6 +169,7 @@ public class NamenodeWebHdfsMethods {
 
   private URI redirectURI(final NameNode namenode,
       final UserGroupInformation ugi, final DelegationParam delegation,
+      final UserParam username, final DoAsParam doAsUser,
       final String path, final HttpOpParam.Op op, final long openOffset,
       final Param<?, ?>... parameters) throws URISyntaxException, IOException {
     final DatanodeInfo dn = chooseDatanode(namenode, path, op, openOffset);
@@ -160,7 +177,7 @@ public class NamenodeWebHdfsMethods {
     final String delegationQuery;
     if (!UserGroupInformation.isSecurityEnabled()) {
       //security disabled
-      delegationQuery = "";
+      delegationQuery = Param.toSortedString("&", doAsUser, username);
     } else if (delegation.getValue() != null) {
       //client has provided a token
       delegationQuery = "&" + delegation;
@@ -170,8 +187,7 @@ public class NamenodeWebHdfsMethods {
           namenode, ugi, request.getUserPrincipal().getName());
       delegationQuery = "&" + new DelegationParam(t.encodeToUrlString());
     }
-    final String query = op.toQueryString()
-        + '&' + new UserParam(ugi) + delegationQuery
+    final String query = op.toQueryString() + delegationQuery
         + Param.toSortedString("&", parameters);
     final String uripath = WebHdfsFileSystem.PATH_PREFIX + path;
 
@@ -192,6 +208,10 @@ public class NamenodeWebHdfsMethods {
       @Context final UserGroupInformation ugi,
       @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
           final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
           final PutOpParam op,
       @QueryParam(DestinationParam.NAME) @DefaultValue(DestinationParam.DEFAULT)
@@ -217,9 +237,9 @@ public class NamenodeWebHdfsMethods {
       @QueryParam(TokenArgumentParam.NAME) @DefaultValue(TokenArgumentParam.DEFAULT)
           final TokenArgumentParam delegationTokenArgument
       ) throws IOException, InterruptedException {
-    return put(ugi, delegation, ROOT, op, destination, owner, group,
-        permission, overwrite, bufferSize, replication, blockSize,
-        modificationTime, accessTime, delegationTokenArgument);
+    return put(ugi, delegation, username, doAsUser, ROOT, op, destination,
+        owner, group, permission, overwrite, bufferSize, replication,
+        blockSize, modificationTime, accessTime, delegationTokenArgument);
   }
 
   /** Handle HTTP PUT request. */
@@ -231,6 +251,10 @@ public class NamenodeWebHdfsMethods {
       @Context final UserGroupInformation ugi,
       @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
           final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
       @QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
           final PutOpParam op,
@@ -258,15 +282,9 @@ public class NamenodeWebHdfsMethods {
           final TokenArgumentParam delegationTokenArgument
       ) throws IOException, InterruptedException {
 
-    if (LOG.isTraceEnabled()) {
-      LOG.trace(op + ": " + path + ", ugi=" + ugi
-          + Param.toSortedString(", ", destination, owner, group, permission,
-              overwrite, bufferSize, replication, blockSize,
-              modificationTime, accessTime));
-    }
-
-    //clear content type
-    response.setContentType(null);
+    init(ugi, delegation, username, doAsUser, path, op, destination, owner,
+        group, permission, overwrite, bufferSize, replication, blockSize,
+        modificationTime, accessTime, delegationTokenArgument);
 
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override
@@ -281,8 +299,8 @@ public class NamenodeWebHdfsMethods {
     switch(op.getValue()) {
     case CREATE:
     {
-      final URI uri = redirectURI(namenode, ugi, delegation, fullpath,
-          op.getValue(), -1L,
+      final URI uri = redirectURI(namenode, ugi, delegation, username, doAsUser,
+          fullpath, op.getValue(), -1L,
           permission, overwrite, bufferSize, replication, blockSize);
       return Response.temporaryRedirect(uri).build();
     } 
@@ -358,12 +376,16 @@ public class NamenodeWebHdfsMethods {
       @Context final UserGroupInformation ugi,
       @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
           final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @QueryParam(PostOpParam.NAME) @DefaultValue(PostOpParam.DEFAULT)
           final PostOpParam op,
       @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
           final BufferSizeParam bufferSize
       ) throws IOException, InterruptedException {
-    return post(ugi, delegation, ROOT, op, bufferSize);
+    return post(ugi, delegation, username, doAsUser, ROOT, op, bufferSize);
   }
 
   /** Handle HTTP POST request. */
@@ -375,6 +397,10 @@ public class NamenodeWebHdfsMethods {
       @Context final UserGroupInformation ugi,
       @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
           final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
       @QueryParam(PostOpParam.NAME) @DefaultValue(PostOpParam.DEFAULT)
           final PostOpParam op,
@@ -382,13 +408,7 @@ public class NamenodeWebHdfsMethods {
           final BufferSizeParam bufferSize
       ) throws IOException, InterruptedException {
 
-    if (LOG.isTraceEnabled()) {
-      LOG.trace(op + ": " + path + ", ugi=" + ugi
-          + Param.toSortedString(", ", bufferSize));
-    }
-
-    //clear content type
-    response.setContentType(null);
+    init(ugi, delegation, username, doAsUser, path, op, bufferSize);
 
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override
@@ -402,8 +422,8 @@ public class NamenodeWebHdfsMethods {
     switch(op.getValue()) {
     case APPEND:
     {
-      final URI uri = redirectURI(namenode, ugi, delegation, fullpath,
-          op.getValue(), -1L, bufferSize);
+      final URI uri = redirectURI(namenode, ugi, delegation, username, doAsUser,
+          fullpath, op.getValue(), -1L, bufferSize);
       return Response.temporaryRedirect(uri).build();
     }
     default:
@@ -425,6 +445,10 @@ public class NamenodeWebHdfsMethods {
       @Context final UserGroupInformation ugi,
       @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
           final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @QueryParam(GetOpParam.NAME) @DefaultValue(GetOpParam.DEFAULT)
           final GetOpParam op,
       @QueryParam(OffsetParam.NAME) @DefaultValue(OffsetParam.DEFAULT)
@@ -436,7 +460,8 @@ public class NamenodeWebHdfsMethods {
       @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
           final BufferSizeParam bufferSize
       ) throws IOException, URISyntaxException, InterruptedException {
-    return get(ugi, delegation, ROOT, op, offset, length, renewer, bufferSize);
+    return get(ugi, delegation, username, doAsUser, ROOT, op,
+        offset, length, renewer, bufferSize);
   }
 
   /** Handle HTTP GET request. */
@@ -447,6 +472,10 @@ public class NamenodeWebHdfsMethods {
       @Context final UserGroupInformation ugi,
       @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
           final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
       @QueryParam(GetOpParam.NAME) @DefaultValue(GetOpParam.DEFAULT)
           final GetOpParam op,
@@ -460,13 +489,8 @@ public class NamenodeWebHdfsMethods {
           final BufferSizeParam bufferSize
       ) throws IOException, InterruptedException {
 
-    if (LOG.isTraceEnabled()) {
-      LOG.trace(op + ": " + path + ", ugi=" + ugi
-          + Param.toSortedString(", ", offset, length, renewer, bufferSize));
-    }
-
-    //clear content type
-    response.setContentType(null);
+    init(ugi, delegation, username, doAsUser, path, op,
+        offset, length, renewer, bufferSize);
 
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override
@@ -480,8 +504,8 @@ public class NamenodeWebHdfsMethods {
     switch(op.getValue()) {
     case OPEN:
     {
-      final URI uri = redirectURI(namenode, ugi, delegation, fullpath,
-          op.getValue(), offset.getValue(), offset, length, bufferSize);
+      final URI uri = redirectURI(namenode, ugi, delegation, username, doAsUser,
+          fullpath, op.getValue(), offset.getValue(), offset, length, bufferSize);
       return Response.temporaryRedirect(uri).build();
     }
     case GET_BLOCK_LOCATIONS:
@@ -516,17 +540,28 @@ public class NamenodeWebHdfsMethods {
     }
     case GETFILECHECKSUM:
     {
-      final URI uri = redirectURI(namenode, ugi, delegation, fullpath,
-          op.getValue(), -1L);
+      final URI uri = redirectURI(namenode, ugi, delegation, username, doAsUser,
+          fullpath, op.getValue(), -1L);
       return Response.temporaryRedirect(uri).build();
     }
     case GETDELEGATIONTOKEN:
     {
+      if (delegation.getValue() != null) {
+        throw new IllegalArgumentException(delegation.getName()
+            + " parameter is not null.");
+      }
       final Token<? extends TokenIdentifier> token = generateDelegationToken(
           namenode, ugi, renewer.getValue());
       final String js = JsonUtil.toJsonString(token);
       return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
     }
+    case GETHOMEDIRECTORY:
+    {
+      final String js = JsonUtil.toJsonString(
+          org.apache.hadoop.fs.Path.class.getSimpleName(),
+          WebHdfsFileSystem.getHomeDirectoryString(ugi));
+      return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+    }
     default:
       throw new UnsupportedOperationException(op + " is not supported");
     }    
@@ -588,12 +623,18 @@ public class NamenodeWebHdfsMethods {
   @Produces(MediaType.APPLICATION_JSON)
   public Response deleteRoot(
       @Context final UserGroupInformation ugi,
+      @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
+          final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @QueryParam(DeleteOpParam.NAME) @DefaultValue(DeleteOpParam.DEFAULT)
           final DeleteOpParam op,
       @QueryParam(RecursiveParam.NAME) @DefaultValue(RecursiveParam.DEFAULT)
           final RecursiveParam recursive
       ) throws IOException, InterruptedException {
-    return delete(ugi, ROOT, op, recursive);
+    return delete(ugi, delegation, username, doAsUser, ROOT, op, recursive);
   }
 
   /** Handle HTTP DELETE request. */
@@ -602,6 +643,12 @@ public class NamenodeWebHdfsMethods {
   @Produces(MediaType.APPLICATION_JSON)
   public Response delete(
       @Context final UserGroupInformation ugi,
+      @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
+          final DelegationParam delegation,
+      @QueryParam(UserParam.NAME) @DefaultValue(UserParam.DEFAULT)
+          final UserParam username,
+      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT)
+          final DoAsParam doAsUser,
       @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
       @QueryParam(DeleteOpParam.NAME) @DefaultValue(DeleteOpParam.DEFAULT)
           final DeleteOpParam op,
@@ -609,13 +656,7 @@ public class NamenodeWebHdfsMethods {
           final RecursiveParam recursive
       ) throws IOException, InterruptedException {
 
-    if (LOG.isTraceEnabled()) {
-      LOG.trace(op + ": " + path + ", ugi=" + ugi
-          + Param.toSortedString(", ", recursive));
-    }
-
-    //clear content type
-    response.setContentType(null);
+    init(ugi, delegation, username, doAsUser, path, op, recursive);
 
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override
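
With security off, the datanode redirect built by redirectURI now forwards the caller's identity: the doas and user.name parameters take the query slot that the delegation token occupies when security is on. A sketch of the resulting query string, assuming the Param classes from this package (user names illustrative):

    import org.apache.hadoop.hdfs.web.resources.DoAsParam;
    import org.apache.hadoop.hdfs.web.resources.Param;
    import org.apache.hadoop.hdfs.web.resources.UserParam;

    public class RedirectQuerySketch {
      public static void main(String[] args) {
        // Mirrors the security-off branch of redirectURI.
        final String delegationQuery = Param.toSortedString("&",
            new DoAsParam("alice"), new UserParam("realuser"));
        System.out.println("op=CREATE" + delegationQuery);
        // prints: op=CREATE&doas=alice&user.name=realuser
      }
    }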

Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/AuthFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/AuthFilter.java?rev=1200733&r1=1200732&r2=1200733&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/AuthFilter.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/AuthFilter.java Fri Nov 11 04:21:44 2011
@@ -18,6 +18,12 @@
 package org.apache.hadoop.hdfs.web;
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 import java.util.Properties;
 
 import javax.servlet.FilterChain;
@@ -26,6 +32,7 @@ import javax.servlet.ServletException;
 import javax.servlet.ServletRequest;
 import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
 
 import org.apache.hadoop.hdfs.web.resources.DelegationParam;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -67,15 +74,77 @@ public class AuthFilter extends Authenti
   @Override
   public void doFilter(ServletRequest request, ServletResponse response,
       FilterChain filterChain) throws IOException, ServletException {
-    HttpServletRequest httpRequest = (HttpServletRequest) request;
-    String tokenString = httpRequest
-        .getParameter(DelegationParam.NAME);
+    final HttpServletRequest httpRequest = toLowerCase((HttpServletRequest)request);
+    final String tokenString = httpRequest.getParameter(DelegationParam.NAME);
     if (tokenString != null) {
       //Token is present in the url, therefore token will be used for
       //authentication, bypass kerberos authentication.
       filterChain.doFilter(httpRequest, response);
       return;
     }
-    super.doFilter(request, response, filterChain);
+    super.doFilter(httpRequest, response, filterChain);
+  }
+
+  private static HttpServletRequest toLowerCase(final HttpServletRequest request) {
+    @SuppressWarnings("unchecked")
+    final Map<String, String[]> original = (Map<String, String[]>)request.getParameterMap();
+    if (!ParamFilter.containsUpperCase(original.keySet())) {
+      return request;
+    }
+
+    final Map<String, List<String>> m = new HashMap<String, List<String>>();
+    for(Map.Entry<String, String[]> entry : original.entrySet()) {
+      final String key = entry.getKey().toLowerCase();
+      List<String> strings = m.get(key);
+      if (strings == null) {
+        strings = new ArrayList<String>();
+        m.put(key, strings);
+      }
+      for(String v : entry.getValue()) {
+        strings.add(v);
+      }
+    }
+
+    return new HttpServletRequestWrapper(request) {
+      private Map<String, String[]> parameters = null;
+
+      @Override
+      public Map<String, String[]> getParameterMap() {
+        if (parameters == null) {
+          parameters = new HashMap<String, String[]>();
+          for(Map.Entry<String, List<String>> entry : m.entrySet()) {
+            final List<String> a = entry.getValue();
+            parameters.put(entry.getKey(), a.toArray(new String[a.size()]));
+          }
+        }
+       return parameters;
+      }
+
+      @Override
+      public String getParameter(String name) {
+        final List<String> a = m.get(name);
+        return a == null? null: a.get(0);
+      }
+      
+      @Override
+      public String[] getParameterValues(String name) {
+        return getParameterMap().get(name);
+      }
+
+      @Override
+      public Enumeration<String> getParameterNames() {
+        final Iterator<String> i = m.keySet().iterator();
+        return new Enumeration<String>() {
+          @Override
+          public boolean hasMoreElements() {
+            return i.hasNext();
+          }
+          @Override
+          public String nextElement() {
+            return i.next();
+          }
+        };
+      }
+    };
   }
 }
\ No newline at end of file

Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/ParamFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/ParamFilter.java?rev=1200733&r1=1200732&r2=1200733&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/ParamFilter.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/ParamFilter.java Fri Nov 11 04:21:44 2011
@@ -59,7 +59,7 @@ public class ParamFilter implements Reso
   }
 
   /** Do the strings contain upper case letters? */
-  private static boolean containsUpperCase(final Iterable<String> strings) {
+  static boolean containsUpperCase(final Iterable<String> strings) {
     for(String s : strings) {
       for(int i = 0; i < s.length(); i++) {
         if (Character.isUpperCase(s.charAt(i))) {

Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java?rev=1200733&r1=1200732&r2=1200733&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java Fri Nov 11 04:21:44 2011
@@ -39,6 +39,7 @@ import org.apache.hadoop.fs.BlockLocatio
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
@@ -48,7 +49,6 @@ import org.apache.hadoop.hdfs.ByteRangeI
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
-import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
@@ -86,6 +86,8 @@ import org.apache.hadoop.security.Securi
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.TokenRenewer;
@@ -195,9 +197,14 @@ public class WebHdfsFileSystem extends F
     }
   }
 
+  /** @return the home directory. */
+  public static String getHomeDirectoryString(final UserGroupInformation ugi) {
+    return "/user/" + ugi.getShortUserName();
+  }
+
   @Override
   public Path getHomeDirectory() {
-    return makeQualified(new Path("/user/" + ugi.getShortUserName()));
+    return makeQualified(new Path(getHomeDirectoryString(ugi)));
   }
 
   @Override
@@ -219,7 +226,7 @@ public class WebHdfsFileSystem extends F
     return f.isAbsolute()? f: new Path(workingDir, f);
   }
 
-  private static Map<?, ?> jsonParse(final InputStream in) throws IOException {
+  static Map<?, ?> jsonParse(final InputStream in) throws IOException {
     if (in == null) {
       throw new IOException("The input stream is null.");
     }
@@ -245,9 +252,13 @@ public class WebHdfsFileSystem extends F
 
       final RemoteException re = JsonUtil.toRemoteException(m);
       throw re.unwrapRemoteException(AccessControlException.class,
-          DSQuotaExceededException.class,
+          InvalidToken.class,
+          AuthenticationException.class,
+          AuthorizationException.class,
+          FileAlreadyExistsException.class,
           FileNotFoundException.class,
           SafeModeException.class,
+          DSQuotaExceededException.class,
           NSQuotaExceededException.class);
     }
     return null;
@@ -343,7 +354,7 @@ public class WebHdfsFileSystem extends F
   /**
    * Two-step Create/Append:
    * Step 1) Submit a Http request with neither auto-redirect nor data. 
-   * Step 2) Submit Http PUT with the URL from the Location header with data.
+   * Step 2) Submit another Http request with the URL from the Location header with data.
    * 
    * The reason of having two-step create/append is for preventing clients to
    * send out the data before the redirect. This issue is addressed by the
@@ -353,7 +364,7 @@ public class WebHdfsFileSystem extends F
    * 100-continue". The two-step create/append is a temporary workaround for
    * the software library bugs.
    */
-  private static HttpURLConnection twoStepWrite(HttpURLConnection conn,
+  static HttpURLConnection twoStepWrite(HttpURLConnection conn,
       final HttpOpParam.Op op) throws IOException {
     //Step 1) Submit a Http request with neither auto-redirect nor data. 
     conn.setInstanceFollowRedirects(false);
@@ -363,7 +374,7 @@ public class WebHdfsFileSystem extends F
     final String redirect = conn.getHeaderField("Location");
     conn.disconnect();
 
-    //Step 2) Submit Http PUT with the URL from the Location header with data.
+    //Step 2) Submit another Http request with the URL from the Location header with data.
     conn = (HttpURLConnection)new URL(redirect).openConnection();
     conn.setRequestMethod(op.getType().toString());
     return conn;
@@ -415,8 +426,7 @@ public class WebHdfsFileSystem extends F
 
   private FileStatus makeQualified(HdfsFileStatus f, Path parent) {
     return new FileStatus(f.getLen(), f.isDir(), f.getReplication(),
-        f.getBlockSize(), f.getModificationTime(),
-        f.getAccessTime(),
+        f.getBlockSize(), f.getModificationTime(), f.getAccessTime(),
         f.getPermission(), f.getOwner(), f.getGroup(),
         f.getFullPath(parent).makeQualified(this)); // fully-qualify path
   }
@@ -478,7 +488,8 @@ public class WebHdfsFileSystem extends F
 
   @Override
   public long getDefaultBlockSize() {
-    return getConf().getLong("dfs.block.size", FSConstants.DEFAULT_BLOCK_SIZE);
+    return getConf().getLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY,
+        DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT);
   }
 
   @Override
@@ -487,7 +498,7 @@ public class WebHdfsFileSystem extends F
         DFSConfigKeys.DFS_REPLICATION_DEFAULT);
   }
 
-  private FSDataOutputStream write(final HttpOpParam.Op op,
+  FSDataOutputStream write(final HttpOpParam.Op op,
       final HttpURLConnection conn, final int bufferSize) throws IOException {
     return new FSDataOutputStream(new BufferedOutputStream(
         conn.getOutputStream(), bufferSize), statistics) {
@@ -496,7 +507,11 @@ public class WebHdfsFileSystem extends F
         try {
           super.close();
         } finally {
-          validateResponse(op, conn);
+          try {
+            validateResponse(op, conn);
+          } finally {
+            conn.disconnect();
+          }
         }
       }
     };
@@ -661,9 +676,11 @@ public class WebHdfsFileSystem extends F
   }
 
   @Override
-  public synchronized <T extends TokenIdentifier> void setDelegationToken(
+  public <T extends TokenIdentifier> void setDelegationToken(
       final Token<T> token) {
-    delegationToken = token;
+    synchronized(this) {
+      delegationToken = token;
+    }
   }
 
   private synchronized long renewDelegationToken(final Token<?> token
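
The two-step create/append flow documented above can be driven directly, as the new tests do; a sketch under the assumption that the code sits in org.apache.hadoop.hdfs.web (so the package-private toUrl, twoStepWrite, and write helpers are visible), with path and payload illustrative:

    package org.apache.hadoop.hdfs.web;

    import java.io.IOException;
    import java.net.HttpURLConnection;
    import java.net.URL;

    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.web.resources.PutOpParam;

    class TwoStepWriteSketch {
      static void twoStepCreate(final WebHdfsFileSystem webhdfs,
          final Path path) throws IOException {
        final PutOpParam.Op op = PutOpParam.Op.CREATE;
        final URL url = webhdfs.toUrl(op, path);
        HttpURLConnection conn = (HttpURLConnection)url.openConnection();
        conn.setRequestMethod(op.getType().toString());
        // Step 1) submit without data or auto-redirect; the namenode answers
        // with a Location header naming the chosen datanode.
        // Step 2) reopen the connection against that URL.
        conn = WebHdfsFileSystem.twoStepWrite(conn, op);
        conn.setDoOutput(true);
        conn.connect();
        final FSDataOutputStream out = webhdfs.write(op, conn, 4096);
        out.write("Hello, webhdfs user!".getBytes());
        out.close(); // close() runs validateResponse and disconnects
      }
    }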

Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DoAsParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DoAsParam.java?rev=1200733&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DoAsParam.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DoAsParam.java Fri Nov 11 04:21:44 2011
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web.resources;
+
+/** DoAs parameter for proxy user. */
+public class DoAsParam extends StringParam {
+  /** Parameter name. */
+  public static final String NAME = "doas";
+  /** Default parameter value. */
+  public static final String DEFAULT = "";
+
+  private static final Domain DOMAIN = new Domain(NAME, null);
+
+  /**
+   * Constructor.
+   * @param str a string representation of the parameter value.
+   */
+  public DoAsParam(final String str) {
+    super(DOMAIN, str == null || str.equals(DEFAULT)? null: str);
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}
\ No newline at end of file

Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java?rev=1200733&r1=1200732&r2=1200733&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java Fri Nov 11 04:21:44 2011
@@ -30,14 +30,25 @@ import javax.ws.rs.ext.Provider;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hdfs.web.JsonUtil;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.security.authorize.AuthorizationException;
 
 import com.sun.jersey.api.ParamException;
+import com.sun.jersey.api.container.ContainerException;
 
 /** Handle exceptions. */
 @Provider
 public class ExceptionHandler implements ExceptionMapper<Exception> {
   public static final Log LOG = LogFactory.getLog(ExceptionHandler.class);
 
+  private static Exception toCause(Exception e) {
+    final Throwable t = e.getCause();
+    if (t != null && t instanceof Exception) {
+      e = (Exception)e.getCause();
+    }
+    return e;
+  }
+
   private @Context HttpServletResponse response;
 
   @Override
@@ -55,12 +66,20 @@ public class ExceptionHandler implements
       e = new IllegalArgumentException("Invalid value for webhdfs parameter \""
           + paramexception.getParameterName() + "\": "
           + e.getCause().getMessage(), e);
-    } 
+    }
+    if (e instanceof ContainerException) {
+      e = toCause(e);
+    }
+    if (e instanceof RemoteException) {
+      e = ((RemoteException)e).unwrapRemoteException();
+    }
 
     //Map response status
     final Response.Status s;
     if (e instanceof SecurityException) {
       s = Response.Status.UNAUTHORIZED;
+    } else if (e instanceof AuthorizationException) {
+      s = Response.Status.UNAUTHORIZED;
     } else if (e instanceof FileNotFoundException) {
       s = Response.Status.NOT_FOUND;
     } else if (e instanceof IOException) {

Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java?rev=1200733&r1=1200732&r2=1200733&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java Fri Nov 11 04:21:44 2011
@@ -30,6 +30,7 @@ public class GetOpParam extends HttpOpPa
     GETCONTENTSUMMARY(HttpURLConnection.HTTP_OK),
     GETFILECHECKSUM(HttpURLConnection.HTTP_OK),
 
+    GETHOMEDIRECTORY(HttpURLConnection.HTTP_OK),
     GETDELEGATIONTOKEN(HttpURLConnection.HTTP_OK),
 
     /** GET_BLOCK_LOCATIONS is a private unstable op. */

Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java?rev=1200733&r1=1200732&r2=1200733&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java Fri Nov 11 04:21:44 2011
@@ -58,7 +58,7 @@ public abstract class HttpOpParam<E exte
     public static TemporaryRedirectOp valueOf(final Op op) {
       if (op == CREATE.op) {
         return CREATE;
-      } if (op == APPEND.op) {
+      } else if (op == APPEND.op) {
         return APPEND;
       }
       throw new IllegalArgumentException(op + " not found.");

Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/UserProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/UserProvider.java?rev=1200733&r1=1200732&r2=1200733&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/UserProvider.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/UserProvider.java Fri Nov 11 04:21:44 2011
@@ -53,7 +53,8 @@ public class UserProvider
       return JspHelper.getUGI(servletcontext, request, conf,
           AuthenticationMethod.KERBEROS, false);
     } catch (IOException e) {
-      throw new SecurityException("Failed to obtain user group information.", e);
+      throw new SecurityException(
+          "Failed to obtain user group information: " + e, e);
     }
   }
 

Modified: hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java?rev=1200733&r1=1200732&r2=1200733&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java Fri Nov 11 04:21:44 2011
@@ -23,27 +23,44 @@ package org.apache.hadoop.hdfs.security;
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
+import java.net.HttpURLConnection;
 import java.net.InetAddress;
 import java.net.NetworkInterface;
+import java.net.URL;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Enumeration;
+import java.util.Map;
 
-import junit.framework.Assert;
+import javax.servlet.http.HttpServletResponse;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.io.Text;
-import org.apache.commons.logging.*;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
+import org.apache.hadoop.hdfs.web.WebHdfsTestUtil;
+import org.apache.hadoop.hdfs.web.resources.DoAsParam;
+import org.apache.hadoop.hdfs.web.resources.ExceptionHandler;
+import org.apache.hadoop.hdfs.web.resources.GetOpParam;
+import org.apache.hadoop.hdfs.web.resources.PutOpParam;
 import org.apache.hadoop.security.TestDoAsEffectiveUser;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.log4j.Level;
 import org.junit.After;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -87,6 +104,7 @@ public class TestDelegationTokenForProxy
   @Before
   public void setUp() throws Exception {
     config = new Configuration();
+    config.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
     config.setLong(
         DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_MAX_LIFETIME_KEY, 10000);
     config.setLong(
@@ -121,7 +139,7 @@ public class TestDelegationTokenForProxy
             public Token<DelegationTokenIdentifier> run() throws IOException {
               DistributedFileSystem dfs = (DistributedFileSystem) cluster
                   .getFileSystem();
-              return dfs.getDelegationToken(new Text("RenewerUser"));
+              return dfs.getDelegationToken("RenewerUser");
             }
           });
       DelegationTokenIdentifier identifier = new DelegationTokenIdentifier();
@@ -136,4 +154,64 @@ public class TestDelegationTokenForProxy
     }
   }
   
+  @Test
+  public void testWebHdfsDoAs() throws Exception {
+    WebHdfsTestUtil.LOG.info("START: testWebHdfsDoAs()");
+    ((Log4JLogger)NamenodeWebHdfsMethods.LOG).getLogger().setLevel(Level.ALL);
+    ((Log4JLogger)ExceptionHandler.LOG).getLogger().setLevel(Level.ALL);
+    final UserGroupInformation ugi = UserGroupInformation.createRemoteUser(REAL_USER);
+    WebHdfsTestUtil.LOG.info("ugi.getShortUserName()=" + ugi.getShortUserName());
+    final WebHdfsFileSystem webhdfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, config);
+    
+    final Path root = new Path("/");
+    cluster.getFileSystem().setPermission(root, new FsPermission((short)0777));
+
+    {
+      //test GETHOMEDIRECTORY with doAs
+      final URL url = WebHdfsTestUtil.toUrl(webhdfs,
+          GetOpParam.Op.GETHOMEDIRECTORY,  root, new DoAsParam(PROXY_USER));
+      final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      final Map<?, ?> m = WebHdfsTestUtil.connectAndGetJson(conn, HttpServletResponse.SC_OK);
+      conn.disconnect();
+  
+      final Object responsePath = m.get(Path.class.getSimpleName());
+      WebHdfsTestUtil.LOG.info("responsePath=" + responsePath);
+      Assert.assertEquals("/user/" + PROXY_USER, responsePath);
+    }
+
+    {
+      //test GETHOMEDIRECTORY with DOas
+      final URL url = WebHdfsTestUtil.toUrl(webhdfs,
+          GetOpParam.Op.GETHOMEDIRECTORY,  root, new DoAsParam(PROXY_USER) {
+            @Override
+            public String getName() {
+              return "DOas";
+            }
+      });
+      final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      final Map<?, ?> m = WebHdfsTestUtil.connectAndGetJson(conn, HttpServletResponse.SC_OK);
+      conn.disconnect();
+  
+      final Object responsePath = m.get(Path.class.getSimpleName());
+      WebHdfsTestUtil.LOG.info("responsePath=" + responsePath);
+      Assert.assertEquals("/user/" + PROXY_USER, responsePath);
+    }
+
+    {
+      //test create file with doAs
+      final Path f = new Path("/testWebHdfsDoAs/a.txt");
+      final PutOpParam.Op op = PutOpParam.Op.CREATE;
+      final URL url = WebHdfsTestUtil.toUrl(webhdfs, op,  f, new DoAsParam(PROXY_USER));
+      WebHdfsTestUtil.LOG.info("url=" + url);
+      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      conn = WebHdfsTestUtil.twoStepWrite(conn, op);
+      final FSDataOutputStream out = WebHdfsTestUtil.write(webhdfs, op, conn, 4096);
+      out.write("Hello, webhdfs user!".getBytes());
+      out.close();
+  
+      final FileStatus status = webhdfs.getFileStatus(f);
+      WebHdfsTestUtil.LOG.info("status.getOwner()=" + status.getOwner());
+      Assert.assertEquals(PROXY_USER, status.getOwner());
+    }
+  }
 }

Modified: hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java?rev=1200733&r1=1200732&r2=1200733&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java Fri Nov 11 04:21:44 2011
@@ -23,9 +23,8 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.net.HttpURLConnection;
-import java.net.URI;
 import java.net.URL;
-import java.security.PrivilegedExceptionAction;
+import java.util.Map;
 
 import javax.servlet.http.HttpServletResponse;
 
@@ -40,6 +39,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.web.resources.DoAsParam;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
 import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
 import org.apache.hadoop.hdfs.web.resources.PutOpParam;
@@ -51,6 +51,8 @@ public class TestWebHdfsFileSystemContra
   private static final Configuration conf = new Configuration();
   private static final MiniDFSCluster cluster;
   private String defaultWorkingDirectory;
+  
+  private UserGroupInformation ugi;
 
   static {
     conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
@@ -68,20 +70,11 @@ public class TestWebHdfsFileSystemContra
 
   @Override
   protected void setUp() throws Exception {
-    final String uri = WebHdfsFileSystem.SCHEME  + "://"
-        + conf.get("dfs.http.address");
-
     //get file system as a non-superuser
     final UserGroupInformation current = UserGroupInformation.getCurrentUser();
-    final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
+    ugi = UserGroupInformation.createUserForTesting(
         current.getShortUserName() + "x", new String[]{"user"});
-    fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
-      @Override
-      public FileSystem run() throws Exception {
-        return FileSystem.get(new URI(uri), conf);
-      }
-    });
-
+    fs = WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, conf);
     defaultWorkingDirectory = fs.getWorkingDirectory().toUri().getPath();
   }
 
@@ -283,9 +276,29 @@ public class TestWebHdfsFileSystemContra
 
   public void testResponseCode() throws IOException {
     final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs;
+    final Path root = new Path("/");
     final Path dir = new Path("/test/testUrl");
     assertTrue(webhdfs.mkdirs(dir));
 
+    {//test GETHOMEDIRECTORY
+      final URL url = webhdfs.toUrl(GetOpParam.Op.GETHOMEDIRECTORY, root);
+      final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      final Map<?, ?> m = WebHdfsTestUtil.connectAndGetJson(
+          conn, HttpServletResponse.SC_OK);
+      assertEquals(WebHdfsFileSystem.getHomeDirectoryString(ugi),
+          m.get(Path.class.getSimpleName()));
+      conn.disconnect();
+    }
+
+    {//test GETHOMEDIRECTORY with unauthorized doAs
+      final URL url = webhdfs.toUrl(GetOpParam.Op.GETHOMEDIRECTORY, root,
+          new DoAsParam(ugi.getShortUserName() + "proxy"));
+      final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      conn.connect();
+      assertEquals(HttpServletResponse.SC_UNAUTHORIZED, conn.getResponseCode());
+      conn.disconnect();
+    }
+
     {//test set owner with empty parameters
       final URL url = webhdfs.toUrl(PutOpParam.Op.SETOWNER, dir);
       final HttpURLConnection conn = (HttpURLConnection) url.openConnection();

Added: hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java?rev=1200733&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java Fri Nov 11 04:21:44 2011
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web;
+
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.security.PrivilegedExceptionAction;
+import java.util.Map;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
+import org.apache.hadoop.hdfs.web.resources.Param;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.junit.Assert;
+
+public class WebHdfsTestUtil {
+  public static final Log LOG = LogFactory.getLog(WebHdfsTestUtil.class);
+
+  public static WebHdfsFileSystem getWebHdfsFileSystem(final Configuration conf
+      ) throws IOException, URISyntaxException {
+    final String uri = WebHdfsFileSystem.SCHEME  + "://"
+        + conf.get("dfs.http.address");
+    return (WebHdfsFileSystem)FileSystem.get(new URI(uri), conf);
+  }
+
+  public static WebHdfsFileSystem getWebHdfsFileSystemAs(
+      final UserGroupInformation ugi, final Configuration conf
+      ) throws IOException, URISyntaxException, InterruptedException {
+    return ugi.doAs(new PrivilegedExceptionAction<WebHdfsFileSystem>() {
+      @Override
+      public WebHdfsFileSystem run() throws Exception {
+        return getWebHdfsFileSystem(conf);
+      }
+    });
+  }
+
+  public static URL toUrl(final WebHdfsFileSystem webhdfs,
+      final HttpOpParam.Op op, final Path fspath,
+      final Param<?,?>... parameters) throws IOException {
+    final URL url = webhdfs.toUrl(op, fspath, parameters);
+    WebHdfsTestUtil.LOG.info("url=" + url);
+    return url;
+  }
+
+  public static Map<?, ?> connectAndGetJson(final HttpURLConnection conn,
+      final int expectedResponseCode) throws IOException {
+    conn.connect();
+    Assert.assertEquals(expectedResponseCode, conn.getResponseCode());
+    return WebHdfsFileSystem.jsonParse(conn.getInputStream());
+  }
+  
+  public static HttpURLConnection twoStepWrite(HttpURLConnection conn,
+      final HttpOpParam.Op op) throws IOException {
+    conn.setRequestMethod(op.getType().toString());
+    conn = WebHdfsFileSystem.twoStepWrite(conn, op);
+    conn.setDoOutput(true);
+    conn.connect();
+    return conn;
+  }
+
+  public static FSDataOutputStream write(final WebHdfsFileSystem webhdfs,
+      final HttpOpParam.Op op, final HttpURLConnection conn,
+      final int bufferSize) throws IOException {
+    return webhdfs.write(op, conn, bufferSize);
+  }
+}


