hadoop-hdfs-commits mailing list archives

From: szets...@apache.org
Subject: svn commit: r931428 - in /hadoop/hdfs/trunk: CHANGES.txt src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/AuthorizationFilter.java src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestAuthorizationFilter.java
Date: Wed, 07 Apr 2010 04:21:20 GMT
Author: szetszwo
Date: Wed Apr  7 04:21:20 2010
New Revision: 931428

URL: http://svn.apache.org/viewvc?rev=931428&view=rev
Log:
HDFS-1012. hdfsproxy: Support for fully qualified HDFS path in addition to simple unqualified
path.  Contributed by Srikanth Sundarrajan
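
For illustration only (the host names below are hypothetical, not taken from the commit): an unqualified allowed path looks like /user/data, while its fully qualified counterparts look like hdfs://namenode.example.com/user/data or hdfs://namenode.example.com:8020/user/data. With this change the LDAP entry may list either form; a qualified entry is honored only when its authority matches the proxy's configured fs.default.name.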

Modified:
    hadoop/hdfs/trunk/CHANGES.txt
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/AuthorizationFilter.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestAuthorizationFilter.java

Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=931428&r1=931427&r2=931428&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Wed Apr  7 04:21:20 2010
@@ -128,6 +128,9 @@ Trunk (unreleased changes)
     HDFS-997. Allow datanode storage directory permissions to be configurable.
     (Luke Lu via cdouglas)
 
+    HDFS-1012. hdfsproxy: Support for fully qualified HDFS path in addition to
+    simple unqualified path.  (Srikanth Sundarrajan via szetszwo)
+
   OPTIMIZATIONS
 
     HDFS-946. NameNode should not return full path name when listing a

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/AuthorizationFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/AuthorizationFilter.java?rev=931428&r1=931427&r2=931428&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/AuthorizationFilter.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/AuthorizationFilter.java Wed Apr  7 04:21:20 2010
@@ -21,18 +21,23 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.conf.Configuration;
 
 import javax.servlet.*;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import java.io.IOException;
-import java.util.Arrays;
 import java.util.List;
+import java.util.Arrays;
 import java.util.regex.Pattern;
+import java.util.regex.Matcher;
 
 public class AuthorizationFilter implements Filter {
   public static final Log LOG = LogFactory.getLog(AuthorizationFilter.class);
 
+  private static final Pattern HDFS_PATH_PATTERN = Pattern
+      .compile("(^hdfs://([\\w\\-]+(\\.)?)+:\\d+|^hdfs://([\\w\\-]+(\\.)?)+)");
+
   /** Pattern for a filter to find out if a request is HFTP/HSFTP request */
   protected static final Pattern HFTP_PATTERN = Pattern
       .compile("^(/listPaths|/data|/streamFile|/file)$");
@@ -44,8 +49,14 @@ public class AuthorizationFilter impleme
   protected static final Pattern FILEPATH_PATTERN = Pattern
       .compile("^(/listPaths|/data|/file)$");
 
+  protected String namenode;
+  
   /** {@inheritDoc} **/
   public void init(FilterConfig filterConfig) throws ServletException {
+    Configuration conf = new Configuration(false);
+    conf.addResource("hdfsproxy-default.xml");
+    conf.addResource("hdfsproxy-site.xml");
+    namenode = conf.get("fs.default.name");
   }
 
   /** {@inheritDoc} **/
@@ -120,24 +131,48 @@ public class AuthorizationFilter impleme
     return filePath;
   }
 
-  /** check that the requested path is listed in the ldap entry */
-  protected boolean checkHdfsPath(String pathInfo, List<Path> allowedPaths) {
+  /** check that the requested path is listed in the ldap entry
+   * @param pathInfo - Path to check access
+   * @param ldapPaths - List of paths allowed access
+   * @return true if access allowed, false otherwise */
+  public boolean checkHdfsPath(String pathInfo,
+                               List<Path> ldapPaths) {
     if (pathInfo == null || pathInfo.length() == 0) {
       LOG.info("Can't get file path from the request");
       return false;
     }
-    Path userPath = new Path(pathInfo);
-    while (userPath != null) {
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("\n Checking file path " + userPath);
+    for (Path ldapPathVar : ldapPaths) {
+      String ldapPath = ldapPathVar.toString();
+      if (isPathQualified(ldapPath) &&
+          isPathAuthorized(ldapPath)) {
+        String allowedPath = extractPath(ldapPath);
+        if (pathInfo.startsWith(allowedPath))
+          return true;
+      } else {
+        if (pathInfo.startsWith(ldapPath))
+          return true;
       }
-      if (allowedPaths.contains(userPath))
-        return true;
-      userPath = userPath.getParent();
     }
     return false;
   }
 
+  private String extractPath(String ldapPath) {
+    return HDFS_PATH_PATTERN.split(ldapPath)[1];
+  }
+
+  private boolean isPathAuthorized(String pathStr) {
+    Matcher namenodeMatcher = HDFS_PATH_PATTERN.matcher(pathStr);
+    return namenodeMatcher.find() && namenodeMatcher.group().contains(namenode);
+  }
+
+  private boolean isPathQualified(String pathStr) {
+    if (pathStr == null || pathStr.trim().isEmpty()) {
+      return false;
+    } else {
+      return HDFS_PATH_PATTERN.matcher(pathStr).find();
+    }
+  }
+
   /** {@inheritDoc} **/
   public void destroy() {
   }
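
A minimal standalone sketch (not part of the commit) of how the new HDFS_PATH_PATTERN is used: isPathQualified() detects a leading hdfs:// authority, isPathAuthorized() checks that the authority matches the configured namenode, and extractPath() strips the authority so the remaining path can be compared with startsWith(). The regex is copied from the patch; the sample values are made up.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class QualifiedPathDemo {
  // Same pattern as AuthorizationFilter.HDFS_PATH_PATTERN in this patch.
  private static final Pattern HDFS_PATH_PATTERN = Pattern
      .compile("(^hdfs://([\\w\\-]+(\\.)?)+:\\d+|^hdfs://([\\w\\-]+(\\.)?)+)");

  public static void main(String[] args) {
    String namenode = "hdfs://apache.org";       // what init() would read from fs.default.name
    String ldapPath = "hdfs://apache.org/data";  // fully qualified allowed path from LDAP
    String pathInfo = "/data/file";              // path requested through the proxy

    Matcher m = HDFS_PATH_PATTERN.matcher(ldapPath);
    boolean qualified = m.find();                                    // true: entry carries an authority
    boolean authorized = qualified && m.group().contains(namenode);  // true: same namenode
    String allowedPath = HDFS_PATH_PATTERN.split(ldapPath)[1];       // "/data"

    // Mirrors testPathPermitQualified: a request under the allowed subtree is accepted.
    System.out.println(qualified && authorized && pathInfo.startsWith(allowedPath)); // true
  }
}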

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestAuthorizationFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestAuthorizationFilter.java?rev=931428&r1=931427&r2=931428&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestAuthorizationFilter.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestAuthorizationFilter.java Wed Apr  7 04:21:20 2010
@@ -55,18 +55,28 @@ public class TestAuthorizationFilter ext
     }
   }
 
+  private class ConfiguredAuthorizationFilter extends AuthorizationFilter {
+    
+    private ConfiguredAuthorizationFilter(String nameNode) {
+      this.namenode = nameNode;
+    }  
+  }
+
   public void beginPathRestriction(WebRequest theRequest) {
     theRequest.setURL("proxy-test:0", null, "/streamFile", null,
         "filename=/nontestdir");
   }
 
   public void testPathRestriction() throws ServletException, IOException {
-    AuthorizationFilter filter = new AuthorizationFilter();
+    AuthorizationFilter filter = new 
+        ConfiguredAuthorizationFilter("hdfs://apache.org");
     request.setRemoteIPAddress("127.0.0.1");
     request.setAttribute("org.apache.hadoop.hdfsproxy.authorized.userID",
         System.getProperty("user.name"));
     List<Path> paths = new ArrayList<Path>();
     paths.add(new Path("/deny"));
+    paths.add(new Path("hdfs://test:100/deny"));
+    paths.add(new Path("hdfs://test/deny"));
     request.setAttribute("org.apache.hadoop.hdfsproxy.authorized.paths",
         paths);
     FilterChain mockFilterChain = new DummyFilterChain();
@@ -79,4 +89,73 @@ public class TestAuthorizationFilter ext
         + theResponse.getText() + "]", theResponse.getText().indexOf(
         "is not authorized to access path") > 0);
   }
+
+  public void beginPathPermit(WebRequest theRequest) {
+    theRequest.setURL("proxy-test:0", null, "/streamFile", null,
+        "filename=/data/file");
+  }
+
+  public void testPathPermit() throws ServletException, IOException {
+    AuthorizationFilter filter = new 
+        ConfiguredAuthorizationFilter("hdfs://apache.org");
+    request.setRemoteIPAddress("127.0.0.1");
+    request.setAttribute("org.apache.hadoop.hdfsproxy.authorized.userID",
+        System.getProperty("user.name"));
+    List<Path> paths = new ArrayList<Path>();
+    paths.add(new Path("/data"));
+    request.setAttribute("org.apache.hadoop.hdfsproxy.authorized.paths",
+        paths);
+    FilterChain mockFilterChain = new DummyFilterChain();
+    filter.doFilter(request, response, mockFilterChain);
+  }
+
+  public void endPathPermit(WebResponse theResponse) {
+    assertEquals(theResponse.getStatusCode(), 200);
+  }
+
+  public void beginPathPermitQualified(WebRequest theRequest) {
+    theRequest.setURL("proxy-test:0", null, "/streamFile", null,
+        "filename=/data/file");
+  }
+
+  public void testPathPermitQualified() throws ServletException, IOException {
+    AuthorizationFilter filter = new 
+        ConfiguredAuthorizationFilter("hdfs://apache.org");
+    request.setRemoteIPAddress("127.0.0.1");
+    request.setAttribute("org.apache.hadoop.hdfsproxy.authorized.userID",
+        System.getProperty("user.name"));
+    List<Path> paths = new ArrayList<Path>();
+    paths.add(new Path("hdfs://apache.org/data"));
+    request.setAttribute("org.apache.hadoop.hdfsproxy.authorized.paths",
+        paths);
+    FilterChain mockFilterChain = new DummyFilterChain();
+    filter.doFilter(request, response, mockFilterChain);
+  }
+
+  public void endPathPermitQualified(WebResponse theResponse) {
+    assertEquals(theResponse.getStatusCode(), 200);
+  }
+  
+  public void beginPathQualifiedReject(WebRequest theRequest) {
+    theRequest.setURL("proxy-test:0", null, "/streamFile", null,
+        "filename=/data/file");
+  }
+
+  public void testPathQualifiedReject() throws ServletException, IOException {
+    AuthorizationFilter filter = new 
+        ConfiguredAuthorizationFilter("hdfs://apache.org:1111");
+    request.setRemoteIPAddress("127.0.0.1");
+    request.setAttribute("org.apache.hadoop.hdfsproxy.authorized.userID",
+        System.getProperty("user.name"));
+    List<Path> paths = new ArrayList<Path>();
+    paths.add(new Path("hdfs://apache.org:2222/data"));
+    request.setAttribute("org.apache.hadoop.hdfsproxy.authorized.paths",
+        paths);
+    FilterChain mockFilterChain = new DummyFilterChain();
+    filter.doFilter(request, response, mockFilterChain);
+  }
+
+  public void endPathQualifiedReject(WebResponse theResponse) {
+    assertEquals(theResponse.getStatusCode(), 403);
+  }
 }


