hadoop-hdfs-commits mailing list archives

From: szets...@apache.org
Subject: svn commit: r931243 - in /hadoop/hdfs/trunk: ./ src/contrib/hdfsproxy/ src/contrib/hdfsproxy/conf/ src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/
Date: Tue, 06 Apr 2010 17:50:28 GMT
Author: szetszwo
Date: Tue Apr  6 17:50:28 2010
New Revision: 931243

URL: http://svn.apache.org/viewvc?rev=931243&view=rev
Log:
HDFS-481. hdfsproxy: Bug Fixes + HdfsProxy to use proxy user to impersonate the real user.
 Contributed by Srikanth Sundarrajan
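
A minimal sketch (not part of this commit) of what the proxy-user impersonation added here amounts to: the proxy logs in with its own Kerberos credentials (hdfsproxy.kerberos.principal/keytab) and then acts on HDFS as the real end user through a proxy UGI, which is what the new ProxyUtil.getProxyUGIFor() returns and what ProxyStreamFile uses when opening a DFSClient. The class and method names below (ProxyImpersonationSketch, openAs) and the nameNodeAddr/conf parameters are illustrative placeholders, not part of the patch.

    import java.io.IOException;
    import java.net.InetSocketAddress;
    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSClient;
    import org.apache.hadoop.security.UserGroupInformation;

    public class ProxyImpersonationSketch {
      /** Open a DFSClient that acts as the real user while the proxy's own
       *  Kerberos login supplies the credentials (hypothetical helper). */
      static DFSClient openAs(String realUserId, final InetSocketAddress nameNodeAddr,
          final Configuration conf) throws IOException, InterruptedException {
        // getLoginUser() is the proxy's keytab login; createProxyUser wraps the
        // requesting user's id around it, as ProxyUtil.getProxyUGIFor() does.
        UserGroupInformation ugi = UserGroupInformation.createProxyUser(
            realUserId, UserGroupInformation.getLoginUser());
        // Run the HDFS call as the proxied user, mirroring ProxyStreamFile.getDFSClient().
        return ugi.doAs(new PrivilegedExceptionAction<DFSClient>() {
          @Override
          public DFSClient run() throws IOException {
            return new DFSClient(nameNodeAddr, conf);
          }
        });
      }
    }

For the impersonation to be accepted by the NameNode, the cluster must also whitelist the proxy user, which is what the hadoop.proxyuser.* settings added to TestHdfsProxy below illustrate.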

Added:
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/AuthorizationFilter.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestAuthorizationFilter.java
Modified:
    hadoop/hdfs/trunk/CHANGES.txt
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/build.xml
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/hdfsproxy-default.xml
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/tomcat-web.xml
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/LdapIpDirFilter.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileForward.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestLdapIpDirFilter.java

Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=931243&r1=931242&r2=931243&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Tue Apr  6 17:50:28 2010
@@ -227,6 +227,9 @@ Trunk (unreleased changes)
     HDFS-1074. hdfsproxy: Fix bugs in TestProxyUtil.  (Srikanth Sundarrajan
     via szetszwo)
 
+    HDFS-481. hdfsproxy: Bug Fixes + HdfsProxy to use proxy user to
+    impersonate the real user.  (Srikanth Sundarrajan via szetszwo)
+
 Release 0.21.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/build.xml?rev=931243&r1=931242&r2=931243&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/build.xml (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/build.xml Tue Apr  6 17:50:28 2010
@@ -60,6 +60,7 @@
   	<and>
 	    <or>
 	    	<equals arg1="${testcase}" arg2="TestProxyFilter" />
+	    	<equals arg1="${testcase}" arg2="TestAuthorizationFilter" />
 	    	<equals arg1="${testcase}" arg2="TestLdapIpDirFilter" />
 				<equals arg1="${testcase}" arg2="TestProxyUtil" />
 				<equals arg1="${testcase}" arg2="TestProxyForwardServlet" />
@@ -222,7 +223,7 @@
 	<target name="test" depends="compile,compile-test,test-junit,test-cactus" description="Automated Test Framework" if="test.available"/>
 	
 	<target name="test-junit" depends="compile,compile-test" if="test.available">
-		<junit fork="yes" printsummary="yes" errorProperty="tests.failed" failureProperty="tests.failed">
+		<junit fork="yes" printsummary="withOutAndErr" errorProperty="tests.failed" failureProperty="tests.failed">
         <classpath refid="test.classpath"/>
         <sysproperty key="test.build.data" value="${build.test}/data"/>
 	      <sysproperty key="build.test" value="${build.test}"/>
@@ -249,6 +250,7 @@
             <fileset dir="${src.test}">
             	<include name="**/${testcase}.java"/>
             	<exclude name="**/TestProxyFilter.java"/>
+            	<exclude name="**/TestAuthorizationFilter.java"/>
             	<exclude name="**/TestLdapIpDirFilter.java"/>
             	<exclude name="**/TestProxyUtil.java"/>
             	<exclude name="**/TestProxyForwardServlet.java"/>
@@ -289,7 +291,7 @@
 		<copy file="${tomcat.conf.test}/web.xml" tofile="${tomcatconfig.dir}/conf/web.xml"/>
 		<copy file="${tomcat.conf.test}/tomcat-users.xml" tofile="${tomcatconfig.dir}/conf/tomcat-users.xml"/>
 	
-		<cactus warfile="${target.dir}/${cactus.warfile.name}.war" fork="yes" haltonfailure="no" printsummary="yes" failureproperty="tests.failed">
+		<cactus warfile="${target.dir}/${cactus.warfile.name}.war" fork="yes" haltonfailure="no" printsummary="withOutAndErr" failureproperty="tests.failed">
 			<classpath>
 				<path refid="cactus.classpath"/>
 				<pathelement location="${build.classes}"/>

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/hdfsproxy-default.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/hdfsproxy-default.xml?rev=931243&r1=931242&r2=931243&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/hdfsproxy-default.xml (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/hdfsproxy-default.xml Tue Apr  6 17:50:28 2010
@@ -100,6 +100,29 @@
   buffered during read and write operations.</description>
 </property>
 
+    <property>
+        <name>hdfsproxy.kerberos.principal</name>
+        <value>user@REALM</value>
+        <description> kerberos principal to be used by hdfsproxy </description>
+    </property>
+
+    <property>
+        <name>hdfsproxy.kerberos.keytab</name>
+        <value>proxy.prod.headless.keytab</value>
+        <description> kerberos keytab to be used by hdfsproxy </description>
+    </property>
+
+    <property>
+        <name>hdfsproxy.kerberos.default.realm</name>
+        <value>/instance@REALM</value>
+        <description> kerberos default realm appended to non-qualified userIds </description>
+    </property>
+
+    <property>
+        <name>dfs.namenode.kerberos.principal</name>
+        <value>hdfs@REALM</value>
+        <description> Namenode user name key.  </description>
+    </property>
 
 </configuration>
 

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/tomcat-web.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/tomcat-web.xml?rev=931243&r1=931242&r2=931243&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/tomcat-web.xml (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/tomcat-web.xml Tue Apr  6 17:50:28 2010
@@ -16,8 +16,8 @@
   limitations under the License.
 -->
 
-<!DOCTYPE web-app 
-    PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN" 
+<!DOCTYPE web-app
+    PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
     "http://java.sun.com/dtd/web-app_2_3.dtd">
 
 <web-app>
@@ -27,7 +27,7 @@
 
     <display-name>HDFS Proxy</display-name>
     <description>
-      get data from grid 
+      get data from grid
     </description>
 
 
@@ -56,17 +56,30 @@
         and comments about this application should be addressed.
       </description>
     </context-param>
-    
+
     <filter>
 	   	<filter-name>ldapIpDirFilter</filter-name>
 	   	<filter-class>org.apache.hadoop.hdfsproxy.LdapIpDirFilter</filter-class>
 		</filter>
 
-		<filter-mapping>
+    <filter>
+        <filter-name>authorizationFilter</filter-name>
+        <filter-class>org.apache.hadoop.hdfsproxy.AuthorizationFilter</filter-class>
+    </filter>
+
+    <filter-mapping>
         <filter-name>ldapIpDirFilter</filter-name>
-				<url-pattern>/*</url-pattern>
+        <url-pattern>/*</url-pattern>
+        <dispatcher>REQUEST</dispatcher>
+        <dispatcher>FORWARD</dispatcher>
+    </filter-mapping>
+
+    <filter-mapping>
+        <filter-name>authorizationFilter</filter-name>
+        <url-pattern>/*</url-pattern>
+        <dispatcher>REQUEST</dispatcher>
+        <dispatcher>FORWARD</dispatcher>
     </filter-mapping>
-    	
 
     <!-- Servlet definitions for the servlets that make up
          your web application, including initialization
@@ -93,13 +106,13 @@
          You can define any number of servlets, including zero.
     -->
 
-    
+
     <servlet>
     	<servlet-name>listPaths</servlet-name>
       <description>list paths data access</description>
       <servlet-class>org.apache.hadoop.hdfsproxy.ProxyListPathsServlet</servlet-class>
     </servlet>
-    
+
     <servlet-mapping>
         <servlet-name>listPaths</servlet-name>
         <url-pattern>/listPaths/*</url-pattern>
@@ -110,23 +123,23 @@
       <description>data access</description>
       <servlet-class>org.apache.hadoop.hdfsproxy.ProxyFileDataServlet</servlet-class>
     </servlet>
-    
+
 	  <servlet-mapping>
         <servlet-name>data</servlet-name>
         <url-pattern>/data/*</url-pattern>
     </servlet-mapping>
-    
+
     <servlet>
     	<servlet-name>streamFile</servlet-name>
       <description>stream file access</description>
       <servlet-class>org.apache.hadoop.hdfsproxy.ProxyStreamFile</servlet-class>
     </servlet>
-    
+
     <servlet-mapping>
         <servlet-name>streamFile</servlet-name>
         <url-pattern>/streamFile/*</url-pattern>
     </servlet-mapping>
-    
+
 
 		<welcome-file-list>
 		  <welcome-file>index.html</welcome-file>
@@ -139,7 +152,7 @@
 
     <session-config>
       <session-timeout>30</session-timeout>    <!-- 30 minutes -->
-    </session-config>    
+    </session-config>
 
 
 </web-app>

Added: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/AuthorizationFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/AuthorizationFilter.java?rev=931243&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/AuthorizationFilter.java (added)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/AuthorizationFilter.java Tue Apr  6 17:50:28 2010
@@ -0,0 +1,142 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfsproxy;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import javax.servlet.*;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.util.List;
+import java.util.regex.Pattern;
+
+public class AuthorizationFilter implements Filter {
+  public static final Log LOG = LogFactory.getLog(AuthorizationFilter.class);
+
+  /** Pattern for a filter to find out if a request is HFTP/HSFTP request */
+  protected static final Pattern HFTP_PATTERN = Pattern
+      .compile("^(/listPaths|/data|/streamFile|/file)$");
+  /**
+   * Pattern for a filter to find out if an HFTP/HSFTP request stores its file
+   * path in the extra path information associated with the URL; if not, the
+   * file path is stored in request parameter "filename"
+   */
+  protected static final Pattern FILEPATH_PATTERN = Pattern
+      .compile("^(/listPaths|/data|/file)$");
+
+  /** {@inheritDoc} **/
+  public void init(FilterConfig filterConfig) throws ServletException {
+  }
+
+  /** {@inheritDoc} **/
+  @SuppressWarnings("unchecked")
+  public void doFilter(ServletRequest request,
+                       ServletResponse response,
+                       FilterChain chain)
+      throws IOException, ServletException {
+
+    HttpServletResponse rsp = (HttpServletResponse) response;
+    HttpServletRequest rqst = (HttpServletRequest) request;
+
+    String userId = getUserId(request);
+    String groups = getGroups(request);
+    List<Path> allowedPaths = getAllowedPaths(request);
+
+    UserGroupInformation ugi =
+        UserGroupInformation.createRemoteUser(userId);
+
+    String filePath = getPathFromRequest(rqst);
+
+    if (filePath == null || !checkHdfsPath(filePath, allowedPaths)) {
+      String msg = "User " + userId + " (" + groups
+          + ") is not authorized to access path " + filePath;
+      LOG.warn(msg);
+      rsp.sendError(HttpServletResponse.SC_FORBIDDEN, msg);
+      return;
+    }
+    request.setAttribute("authorized.ugi", ugi);
+
+    LOG.info("User: " + userId + "(" + groups +
+        ") Request: " + rqst.getPathInfo() + " From: " +
+        rqst.getRemoteAddr());
+
+    chain.doFilter(request, response);
+  }
+
+  protected String getUserId(ServletRequest rqst) {
+    String userId = (String) rqst.
+        getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+    if (userId != null)
+      userId = userId.split("[/@]")[0];
+    return userId;
+  }
+
+  protected String getGroups(ServletRequest rqst) {
+    return (String) rqst.
+        getAttribute("org.apache.hadoop.hdfsproxy.authorized.role");
+  }
+
+  @SuppressWarnings("unchecked")
+  protected List<Path> getAllowedPaths(ServletRequest request) {
+    return (List<Path>)request.
+        getAttribute("org.apache.hadoop.hdfsproxy.authorized.paths");
+  }
+
+  protected String getPathFromRequest(HttpServletRequest rqst) {
+    String filePath = null;
+    // check request path
+    String servletPath = rqst.getServletPath();
+    if (HFTP_PATTERN.matcher(servletPath).matches()) {
+      // request is an HSFTP request
+      if (FILEPATH_PATTERN.matcher(servletPath).matches()) {
+        // file path as part of the URL
+        filePath = rqst.getPathInfo() != null ? rqst.getPathInfo() : "/";
+      } else {
+        // file path is stored in "filename" parameter
+        filePath = rqst.getParameter("filename");
+      }
+    }
+    return filePath;
+  }
+
+  /** check that the requested path is listed in the ldap entry */
+  protected boolean checkHdfsPath(String pathInfo, List<Path> allowedPaths) {
+    if (pathInfo == null || pathInfo.length() == 0) {
+      LOG.info("Can't get file path from the request");
+      return false;
+    }
+    Path userPath = new Path(pathInfo);
+    while (userPath != null) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("\n Checking file path " + userPath);
+      }
+      if (allowedPaths.contains(userPath))
+        return true;
+      userPath = userPath.getParent();
+    }
+    return false;
+  }
+
+  /** {@inheritDoc} **/
+  public void destroy() {
+  }
+}

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/LdapIpDirFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/LdapIpDirFilter.java?rev=931243&r1=931242&r2=931243&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/LdapIpDirFilter.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/LdapIpDirFilter.java Tue Apr  6 17:50:28 2010
@@ -61,20 +61,31 @@ public class LdapIpDirFilter implements 
   private static String hdfsPathSchemaStr;
 
   private InitialLdapContext lctx;
-  private String userId;
-  private String groupName;
-  private ArrayList<String> paths;
-
-  /** Pattern for a filter to find out if a request is HFTP/HSFTP request */
-  protected static final Pattern HFTP_PATTERN = Pattern
-      .compile("^(/listPaths|/data|/streamFile|/file)$");
-  /**
-   * Pattern for a filter to find out if an HFTP/HSFTP request stores its file
-   * path in the extra path information associated with the URL; if not, the
-   * file path is stored in request parameter "filename"
-   */
-  protected static final Pattern FILEPATH_PATTERN = Pattern
-      .compile("^(/listPaths|/data|/file)$");
+
+  private class LdapRoleEntry {
+    String userId;
+    String groupNames;
+    ArrayList<Path> paths;
+
+    void init(String userId, String groupNames, ArrayList<Path> paths) {
+      this.userId = userId;
+      this.groupNames = groupNames;
+      this.paths = paths;
+    }
+
+    boolean contains(Path path) {
+      return paths != null && paths.contains(path);
+    }
+
+    @Override
+    public String toString() {
+      return "LdapRoleEntry{" +
+          "groupName='" + groupNames + '\'' +
+          ", userId='" + userId + '\'' +
+          ", paths=" + paths +
+          '}';
+    }
+  }
 
   public void initialize(String bName, InitialLdapContext ctx) {
     // hook to cooperate unit test
@@ -85,7 +96,6 @@ public class LdapIpDirFilter implements 
     hdfsGroupSchemaStr = "userClass";
     hdfsPathSchemaStr = "documentLocation";
     lctx = ctx;
-    paths = new ArrayList<String>();
   }
 
   /** {@inheritDoc} */
@@ -95,11 +105,7 @@ public class LdapIpDirFilter implements 
     conf.addResource("hdfsproxy-default.xml");
     conf.addResource("hdfsproxy-site.xml");
     // extract namenode from source conf.
-    String nn = conf.get("fs.default.name");
-    if (nn == null) {
-      throw new ServletException(
-          "Proxy source cluster name node address not speficied");
-    }
+    String nn = ProxyUtil.getNamenode(conf);
     InetSocketAddress nAddr = NetUtils.createSocketAddr(nn);
     context.setAttribute("name.node.address", nAddr);
     context.setAttribute("name.conf", conf);
@@ -129,13 +135,11 @@ public class LdapIpDirFilter implements 
       hdfsIpSchemaStrPrefix = conf.get(
           "hdfsproxy.ldap.ip.schema.string.prefix", "cn=");
       hdfsUidSchemaStr = conf.get("hdfsproxy.ldap.uid.schema.string", "uid");
-      hdfsGroupSchemaStr = conf.get("hdfsproxy.ldap.group.schema.string",
-          "userClass");
+      hdfsGroupSchemaStr = conf.get("hdfsproxy.ldap.group.schema.string", "userClass");
       hdfsPathSchemaStr = conf.get("hdfsproxy.ldap.hdfs.path.schema.string",
           "documentLocation");
-      paths = new ArrayList<String>();
     }
-    LOG.info("LdapIpDirFilter initialization success: " + nn);
+    LOG.info("LdapIpDirFilter initialization successful");
   }
 
   /** {@inheritDoc} */
@@ -163,64 +167,57 @@ public class LdapIpDirFilter implements 
       b.append("\n The Servlet Path is " + rqst.getServletPath());
       LOG.debug(b.toString());
     }
+    LdapRoleEntry ldapent = new LdapRoleEntry();
     // check ip address
     String userIp = rqst.getRemoteAddr();
-    boolean isAuthorized = false;
     try {
-      isAuthorized = checkUserIp(userIp);
+      boolean isAuthorized = getLdapRoleEntryFromUserIp(userIp, ldapent);
       if (!isAuthorized) {
-        rsp.sendError(HttpServletResponse.SC_FORBIDDEN,
-            "IP not authorized to access");
+        rsp.sendError(HttpServletResponse.SC_FORBIDDEN, "IP " + userIp
+            + " is not authorized to access");
         return;
       }
     } catch (NamingException ne) {
-      throw new IOException("NameingException in searching ldap"
+      throw new IOException("NamingException while searching ldap"
           + ne.toString());
     }
-    // check request path
-    String servletPath = rqst.getServletPath();
-    if (HFTP_PATTERN.matcher(servletPath).matches()) {
-      // request is an HSFTP request
-      if (FILEPATH_PATTERN.matcher(servletPath).matches()) {
-        // file path as part of the URL
-        isAuthorized = checkHdfsPath(rqst.getPathInfo() != null ? rqst
-            .getPathInfo() : "/");
-      } else {
-        // file path is stored in "filename" parameter
-        isAuthorized = checkHdfsPath(rqst.getParameter("filename"));
-      }
-    }
-    if (!isAuthorized) {
-      rsp.sendError(HttpServletResponse.SC_FORBIDDEN,
-          "User not authorized to access path");
-      return;
-    }
-    UserGroupInformation ugi = UserGroupInformation.createRemoteUser(userId);
-    rqst.setAttribute("authorized.ugi", ugi);
-    // since we cannot pass ugi object cross context as they are from different
+
+    // since we cannot pass ugi object cross context as they are from
+    // different
     // classloaders in different war file, we have to use String attribute.
-    rqst.setAttribute("org.apache.hadoop.hdfsproxy.authorized.userID", userId);
-    rqst.setAttribute("org.apache.hadoop.hdfsproxy.authorized.role", groupName);
-    LOG.info("User: " + userId + " (" + groupName + ") Request: "
-        + rqst.getPathInfo() + " From: " + rqst.getRemoteAddr());
+    rqst.setAttribute("org.apache.hadoop.hdfsproxy.authorized.userID",
+        ldapent.userId);
+    rqst.setAttribute("org.apache.hadoop.hdfsproxy.authorized.role",
+        ldapent.groupNames);
+    rqst.setAttribute("org.apache.hadoop.hdfsproxy.authorized.paths",
+        ldapent.paths);
+    LOG.info("User: " + ldapent.userId + ", Request: " + rqst.getPathInfo() +
+            " From: " + rqst.getRemoteAddr());
     chain.doFilter(request, response);
   }
 
-  /** check that client's ip is listed in the Ldap Roles */
+  /**
+   * Check if the client's ip is listed in the Ldap Roles; if yes, return true
+   * and update ldapent, otherwise return false.
+   */
   @SuppressWarnings("unchecked")
-  private boolean checkUserIp(String userIp) throws NamingException {
+  private boolean getLdapRoleEntryFromUserIp(String userIp,
+      LdapRoleEntry ldapent) throws NamingException {
     String ipMember = hdfsIpSchemaStrPrefix + userIp;
     Attributes matchAttrs = new BasicAttributes(true);
     matchAttrs.put(new BasicAttribute(hdfsIpSchemaStr, ipMember));
     matchAttrs.put(new BasicAttribute(hdfsUidSchemaStr));
+    matchAttrs.put(new BasicAttribute(hdfsGroupSchemaStr));
     matchAttrs.put(new BasicAttribute(hdfsPathSchemaStr));
 
-    String[] attrIDs = { hdfsUidSchemaStr, hdfsGroupSchemaStr,
-        hdfsPathSchemaStr };
+    String[] attrIDs = { hdfsUidSchemaStr, hdfsGroupSchemaStr, hdfsPathSchemaStr };
 
     NamingEnumeration<SearchResult> results = lctx.search(baseName, matchAttrs,
         attrIDs);
     if (results.hasMore()) {
+      String userId = null;
+      String groupNames = null;
+      ArrayList<Path> paths = new ArrayList<Path>();
       SearchResult sr = results.next();
       Attributes attrs = sr.getAttributes();
       for (NamingEnumeration ne = attrs.getAll(); ne.hasMore();) {
@@ -228,36 +225,20 @@ public class LdapIpDirFilter implements 
         if (hdfsUidSchemaStr.equalsIgnoreCase(attr.getID())) {
           userId = (String) attr.get();
         } else if (hdfsGroupSchemaStr.equalsIgnoreCase(attr.getID())) {
-          groupName = (String) attr.get();
+          groupNames = (String) attr.get();
         } else if (hdfsPathSchemaStr.equalsIgnoreCase(attr.getID())) {
           for (NamingEnumeration e = attr.getAll(); e.hasMore();) {
-            paths.add((String) e.next());
+            String pathStr = (String) e.next();
+            paths.add(new Path(pathStr));
           }
         }
       }
+      ldapent.init(userId, groupNames, paths);
+      if (LOG.isDebugEnabled()) LOG.debug(ldapent);
       return true;
     }
     LOG.info("Ip address " + userIp
         + " is not authorized to access the proxy server");
     return false;
   }
-
-  /** check that the requested path is listed in the ldap entry */
-  private boolean checkHdfsPath(String pathInfo) {
-    if (pathInfo == null || pathInfo.length() == 0) {
-      LOG.info("Can't get file path from the request");
-      return false;
-    }
-    Path userPath = new Path(pathInfo);
-    while (userPath != null) {
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("\n Checking file path " + userPath);
-      }
-      if (paths.contains(userPath.toString()))
-        return true;
-      userPath = userPath.getParent();
-    }
-    LOG.info("User " + userId + " is not authorized to access " + pathInfo);
-    return false;
-  }
 }

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java?rev=931243&r1=931242&r2=931243&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java Tue Apr  6 17:50:28 2010
@@ -21,8 +21,6 @@ import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
 
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 
 import org.apache.hadoop.conf.Configuration;
@@ -31,8 +29,6 @@ import org.apache.hadoop.hdfs.protocol.H
 import org.apache.hadoop.hdfs.server.namenode.FileDataServlet;
 import org.apache.hadoop.security.UserGroupInformation;
 
-import org.apache.hadoop.hdfs.HdfsConfiguration;
-
 /** {@inheritDoc} */
 public class ProxyFileDataServlet extends FileDataServlet {
   /** For java.io.Serializable */
@@ -40,20 +36,11 @@ public class ProxyFileDataServlet extend
 
   /** {@inheritDoc} */
   @Override
-  public void init() throws ServletException {
-    ServletContext context = getServletContext();
-    if (context.getAttribute("name.conf") == null) {
-      context.setAttribute("name.conf", new HdfsConfiguration());
-    }
-  }
-
-  /** {@inheritDoc} */
-  @Override
   protected URI createUri(String parent, HdfsFileStatus i, UserGroupInformation ugi,
       ClientProtocol nnproxy, HttpServletRequest request) throws IOException,
       URISyntaxException {
     return new URI(request.getScheme(), null, request.getServerName(), request
-        .getServerPort(), "/streamFile", "filename=" + i.getFullName(parent) 
+        .getServerPort(), "/streamFile", "filename=" + i.getFullName(parent)
         + "&ugi=" + ugi.getShortUserName(), null);
   }
 
@@ -63,6 +50,6 @@ public class ProxyFileDataServlet extend
                                         Configuration conf) {
     String userID = (String) request
         .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
-    return UserGroupInformation.createRemoteUser(userID);
+    return ProxyUtil.getProxyUGIFor(userID);
   }
 }

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileForward.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileForward.java?rev=931243&r1=931242&r2=931243&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileForward.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileForward.java Tue Apr  6 17:50:28 2010
@@ -31,8 +31,9 @@ public class ProxyFileForward extends Pr
   protected String buildForwardPath(HttpServletRequest request, String pathInfo) {
     String path = "/streamFile";
     path += "?filename=" + request.getPathInfo();
-    UserGroupInformation ugi = 
-                   (UserGroupInformation)request.getAttribute("authorized.ugi");
+    String userID = (String) request.
+        getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+    UserGroupInformation ugi = ProxyUtil.getProxyUGIFor(userID);
     if (ugi != null) {
       path += "&ugi=" + ugi.getShortUserName();
     }

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java?rev=931243&r1=931242&r2=931243&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java Tue Apr  6 17:50:28 2010
@@ -66,8 +66,11 @@ public class ProxyForwardServlet extends
     ServletContext curContext = getServletContext();
     ServletContext dstContext = curContext.getContext(version);
 
-    if (dstContext == null) {
-      LOG.info("Context non-exist or restricted from access: " + version);
+    // avoid infinite forwarding.
+    if (dstContext == null
+        || getServletContext().equals(dstContext)) {
+      LOG.error("Context (" + version
+          + ".war) does not exist or is restricted from access");
       response.sendError(HttpServletResponse.SC_NOT_FOUND);
       return;
     }

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java?rev=931243&r1=931242&r2=931243&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java Tue Apr  6 17:50:28 2010
@@ -17,14 +17,11 @@
  */
 package org.apache.hadoop.hdfsproxy;
 
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.server.namenode.ListPathsServlet;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.conf.Configuration;
 
 /** {@inheritDoc} */
 public class ProxyListPathsServlet extends ListPathsServlet {
@@ -33,19 +30,10 @@ public class ProxyListPathsServlet exten
 
   /** {@inheritDoc} */
   @Override
-  public void init() throws ServletException {
-    ServletContext context = getServletContext();
-    if (context.getAttribute("name.conf") == null) {
-      context.setAttribute("name.conf", new HdfsConfiguration());
-    }
-  }
-
-  /** {@inheritDoc} */
-  @Override
   protected UserGroupInformation getUGI(HttpServletRequest request,
                                         Configuration conf) {
     String userID = (String) request
         .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
-    return UserGroupInformation.createRemoteUser(userID);
+    return ProxyUtil.getProxyUGIFor(userID);
   }
 }

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java?rev=931243&r1=931242&r2=931243&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java Tue Apr  6 17:50:28 2010
@@ -22,14 +22,12 @@ import java.net.InetSocketAddress;
 import java.security.PrivilegedExceptionAction;
 
 import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSClient;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.server.namenode.StreamFile;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.conf.Configuration;
 
 /** {@inheritDoc} */
 public class ProxyStreamFile extends StreamFile {
@@ -38,31 +36,22 @@ public class ProxyStreamFile extends Str
 
   /** {@inheritDoc} */
   @Override
-  public void init() throws ServletException {
-    ServletContext context = getServletContext();
-    if (context.getAttribute("name.conf") == null) {
-      context.setAttribute("name.conf", new HdfsConfiguration());
-    }
-  }
-
-  /** {@inheritDoc} */
-  @Override
   protected DFSClient getDFSClient(HttpServletRequest request)
       throws IOException, InterruptedException {
     ServletContext context = getServletContext();
-    final Configuration conf = 
-      (Configuration) context.getAttribute("name.conf");
-    final InetSocketAddress nameNodeAddr = (InetSocketAddress) context
-        .getAttribute("name.node.address");
-    
+    final Configuration conf =
+        (Configuration) context.getAttribute("name.conf");
+    final InetSocketAddress nameNodeAddr =
+        (InetSocketAddress) context.getAttribute("name.node.address");
+
     DFSClient client = getUGI(request, conf).doAs
-      ( new PrivilegedExceptionAction<DFSClient>() {
-      @Override
-      public DFSClient run() throws IOException {
-        return new DFSClient(nameNodeAddr, conf);
-      }
-    });
-    
+        ( new PrivilegedExceptionAction<DFSClient>() {
+          @Override
+          public DFSClient run() throws IOException {
+            return new DFSClient(nameNodeAddr, conf);
+          }
+        });
+
     return client;
   }
 
@@ -72,8 +61,7 @@ public class ProxyStreamFile extends Str
                                         Configuration conf) {
     String userID = (String) request
         .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
-
-    return UserGroupInformation.createRemoteUser(userID);
+    return ProxyUtil.getProxyUGIFor(userID);
   }
 
 }

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java?rev=931243&r1=931242&r2=931243&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java Tue Apr  6 17:50:28 2010
@@ -41,6 +41,7 @@ import javax.net.ssl.TrustManager;
 import javax.net.ssl.TrustManagerFactory;
 import javax.net.ssl.X509TrustManager;
 import javax.servlet.http.HttpServletResponse;
+import javax.servlet.ServletException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -50,6 +51,7 @@ import org.apache.hadoop.fs.FSInputStrea
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.HostsFileReader;
+import org.apache.hadoop.security.UserGroupInformation;
 
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 
@@ -332,4 +334,25 @@ public class ProxyUtil {
     }
   }
 
+  public static String getNamenode(Configuration conf)
+      throws ServletException {
+    String namenode = conf.get("fs.default.name");
+    if (namenode == null) {
+      throw new
+          ServletException("Proxy source cluster name node address missing");
+    }
+    return namenode;
+  }
+
+  public static UserGroupInformation getProxyUGIFor(String userID) {
+    try {
+      return UserGroupInformation.
+          createProxyUser(userID, UserGroupInformation.getLoginUser());
+    } catch (IOException e) {
+      throw new
+          RuntimeException("Unable to get the current logged-in user", e);
+    }
+  }
+
+
 }

Added: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestAuthorizationFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestAuthorizationFilter.java?rev=931243&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestAuthorizationFilter.java (added)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestAuthorizationFilter.java Tue Apr  6 17:50:28 2010
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.List;
+import java.util.ArrayList;
+
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+
+import org.apache.cactus.FilterTestCase;
+import org.apache.cactus.WebRequest;
+import org.apache.cactus.WebResponse;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.Path;
+
+public class TestAuthorizationFilter extends FilterTestCase {
+
+  public static final Log LOG = LogFactory.getLog(TestAuthorizationFilter.class);
+
+  private class DummyFilterChain implements FilterChain {
+    public void doFilter(ServletRequest theRequest, ServletResponse theResponse)
+        throws IOException, ServletException {
+      PrintWriter writer = theResponse.getWriter();
+
+      writer.print("<p>some content</p>");
+      writer.close();
+    }
+
+    public void init(FilterConfig theConfig) {
+    }
+
+    public void destroy() {
+    }
+  }
+
+  public void beginPathRestriction(WebRequest theRequest) {
+    theRequest.setURL("proxy-test:0", null, "/streamFile", null,
+        "filename=/nontestdir");
+  }
+
+  public void testPathRestriction() throws ServletException, IOException {
+    AuthorizationFilter filter = new AuthorizationFilter();
+    request.setRemoteIPAddress("127.0.0.1");
+    request.setAttribute("org.apache.hadoop.hdfsproxy.authorized.userID",
+        System.getProperty("user.name"));
+    request.setAttribute("org.apache.hadoop.hdfsproxy.authorized.role",
+        "users");
+    List<Path> paths = new ArrayList<Path>();
+    paths.add(new Path("/deny"));
+    request.setAttribute("org.apache.hadoop.hdfsproxy.authorized.paths",
+        paths);
+    FilterChain mockFilterChain = new DummyFilterChain();
+    filter.doFilter(request, response, mockFilterChain);
+  }
+
+  public void endPathRestriction(WebResponse theResponse) {
+    assertEquals(theResponse.getStatusCode(), 403);
+    assertTrue("Text missing 'User not authorized to access path' : : ["
+        + theResponse.getText() + "]", theResponse.getText().indexOf(
+        "is not authorized to access path") > 0);
+  }
+}

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java?rev=931243&r1=931242&r2=931243&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java Tue Apr  6 17:50:28 2010
@@ -205,6 +205,11 @@ public class TestHdfsProxy extends TestC
     try {
 
       final Configuration dfsConf = new HdfsConfiguration();
+      dfsConf.set("hadoop.proxyuser." + System.getProperty("user.name") +
+          ".users", "users");
+      dfsConf.set("hadoop.proxyuser.users.ip-addresses", "localhost");
+      dfsConf.set("hadoop.proxyuser." + System.getProperty("user.name") +
+          ".ip-addresses", "localhost");
       cluster = new MiniDFSCluster(dfsConf, 2, true, null);
       cluster.waitActive();
 

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestLdapIpDirFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestLdapIpDirFilter.java?rev=931243&r1=931242&r2=931243&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestLdapIpDirFilter.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestLdapIpDirFilter.java Tue Apr  6 17:50:28 2010
@@ -72,33 +72,7 @@ public class TestLdapIpDirFilter extends
     assertEquals(theResponse.getStatusCode(), 403);
     assertTrue("Text missing 'IP not authorized to access' : : ["
         + theResponse.getText() + "]", theResponse.getText().indexOf(
-        "IP not authorized to access") > 0);
-  }
-
-  public void beginPathRestriction(WebRequest theRequest) {
-    theRequest.setURL("proxy-test:0", null, "/streamFile", null,
-        "filename=/nontestdir");
-  }
-
-  public void testPathRestriction() throws ServletException, IOException,
-      NamingException {
-    LdapIpDirFilter filter = new LdapIpDirFilter();
-    String baseName = "ou=proxyroles,dc=mycompany,dc=com";
-    DummyLdapContext dlc = new DummyLdapContext();
-    filter.initialize(baseName, dlc);
-    request.setRemoteIPAddress("127.0.0.1");
-    request.removeAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
-    FilterChain mockFilterChain = new DummyFilterChain();
-    filter.doFilter(request, response, mockFilterChain);
-    assertNull(request
-        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID"));
-  }
-
-  public void endPathRestriction(WebResponse theResponse) {
-    assertEquals(theResponse.getStatusCode(), 403);
-    assertTrue("Text missing 'User not authorized to access path' : : ["
-        + theResponse.getText() + "]", theResponse.getText().indexOf(
-        "User not authorized to access path") > 0);
+        "not authorized to access") > 0);
   }
 
   public void beginDoFilter(WebRequest theRequest) {


