hadoop-hdfs-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From cdoug...@apache.org
Subject svn commit: r788898 - in /hadoop/hdfs/trunk: ./ src/contrib/hdfsproxy/ src/contrib/hdfsproxy/conf/ src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/ src/contrib/hdfsproxy/src/test/re...
Date Fri, 26 Jun 2009 22:48:24 GMT
Author: cdouglas
Date: Fri Jun 26 22:48:23 2009
New Revision: 788898

URL: http://svn.apache.org/viewvc?rev=788898&view=rev
Log:
HDFS-447. Add LDAP lookup to hdfsproxy. Contributed by Zhiyong Zhang

Added:
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/LdapIpDirFilter.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/DummyLdapContext.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestLdapIpDirFilter.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/resources/tomcat-web.xml
Modified:
    hadoop/hdfs/trunk/CHANGES.txt
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/build.xml
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/hdfsproxy-default.xml
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/tomcat-forward-web.xml
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/tomcat-web.xml
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-default.xml
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/user-certs.xml
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HsftpFileSystem.java

Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=788898&r1=788897&r2=788898&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Fri Jun 26 22:48:23 2009
@@ -7,6 +7,8 @@
     HDFS-436. Introduce AspectJ framework for HDFS code and tests.
     (Konstantin Boudnik via szetszwo)
 
+    HDFS-447. Add LDAP lookup to hdfsproxy. (Zhiyong Zhang via cdouglas)
+
   IMPROVEMENTS
 
     HDFS-381. Remove blocks from DataNode maps when corresponding file

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/build.xml?rev=788898&r1=788897&r2=788898&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/build.xml (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/build.xml Fri Jun 26 22:48:23 2009
@@ -60,6 +60,7 @@
   	<and>
 	    <or>
 	    	<equals arg1="${testcase}" arg2="TestProxyFilter" />
+	    	<equals arg1="${testcase}" arg2="TestLdapIpDirFilter" />
 				<equals arg1="${testcase}" arg2="TestProxyUtil" />
 				<equals arg1="${testcase}" arg2="TestProxyForwardServlet" />
 				<not>
@@ -118,22 +119,21 @@
 	  <war destfile="${build.dir}/${final.name}.war" webxml="${basedir}/conf/tomcat-web.xml">
 	    <lib dir="${common.ivy.lib.dir}">
 	      <include name="commons-logging-${commons-logging.version}.jar"/>
-              <include name="junit-${junit.version}.jar"/>
-              <include name="log4j-${log4j.version}.jar"/>
-              <include name="slf4j-api-${slf4j-api.version}.jar"/>
-              <include name="slf4j-log4j12-${slf4j-log4j12.version}.jar"/>
-              <include name="xmlenc-${xmlenc.version}.jar"/>
-              <include name="core-${core.version}.jar"/> 
+        <include name="junit-${junit.version}.jar"/>
+        <include name="log4j-${log4j.version}.jar"/>
+        <include name="slf4j-api-${slf4j-api.version}.jar"/>
+        <include name="slf4j-log4j12-${slf4j-log4j12.version}.jar"/>
+        <include name="xmlenc-${xmlenc.version}.jar"/>
+        <include name="core-${core.version}.jar"/> 
+	    </lib>
+	    <lib dir="${hadoop.root}/lib">
+	    	<include name="hadoop-core-${hadoop-version}.jar"/>
 	    </lib>
-            <lib dir="${hadoop.root}/lib">
-              <include name="hadoop-mapred-tools-${hadoop-version}.jar"/>
-              <include name="hadoop-mapred-examples-${hadoop-version}.jar"/>
-              <include name="hadoop-mapred-test-${hadoop-version}.jar"/>
-              <include name="hadoop-core-test-${hadoop-version}.jar"/>
-              <include name="hadoop-core-${hadoop-version}.jar"/>
-              <include name="hadoop-mapred-test-${hadoop-version}.jar"/>
-            </lib>  
-	    <classes dir="${proxy.conf.dir}" excludes="**/*.example **/*.template **/*.sh hadoop-site.xml"/>
+	    <classes dir="${proxy.conf.dir}">
+	    	<include name="hdfsproxy-default.xml"/>
+	    	<include name="user-certs.xml"/>
+	    	<include name="user-permissions.xml"/>
+	    </classes>
 	    <classes dir="${build.classes}"/>
 	    <classes dir="${hadoop.root}/build/classes"/>
 	  </war>
@@ -153,40 +153,69 @@
         <include name="xmlenc-${xmlenc.version}.jar"/>
        <include name="core-${core.version}.jar"/> 
 	    </lib>
-	    <classes dir="${proxy.conf.dir}" excludes="**/*.example **/*.template **/*.sh hadoop-site.xml"/>
+	    <lib dir="${hadoop.root}/lib">
+		<include name="hadoop-core-${hadoop-version}.jar"/>
+	    </lib>
+	    <classes dir="${proxy.conf.dir}">
+	    	<include name="hdfsproxy-default.xml"/>
+	    	<include name="hdfsproxy-site.xml"/>
+	    	<include name="user-certs.xml"/>
+	    	<include name="user-permissions.xml"/>
+	    </classes>
+	    <classes dir="${build.classes}"/>
+	    <classes dir="${hadoop.root}/build/classes"/>
+	  </war>
+	</target>
+	
+	<target name="testwar" depends="compile" description="Create testing war">
+		<echo>
+			Building the testing .war file 
+		</echo>
+	  <war destfile="${build.dir}/${final.name}-test.war" webxml="${src.test.resources}/tomcat-web.xml">
+	    <lib dir="${common.ivy.lib.dir}">
+	      <include name="commons-logging-${commons-logging.version}.jar"/>
+        <include name="junit-${junit.version}.jar"/>
+        <include name="log4j-${log4j.version}.jar"/>
+        <include name="slf4j-api-${slf4j-api.version}.jar"/>
+        <include name="slf4j-log4j12-${slf4j-log4j12.version}.jar"/>
+        <include name="xmlenc-${xmlenc.version}.jar"/>
+        <include name="core-${core.version}.jar"/> 
+	    </lib>
+	    <lib dir="${hadoop.root}/lib">
+	    	<include name="hadoop-core-${hadoop-version}.jar"/>
+	    </lib>
+	    <classes dir="${proxy.conf.test}" excludes="**/*.template **/*.sh"/>
 	    <classes dir="${build.classes}"/>
 	    <classes dir="${hadoop.root}/build/classes"/>
 	  </war>
 	</target>	
 	
-	<target name="cactifywar" depends="war,load-tasks,cactifywar-pure,cactifywar-clover" description="To include clover coverage test use -Dclover.home ..."/>
+	<target name="cactifywar" depends="testwar,load-tasks,cactifywar-pure,cactifywar-clover" description="To include clover coverage test use -Dclover.home ..."/>
 	
-	<target name="cactifywar-pure" depends="war,load-tasks" unless="useClover">
+	<target name="cactifywar-pure" depends="testwar,load-tasks" unless="useClover">
 		<mkdir dir="${target.dir}" />
 		<echo> no clover found ...</echo>
-    <cactifywar srcfile="${build.dir}/${final.name}.war"
+    <cactifywar srcfile="${build.dir}/${final.name}-test.war"
         destfile="${target.dir}/${cactus.warfile.name}.war"
         mergewebxml="${src.test.resources}/cactus-web.xml">
       <servletredirector/>
       <servletredirector name="ServletRedirectorSecure"
           mapping="/ServletRedirectorSecure" roles="test"/>
-      <filterredirector mapping="/test/filterRedirector.jsp"/>
-      <classes dir="${proxy.conf.test}" excludes="**/*.template **/*.sh"/>
+      <filterredirector mapping="/test/filterRedirector.jsp"/>      
       <classes dir="${test.build.dir}"/>
     </cactifywar>    	
 	</target>
 
-	<target name="cactifywar-clover" depends="war,load-tasks" if="useClover">
+	<target name="cactifywar-clover" depends="testwar,load-tasks" if="useClover">
 		<mkdir dir="${target.dir}" />
 		<echo> Including clover.jar in the war file ...</echo>
-    <cactifywar srcfile="${build.dir}/${final.name}.war"
+    <cactifywar srcfile="${build.dir}/${final.name}-test.war"
         destfile="${target.dir}/${cactus.warfile.name}.war"
         mergewebxml="${src.test.resources}/cactus-web.xml">
       <servletredirector/>
       <servletredirector name="ServletRedirectorSecure"
           mapping="/ServletRedirectorSecure" roles="test"/>
       <filterredirector mapping="/test/filterRedirector.jsp"/>
-      <classes dir="${proxy.conf.test}" excludes="**/*.template **/*.sh"/>
       <classes dir="${test.build.dir}"/>
       <lib dir="${clover.home}/lib">
       	<include name="clover.jar"/> 
@@ -224,6 +253,7 @@
             <fileset dir="${src.test}">
             	<include name="**/${testcase}.java"/>
             	<exclude name="**/TestProxyFilter.java"/>
+            	<exclude name="**/TestLdapIpDirFilter.java"/>
             	<exclude name="**/TestProxyUtil.java"/>
             	<exclude name="**/TestProxyForwardServlet.java"/>
             </fileset>
@@ -267,7 +297,6 @@
 			<classpath>
 				<path refid="cactus.classpath"/>
 				<pathelement location="${build.classes}"/>
-				<pathelement location="${proxy.conf.dir}"/>
 				<pathelement location="${src.test.resources}"/>
 				<pathelement location="${src.test.resources}/proxy-config"/>
 			</classpath>			
@@ -448,15 +477,12 @@
   	<pathelement location="${proxy.conf.test}" />
     <pathelement location="${test.build.dir}" />
     <pathelement location="${hadoop.root}/build/test/classes"/>
-    <pathelement location="${hadoop.root}/lib/hadoop-core-test-${hadoop-version}.jar"/>
-    <pathelement location="${hadoop.root}/build/test/hdfs/classes"/>
-    <pathelement location="${hadoop.root}/lib/hadoop-mapred-test-${hadoop-version}.jar"/>
     <!--<pathelement location="${hadoop.root}/src/contrib/test"/>-->
     <pathelement location="${hadoop.root}/conf"/>
     <pathelement location="${hadoop.root}/build"/>
     <pathelement location="${hadoop.root}/build/classes"/>
+    <pathelement location="${hadoop.root}/build/tools"/>
     <pathelement location="${build.examples}"/>
-    <pathelement location="${hadoop.root}/lib/hadoop-mapred-tools-${hadoop-version}.jar"/>
     <pathelement path="${clover.jar}"/>
     <path refid="contrib-classpath"/>
   </path>

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/hdfsproxy-default.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/hdfsproxy-default.xml?rev=788898&r1=788897&r2=788898&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/hdfsproxy-default.xml (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/hdfsproxy-default.xml Fri Jun 26 22:48:23 2009
@@ -55,5 +55,51 @@
   </description>
 </property>
 
+<property>
+  <name>hdfsproxy.ldap.initial.context.factory</name>
+  <value>com.sun.jndi.ldap.LdapCtxFactory</value>
+  <description> ldap initial context factory
+  </description>
+</property>
+
+<property>
+  <name>hdfsproxy.ldap.provider.url</name>
+  <value>ldap://localhost:389</value>
+  <description> ldap server address
+  </description>
+</property>
+
+<property>
+  <name>hdfsproxy.ldap.role.base</name>
+  <value>ou=proxyroles,dc=mycompany,dc=com</value>
+  <description> ldap role base
+  </description>
+</property>
+
+<property>
+    <name>fs.default.name</name>
+    <!-- cluster variant -->
+    <value>hdfs://localhost:54321</value>
+    <description>The name of the default file system.  Either the
+  literal string "local" or a host:port for NDFS.</description>
+    <final>true</final>
+  </property>
+
+<property>
+  <name>dfs.block.size</name>
+  <value>134217728</value>
+  <description>The default block size for new files.</description>
+</property>
+
+<property>
+    <name>io.file.buffer.size</name>
+    <value>131072</value>
+    <description>The size of buffer for use in sequence files.
+  The size of this buffer should probably be a multiple of hardware
+  page size (4096 on Intel x86), and it determines how much data is
+  buffered during read and write operations.</description>
+</property>
+
+
 </configuration>
 

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/tomcat-forward-web.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/tomcat-forward-web.xml?rev=788898&r1=788897&r2=788898&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/tomcat-forward-web.xml (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/tomcat-forward-web.xml Fri Jun 26 22:48:23 2009
@@ -38,21 +38,19 @@
         and comments about this application should be addressed.
       </description>
     </context-param>
-    
+          
     <filter>
-	   	<filter-name>proxyFilter</filter-name>
-	   	<filter-class>org.apache.hadoop.hdfsproxy.ProxyFilter</filter-class>
-	   	<init-param>
-	      <param-name>filteraddress</param-name>
-	      <param-value>10</param-value>
-	   	</init-param>
+	   	<filter-name>ldapIpDirFilter</filter-name>
+	   	<filter-class>org.apache.hadoop.hdfsproxy.LdapIpDirFilter</filter-class>
 		</filter>
 
 		<filter-mapping>
-        <filter-name>proxyFilter</filter-name>
+        <filter-name>ldapIpDirFilter</filter-name>
 				<url-pattern>/*</url-pattern>
     </filter-mapping>
 
+
+
     
     <servlet>
     	<servlet-name>proxyForward</servlet-name>
@@ -84,6 +82,7 @@
         <url-pattern>/file/*</url-pattern>
     </servlet-mapping>
     
+    
 
 		<welcome-file-list>
 		  <welcome-file>index.html</welcome-file>

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/tomcat-web.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/tomcat-web.xml?rev=788898&r1=788897&r2=788898&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/tomcat-web.xml (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/conf/tomcat-web.xml Fri Jun 26 22:48:23 2009
@@ -58,21 +58,16 @@
     </context-param>
     
     <filter>
-	   	<filter-name>proxyFilter</filter-name>
-	   	<filter-class>org.apache.hadoop.hdfsproxy.ProxyFilter</filter-class>
-	   	<init-param>
-	      <param-name>filteraddress</param-name>
-	      <param-value>10</param-value>
-	   	</init-param>
+	   	<filter-name>ldapIpDirFilter</filter-name>
+	   	<filter-class>org.apache.hadoop.hdfsproxy.LdapIpDirFilter</filter-class>
 		</filter>
 
 		<filter-mapping>
-        <filter-name>proxyFilter</filter-name>
+        <filter-name>ldapIpDirFilter</filter-name>
 				<url-pattern>/*</url-pattern>
     </filter-mapping>
     	
 
-
     <!-- Servlet definitions for the servlets that make up
          your web application, including initialization
          parameters.  With Tomcat, you can also send requests

Added: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/LdapIpDirFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/LdapIpDirFilter.java?rev=788898&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/LdapIpDirFilter.java (added)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/LdapIpDirFilter.java Fri Jun 26 22:48:23 2009
@@ -0,0 +1,262 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfsproxy;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.util.ArrayList;
+import java.util.Hashtable;
+import java.util.regex.Pattern;
+
+import javax.naming.NamingEnumeration;
+import javax.naming.NamingException;
+import javax.naming.directory.Attribute;
+import javax.naming.directory.Attributes;
+import javax.naming.directory.BasicAttribute;
+import javax.naming.directory.BasicAttributes;
+import javax.naming.directory.SearchResult;
+import javax.naming.ldap.InitialLdapContext;
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.UnixUserGroupInformation;
+
+public class LdapIpDirFilter implements Filter {
+  public static final Log LOG = LogFactory.getLog(LdapIpDirFilter.class);
+
+  private static String baseName;
+  private static String hdfsIpSchemaStr;
+  private static String hdfsIpSchemaStrPrefix;
+  private static String hdfsUidSchemaStr;
+  private static String hdfsGroupSchemaStr;
+  private static String hdfsPathSchemaStr;
+
+  private InitialLdapContext lctx;
+  private String userId;
+  private String groupName;
+  private ArrayList<String> paths;
+
+  /** Pattern for a filter to find out if a request is HFTP/HSFTP request */
+  protected static final Pattern HFTP_PATTERN = Pattern
+      .compile("^(/listPaths|/data|/streamFile|/file)$");
+  /**
+   * Pattern for a filter to find out if an HFTP/HSFTP request stores its file
+   * path in the extra path information associated with the URL; if not, the
+   * file path is stored in request parameter "filename"
+   */
+  protected static final Pattern FILEPATH_PATTERN = Pattern
+      .compile("^(/listPaths|/data|/file)$");
+
+  public void initialize(String bName, InitialLdapContext ctx) {
+    // hook to cooperate unit test
+    baseName = bName;
+    hdfsIpSchemaStr = "uniqueMember";
+    hdfsIpSchemaStrPrefix = "cn=";
+    hdfsUidSchemaStr = "uid";
+    hdfsGroupSchemaStr = "userClass";
+    hdfsPathSchemaStr = "documentLocation";
+    lctx = ctx;
+    paths = new ArrayList<String>();
+  }
+
+  /** {@inheritDoc} */
+  public void init(FilterConfig filterConfig) throws ServletException {
+    ServletContext context = filterConfig.getServletContext();
+    Configuration conf = new Configuration(false);
+    conf.addResource("hdfsproxy-default.xml");
+    conf.addResource("hdfsproxy-site.xml");
+    // extract namenode from source conf.
+    String nn = conf.get("fs.default.name");
+    if (nn == null) {
+      throw new ServletException(
+          "Proxy source cluster name node address not specified");
+    }
+    InetSocketAddress nAddr = NetUtils.createSocketAddr(nn);
+    context.setAttribute("name.node.address", nAddr);
+    context.setAttribute("name.conf", conf);
+
+    // for storing hostname <--> cluster mapping to decide which source cluster
+    // to forward
+    context.setAttribute("org.apache.hadoop.hdfsproxy.conf", conf);
+
+    if (lctx == null) {
+      Hashtable<String, String> env = new Hashtable<String, String>();
+      env.put(InitialLdapContext.INITIAL_CONTEXT_FACTORY, conf.get(
+          "hdfsproxy.ldap.initial.context.factory",
+          "com.sun.jndi.ldap.LdapCtxFactory"));
+      env.put(InitialLdapContext.PROVIDER_URL, conf
+          .get("hdfsproxy.ldap.provider.url"));
+
+      try {
+        lctx = new InitialLdapContext(env, null);
+      } catch (NamingException ne) {
+        throw new ServletException("NamingException in initializing ldap"
+            + ne.toString());
+      }
+
+      baseName = conf.get("hdfsproxy.ldap.role.base");
+      hdfsIpSchemaStr = conf.get("hdfsproxy.ldap.ip.schema.string",
+          "uniqueMember");
+      hdfsIpSchemaStrPrefix = conf.get(
+          "hdfsproxy.ldap.ip.schema.string.prefix", "cn=");
+      hdfsUidSchemaStr = conf.get("hdfsproxy.ldap.uid.schema.string", "uid");
+      hdfsGroupSchemaStr = conf.get("hdfsproxy.ldap.group.schema.string",
+          "userClass");
+      hdfsPathSchemaStr = conf.get("hdfsproxy.ldap.hdfs.path.schema.string",
+          "documentLocation");
+      paths = new ArrayList<String>();
+    }
+    LOG.info("LdapIpDirFilter initialization success: " + nn);
+  }
+
+  /** {@inheritDoc} */
+  public void destroy() {
+  }
+
+  /** {@inheritDoc} */
+  public void doFilter(ServletRequest request, ServletResponse response,
+      FilterChain chain) throws IOException, ServletException {
+
+    HttpServletRequest rqst = (HttpServletRequest) request;
+    HttpServletResponse rsp = (HttpServletResponse) response;
+
+    if (LOG.isDebugEnabled()) {
+      StringBuilder b = new StringBuilder("Request from ").append(
+          rqst.getRemoteHost()).append("/").append(rqst.getRemoteAddr())
+          .append(":").append(rqst.getRemotePort());
+      b.append("\n The Scheme is " + rqst.getScheme());
+      b.append("\n The Path Info is " + rqst.getPathInfo());
+      b.append("\n The Translated Path Info is " + rqst.getPathTranslated());
+      b.append("\n The Context Path is " + rqst.getContextPath());
+      b.append("\n The Query String is " + rqst.getQueryString());
+      b.append("\n The Request URI is " + rqst.getRequestURI());
+      b.append("\n The Request URL is " + rqst.getRequestURL());
+      b.append("\n The Servlet Path is " + rqst.getServletPath());
+      LOG.debug(b.toString());
+    }
+    // check ip address
+    String userIp = rqst.getRemoteAddr();
+    boolean isAuthorized = false;
+    try {
+      isAuthorized = checkUserIp(userIp);
+      if (!isAuthorized) {
+        rsp.sendError(HttpServletResponse.SC_FORBIDDEN,
+            "IP not authorized to access");
+        return;
+      }
+    } catch (NamingException ne) {
+      throw new IOException("NamingException in searching ldap"
+          + ne.toString());
+    }
+    // check request path
+    String servletPath = rqst.getServletPath();
+    if (HFTP_PATTERN.matcher(servletPath).matches()) {
+      // request is an HSFTP request
+      if (FILEPATH_PATTERN.matcher(servletPath).matches()) {
+        // file path as part of the URL
+        isAuthorized = checkHdfsPath(rqst.getPathInfo() != null ? rqst
+            .getPathInfo() : "/");
+      } else {
+        // file path is stored in "filename" parameter
+        isAuthorized = checkHdfsPath(rqst.getParameter("filename"));
+      }
+    }
+    if (!isAuthorized) {
+      rsp.sendError(HttpServletResponse.SC_FORBIDDEN,
+          "User not authorized to access path");
+      return;
+    }
+    UnixUserGroupInformation ugi = new UnixUserGroupInformation(userId,
+        groupName.split(","));
+    rqst.setAttribute("authorized.ugi", ugi);
+    // since we cannot pass ugi object cross context as they are from different
+    // classloaders in different war file, we have to use String attribute.
+    rqst.setAttribute("org.apache.hadoop.hdfsproxy.authorized.userID", userId);
+    rqst.setAttribute("org.apache.hadoop.hdfsproxy.authorized.role", groupName);
+    LOG.info("User: " + userId + " (" + groupName + ") Request: "
+        + rqst.getPathInfo() + " From: " + rqst.getRemoteAddr());
+    chain.doFilter(request, response);
+  }
+
+  /** check that client's ip is listed in the Ldap Roles */
+  @SuppressWarnings("unchecked")
+  private boolean checkUserIp(String userIp) throws NamingException {
+    String ipMember = hdfsIpSchemaStrPrefix + userIp;
+    Attributes matchAttrs = new BasicAttributes(true);
+    matchAttrs.put(new BasicAttribute(hdfsIpSchemaStr, ipMember));
+    matchAttrs.put(new BasicAttribute(hdfsUidSchemaStr));
+    matchAttrs.put(new BasicAttribute(hdfsPathSchemaStr));
+
+    String[] attrIDs = { hdfsUidSchemaStr, hdfsGroupSchemaStr,
+        hdfsPathSchemaStr };
+
+    NamingEnumeration<SearchResult> results = lctx.search(baseName, matchAttrs,
+        attrIDs);
+    if (results.hasMore()) {
+      SearchResult sr = results.next();
+      Attributes attrs = sr.getAttributes();
+      for (NamingEnumeration ne = attrs.getAll(); ne.hasMore();) {
+        Attribute attr = (Attribute) ne.next();
+        if (hdfsUidSchemaStr.equalsIgnoreCase(attr.getID())) {
+          userId = (String) attr.get();
+        } else if (hdfsGroupSchemaStr.equalsIgnoreCase(attr.getID())) {
+          groupName = (String) attr.get();
+        } else if (hdfsPathSchemaStr.equalsIgnoreCase(attr.getID())) {
+          for (NamingEnumeration e = attr.getAll(); e.hasMore();) {
+            paths.add((String) e.next());
+          }
+        }
+      }
+      return true;
+    }
+    LOG.info("Ip address " + userIp
+        + " is not authorized to access the proxy server");
+    return false;
+  }
+
+  /** check that the requested path is listed in the ldap entry */
+  private boolean checkHdfsPath(String pathInfo) {
+    if (pathInfo == null || pathInfo.length() == 0) {
+      LOG.info("Can't get file path from the request");
+      return false;
+    }
+    Path userPath = new Path(pathInfo);
+    while (userPath != null) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("\n Checking file path " + userPath);
+      }
+      if (paths.contains(userPath.toString()))
+        return true;
+      userPath = userPath.getParent();
+    }
+    LOG.info("User " + userId + " is not authorized to access " + pathInfo);
+    return false;
+  }
+}

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java?rev=788898&r1=788897&r2=788898&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java Fri Jun 26 22:48:23 2009
@@ -24,7 +24,6 @@
 import javax.servlet.ServletContext;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
@@ -36,14 +35,14 @@
 public class ProxyFileDataServlet extends FileDataServlet {
   /** For java.io.Serializable */
   private static final long serialVersionUID = 1L;
-  
+
   /** {@inheritDoc} */
   @Override
   public void init() throws ServletException {
     ServletContext context = getServletContext();
-    if (context.getAttribute("name.conf") == null) { 
+    if (context.getAttribute("name.conf") == null) {
       context.setAttribute("name.conf", new Configuration());
-    }    
+    }
   }
 
   /** {@inheritDoc} */
@@ -59,8 +58,17 @@
   /** {@inheritDoc} */
   @Override
   protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
-    String userID = (String) request.getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
-    UnixUserGroupInformation ugi = ProxyUgiManager.getUgiForUser(userID);
+    String userID = (String) request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+    String groupName = (String) request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.role");
+    UnixUserGroupInformation ugi;
+    if (groupName != null) {
+      // get group info from ldap
+      ugi = new UnixUserGroupInformation(userID, groupName.split(","));
+    } else {// stronger ugi management
+      ugi = ProxyUgiManager.getUgiForUser(userID);
+    }
     return ugi;
   }
 }

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java?rev=788898&r1=788897&r2=788898&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java Fri Jun 26 22:48:23 2009
@@ -17,17 +17,18 @@
  */
 package org.apache.hadoop.hdfsproxy;
 
+import java.io.IOException;
+
+import javax.servlet.RequestDispatcher;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-import javax.servlet.ServletException;
-import javax.servlet.ServletContext;
-import javax.servlet.RequestDispatcher;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.UnixUserGroupInformation;
 
 /**
  * 
@@ -40,51 +41,55 @@
   private static final long serialVersionUID = 1L;
   private static Configuration configuration = null;
   public static final Log LOG = LogFactory.getLog(ProxyForwardServlet.class);
-  
+
   /** {@inheritDoc} */
   @Override
   public void init() throws ServletException {
     ServletContext context = getServletContext();
-    configuration = (Configuration) context.getAttribute("org.apache.hadoop.hdfsproxy.conf");
+    configuration = (Configuration) context
+        .getAttribute("org.apache.hadoop.hdfsproxy.conf");
   }
-  
+
   /** {@inheritDoc} */
   @Override
   public void doGet(HttpServletRequest request, HttpServletResponse response)
-    throws IOException, ServletException {  
-    String hostname = request.getServerName(); 
-    
+      throws IOException, ServletException {
+    String hostname = request.getServerName();
+
     String version = configuration.get(hostname);
-    if (version != null) {
-      ServletContext curContext = getServletContext();
-      ServletContext dstContext = curContext.getContext(version);
-      
-      if (dstContext == null) {
-        LOG.info("Context non-exist or restricted from access: " + version);
-        response.sendError(HttpServletResponse.SC_NOT_FOUND);
-        return;
-      }
-      LOG.debug("Request to " + hostname + " is forwarded to version " + version);
-      forwardRequest(request, response, dstContext, request.getServletPath());
-
-    } else {
-      LOG.info("not a valid context path");
-      response.sendError(HttpServletResponse.SC_NOT_IMPLEMENTED); 
+    if (version == null) {
+      // extract from hostname directly
+      String[] strs = hostname.split("[-\\.]");
+      version = "/" + strs[0];
+    }
+
+    ServletContext curContext = getServletContext();
+    ServletContext dstContext = curContext.getContext(version);
+
+    if (dstContext == null) {
+      LOG.info("Context non-exist or restricted from access: " + version);
+      response.sendError(HttpServletResponse.SC_NOT_FOUND);
+      return;
     }
-  } 
+    LOG.debug("Request to " + hostname + " is forwarded to version " + version);
+    forwardRequest(request, response, dstContext, request.getServletPath());
+
+  }
+
   /** {@inheritDoc} */
-  public void forwardRequest(HttpServletRequest request, HttpServletResponse response, ServletContext context, String pathInfo) 
-    throws IOException, ServletException{
-    String path = buildForwardPath(request, pathInfo);    
+  public void forwardRequest(HttpServletRequest request,
+      HttpServletResponse response, ServletContext context, String pathInfo)
+      throws IOException, ServletException {
+    String path = buildForwardPath(request, pathInfo);
     RequestDispatcher dispatcher = context.getRequestDispatcher(path);
     if (dispatcher == null) {
-      LOG.info("There was no such dispatcher");
+      LOG.info("There was no such dispatcher: " + path);
       response.sendError(HttpServletResponse.SC_NO_CONTENT);
       return;
     }
     dispatcher.forward(request, response);
   }
-  
+
   /** {@inheritDoc} */
   protected String buildForwardPath(HttpServletRequest request, String pathInfo) {
     String path = pathInfo;

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java?rev=788898&r1=788897&r2=788898&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java Fri Jun 26 22:48:23 2009
@@ -29,21 +29,30 @@
 public class ProxyListPathsServlet extends ListPathsServlet {
   /** For java.io.Serializable */
   private static final long serialVersionUID = 1L;
-  
+
   /** {@inheritDoc} */
   @Override
   public void init() throws ServletException {
     ServletContext context = getServletContext();
-    if (context.getAttribute("name.conf") == null) { 
+    if (context.getAttribute("name.conf") == null) {
       context.setAttribute("name.conf", new Configuration());
-    }    
+    }
   }
 
   /** {@inheritDoc} */
   @Override
   protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
-    String userID = (String) request.getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
-    UnixUserGroupInformation ugi = ProxyUgiManager.getUgiForUser(userID);
+    String userID = (String) request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+    String groupName = (String) request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.role");
+    UnixUserGroupInformation ugi;
+    if (groupName != null) {
+      // group info stored in ldap
+      ugi = new UnixUserGroupInformation(userID, groupName.split(","));
+    } else {// stronger ugi management
+      ugi = ProxyUgiManager.getUgiForUser(userID);
+    }
     return ugi;
   }
 }

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java?rev=788898&r1=788897&r2=788898&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java Fri Jun 26 22:48:23 2009
@@ -24,22 +24,23 @@
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.server.namenode.StreamFile;
 import org.apache.hadoop.security.UnixUserGroupInformation;
-import org.apache.hadoop.conf.Configuration;
 
 /** {@inheritDoc} */
 public class ProxyStreamFile extends StreamFile {
   /** For java.io.Serializable */
   private static final long serialVersionUID = 1L;
+
   /** {@inheritDoc} */
   @Override
   public void init() throws ServletException {
     ServletContext context = getServletContext();
-    if (context.getAttribute("name.conf") == null) { 
+    if (context.getAttribute("name.conf") == null) {
       context.setAttribute("name.conf", new Configuration());
-    }    
+    }
   }
 
   /** {@inheritDoc} */
@@ -59,8 +60,18 @@
   /** {@inheritDoc} */
   @Override
   protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
-    String userID = (String) request.getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
-    UnixUserGroupInformation ugi = ProxyUgiManager.getUgiForUser(userID);
+    String userID = (String) request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+    String groupName = (String) request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.role");
+    UnixUserGroupInformation ugi;
+    if (groupName != null) {
+      // get group info from ldap
+      ugi = new UnixUserGroupInformation(userID, groupName.split(","));
+    } else {// stronger ugi management
+      ugi = ProxyUgiManager.getUgiForUser(userID);
+    }
     return ugi;
   }
+
 }

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java?rev=788898&r1=788897&r2=788898&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java Fri Jun 26 22:48:23 2009
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hdfsproxy;
 
+import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.HttpURLConnection;
@@ -25,13 +26,20 @@
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
+import java.security.KeyStore;
 import java.security.cert.X509Certificate;
 import java.util.Date;
 import java.util.Set;
 
-import javax.net.ssl.HttpsURLConnection;
 import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.KeyManager;
+import javax.net.ssl.KeyManagerFactory;
+import javax.net.ssl.SSLContext;
 import javax.net.ssl.SSLSession;
+import javax.net.ssl.TrustManager;
+import javax.net.ssl.TrustManagerFactory;
+import javax.net.ssl.X509TrustManager;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.commons.logging.Log;
@@ -43,17 +51,19 @@
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.HostsFileReader;
 
-
 /**
  * Proxy Utility .
  */
 public class ProxyUtil {
   public static final Log LOG = LogFactory.getLog(ProxyUtil.class);
   private static final long MM_SECONDS_PER_DAY = 1000 * 60 * 60 * 24;
-  private static final int CERT_EXPIRATION_WARNING_THRESHOLD = 30; // 30 days warning
-  
+  private static final int CERT_EXPIRATION_WARNING_THRESHOLD = 30; // 30 days warning
+
+
+
   private static enum UtilityOption {
-    RELOAD("-reloadPermFiles"), CLEAR("-clearUgiCache"), GET("-get"), CHECKCERTS("-checkcerts");
+    RELOAD("-reloadPermFiles"), CLEAR("-clearUgiCache"), GET("-get"), CHECKCERTS(
+        "-checkcerts");
 
     private String name = null;
 
@@ -65,7 +75,7 @@
       return name;
     }
   }
-  
+
   /**
    * Dummy hostname verifier that is used to bypass hostname checking
    */
@@ -75,6 +85,21 @@
     }
   }
 
+  /**
+   * Dummy trustmanager that is used to bypass server certificate checking
+   */
+  private static class DummyTrustManager implements X509TrustManager {
+    public void checkClientTrusted(X509Certificate[] chain, String authType) {
+    }
+
+    public void checkServerTrusted(X509Certificate[] chain, String authType) {
+    }
+
+    public X509Certificate[] getAcceptedIssuers() {
+      return null;
+    }
+  }
+
   private static HttpsURLConnection openConnection(String hostname, int port,
       String path) throws IOException {
     try {
@@ -90,21 +115,53 @@
     }
   }
 
-  private static void setupSslProps(Configuration conf) {
-    System.setProperty("javax.net.ssl.trustStore", conf
-        .get("ssl.client.truststore.location"));
-    System.setProperty("javax.net.ssl.trustStorePassword", conf.get(
-        "ssl.client.truststore.password", ""));
-    System.setProperty("javax.net.ssl.trustStoreType", conf.get(
-        "ssl.client.truststore.type", "jks"));
-    System.setProperty("javax.net.ssl.keyStore", conf
-        .get("ssl.client.keystore.location"));
-    System.setProperty("javax.net.ssl.keyStorePassword", conf.get(
-        "ssl.client.keystore.password", ""));
-    System.setProperty("javax.net.ssl.keyPassword", conf.get(
-        "ssl.client.keystore.keypassword", ""));
-    System.setProperty("javax.net.ssl.keyStoreType", conf.get(
-        "ssl.client.keystore.type", "jks"));
+  private static void setupSslProps(Configuration conf) throws IOException {
+    FileInputStream fis = null;
+    try {
+      SSLContext sc = SSLContext.getInstance("SSL");
+      KeyManager[] kms = null;
+      TrustManager[] tms = null;
+      if (conf.get("ssl.client.keystore.location") != null) {
+        // initialize default key manager with keystore file and pass
+        KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
+        KeyStore ks = KeyStore.getInstance(conf.get("ssl.client.keystore.type",
+            "JKS"));
+        char[] ksPass = conf.get("ssl.client.keystore.password", "changeit")
+            .toCharArray();
+        fis = new FileInputStream(conf.get("ssl.client.keystore.location",
+            "keystore.jks"));
+        ks.load(fis, ksPass);
+        kmf.init(ks, conf.get("ssl.client.keystore.keypassword", "changeit")
+            .toCharArray());
+        kms = kmf.getKeyManagers();
+        fis.close();
+        fis = null;
+      }
+      // initialize default trust manager with keystore file and pass
+      if (conf.getBoolean("ssl.client.do.not.authenticate.server", false)) {
+        // bypass trustmanager validation
+        tms = new DummyTrustManager[] { new DummyTrustManager() };
+      } else {
+        TrustManagerFactory tmf = TrustManagerFactory.getInstance("PKIX");
+        KeyStore ts = KeyStore.getInstance(conf.get(
+            "ssl.client.truststore.type", "JKS"));
+        char[] tsPass = conf.get("ssl.client.truststore.password", "changeit")
+            .toCharArray();
+        fis = new FileInputStream(conf.get("ssl.client.truststore.location",
+            "truststore.jks"));
+        ts.load(fis, tsPass);
+        tmf.init(ts);
+        tms = tmf.getTrustManagers();
+      }
+      sc.init(kms, tms, new java.security.SecureRandom());
+      HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
+    } catch (Exception e) {
+      throw new IOException("Could not initialize SSLContext", e);
+    } finally {
+      if (fis != null) {
+        fis.close();
+      }
+    }
   }
 
   static InetSocketAddress getSslAddr(Configuration conf) throws IOException {
@@ -121,31 +178,33 @@
     int err = 0;
     StringBuilder b = new StringBuilder();
 
-    HostsFileReader hostsReader = new HostsFileReader(conf.get("hdfsproxy.hosts",
-        "hdfsproxy-hosts"), "");
+    HostsFileReader hostsReader = new HostsFileReader(conf.get(
+        "hdfsproxy.hosts", "hdfsproxy-hosts"), "");
     Set<String> hostsList = hostsReader.getHosts();
     for (String hostname : hostsList) {
       HttpsURLConnection connection = null;
       try {
-        connection = openConnection(hostname, sslPort, path);  
-        connection.connect(); 
+        connection = openConnection(hostname, sslPort, path);
+        connection.connect();
         if (LOG.isDebugEnabled()) {
           StringBuffer sb = new StringBuffer();
-          X509Certificate[] clientCerts = (X509Certificate[]) connection.getLocalCertificates();
+          X509Certificate[] clientCerts = (X509Certificate[]) connection
+              .getLocalCertificates();
           if (clientCerts != null) {
             for (X509Certificate cert : clientCerts)
               sb.append("\n Client certificate Subject Name is "
                   + cert.getSubjectX500Principal().getName());
           } else {
-            sb.append("\n No client certificates were found");  
+            sb.append("\n No client certificates were found");
           }
-          X509Certificate[] serverCerts = (X509Certificate[]) connection.getServerCertificates();
+          X509Certificate[] serverCerts = (X509Certificate[]) connection
+              .getServerCertificates();
           if (serverCerts != null) {
             for (X509Certificate cert : serverCerts)
               sb.append("\n Server certificate Subject Name is "
                   + cert.getSubjectX500Principal().getName());
           } else {
-            sb.append("\n No server certificates were found");  
+            sb.append("\n No server certificates were found");
           }
           LOG.debug(sb.toString());
         }
@@ -156,7 +215,8 @@
         }
       } catch (IOException e) {
         b.append("\n\t" + hostname + ": " + e.getLocalizedMessage());
-        if (LOG.isDebugEnabled()) e.printStackTrace();
+        if (LOG.isDebugEnabled())
+          LOG.debug("Exception happend for host " + hostname, e);
         err++;
       } finally {
         if (connection != null)
@@ -164,65 +224,73 @@
       }
     }
     if (err > 0) {
-      System.err.print("Command failed on the following "
-          + err + " host" + (err==1?":":"s:") + b.toString() + "\n");
+      System.err.print("Command failed on the following " + err + " host"
+          + (err == 1 ? ":" : "s:") + b.toString() + "\n");
       return false;
     }
     return true;
   }
-  
-  
-  static FSDataInputStream open(Configuration conf, String hostname, int port, String path) throws IOException {
+
+  static FSDataInputStream open(Configuration conf, String hostname, int port,
+      String path) throws IOException {
     setupSslProps(conf);
     HttpURLConnection connection = null;
     connection = openConnection(hostname, port, path);
     connection.connect();
     final InputStream in = connection.getInputStream();
     return new FSDataInputStream(new FSInputStream() {
-        public int read() throws IOException {
-          return in.read();
-        }
-        public int read(byte[] b, int off, int len) throws IOException {
-          return in.read(b, off, len);
-        }
+      public int read() throws IOException {
+        return in.read();
+      }
 
-        public void close() throws IOException {
-          in.close();
-        }
+      public int read(byte[] b, int off, int len) throws IOException {
+        return in.read(b, off, len);
+      }
 
-        public void seek(long pos) throws IOException {
-          throw new IOException("Can't seek!");
-        }
-        public long getPos() throws IOException {
-          throw new IOException("Position unknown!");
-        }
-        public boolean seekToNewSource(long targetPos) throws IOException {
-          return false;
-        }
-      });
+      public void close() throws IOException {
+        in.close();
+      }
+
+      public void seek(long pos) throws IOException {
+        throw new IOException("Can't seek!");
+      }
+
+      public long getPos() throws IOException {
+        throw new IOException("Position unknown!");
+      }
+
+      public boolean seekToNewSource(long targetPos) throws IOException {
+        return false;
+      }
+    });
   }
-  
-  static void checkServerCertsExpirationDays(Configuration conf, String hostname, int port) throws IOException {
+
+  static void checkServerCertsExpirationDays(Configuration conf,
+      String hostname, int port) throws IOException {
     setupSslProps(conf);
     HttpsURLConnection connection = null;
     connection = openConnection(hostname, port, null);
     connection.connect();
-    X509Certificate[] serverCerts = (X509Certificate[]) connection.getServerCertificates();
+    X509Certificate[] serverCerts = (X509Certificate[]) connection
+        .getServerCertificates();
     Date curDate = new Date();
     long curTime = curDate.getTime();
     if (serverCerts != null) {
       for (X509Certificate cert : serverCerts) {
         StringBuffer sb = new StringBuffer();
-        sb.append("\n Server certificate Subject Name: " + cert.getSubjectX500Principal().getName());
+        sb.append("\n Server certificate Subject Name: "
+            + cert.getSubjectX500Principal().getName());
         Date expDate = cert.getNotAfter();
         long expTime = expDate.getTime();
-        int dayOffSet = (int) ((expTime - curTime)/MM_SECONDS_PER_DAY);
+        int dayOffSet = (int) ((expTime - curTime) / MM_SECONDS_PER_DAY);
         sb.append(" have " + dayOffSet + " days to expire");
-        if (dayOffSet < CERT_EXPIRATION_WARNING_THRESHOLD) LOG.warn(sb.toString());
-        else LOG.info(sb.toString());
+        if (dayOffSet < CERT_EXPIRATION_WARNING_THRESHOLD)
+          LOG.warn(sb.toString());
+        else
+          LOG.info(sb.toString());
       }
     } else {
-      LOG.info("\n No Server certs was found");  
+      LOG.info("\n No Server certs was found");
     }
 
     if (connection != null) {
@@ -231,24 +299,23 @@
   }
 
   public static void main(String[] args) throws Exception {
-    if(args.length < 1 || 
-        (!UtilityOption.RELOAD.getName().equalsIgnoreCase(args[0]) 
+    if (args.length < 1
+        || (!UtilityOption.RELOAD.getName().equalsIgnoreCase(args[0])
             && !UtilityOption.CLEAR.getName().equalsIgnoreCase(args[0])
-            && !UtilityOption.GET.getName().equalsIgnoreCase(args[0])
-            && !UtilityOption.CHECKCERTS.getName().equalsIgnoreCase(args[0])) ||
-            (UtilityOption.GET.getName().equalsIgnoreCase(args[0]) && args.length != 4) ||
-            (UtilityOption.CHECKCERTS.getName().equalsIgnoreCase(args[0]) && args.length != 3)) {
-      System.err.println("Usage: ProxyUtil ["
-          + UtilityOption.RELOAD.getName() + "] | ["
-          + UtilityOption.CLEAR.getName() + "] | ["
+            && !UtilityOption.GET.getName().equalsIgnoreCase(args[0]) && !UtilityOption.CHECKCERTS
+            .getName().equalsIgnoreCase(args[0]))
+        || (UtilityOption.GET.getName().equalsIgnoreCase(args[0]) && args.length != 4)
+        || (UtilityOption.CHECKCERTS.getName().equalsIgnoreCase(args[0]) && args.length != 3)) {
+      System.err.println("Usage: ProxyUtil [" + UtilityOption.RELOAD.getName()
+          + "] | [" + UtilityOption.CLEAR.getName() + "] | ["
           + UtilityOption.GET.getName() + " <hostname> <#port> <path> ] | ["
           + UtilityOption.CHECKCERTS.getName() + " <hostname> <#port> ]");
-      System.exit(0);      
+      System.exit(0);
     }
-    Configuration conf = new Configuration(false);   
+    Configuration conf = new Configuration(false);
     conf.addResource("ssl-client.xml");
     conf.addResource("hdfsproxy-default.xml");
-     
+
     if (UtilityOption.RELOAD.getName().equalsIgnoreCase(args[0])) {
       // reload user-certs.xml and user-permissions.xml files
       sendCommand(conf, "/reloadPermFiles");
@@ -266,5 +333,5 @@
       in.close();
     }
   }
-        
+
 }

Added: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/DummyLdapContext.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/DummyLdapContext.java?rev=788898&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/DummyLdapContext.java (added)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/DummyLdapContext.java Fri Jun 26 22:48:23 2009
@@ -0,0 +1,128 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import java.util.ArrayList;
+import java.util.Hashtable;
+
+import javax.naming.NamingEnumeration;
+import javax.naming.NamingException;
+import javax.naming.directory.Attribute;
+import javax.naming.directory.Attributes;
+import javax.naming.directory.BasicAttribute;
+import javax.naming.directory.BasicAttributes;
+import javax.naming.directory.SearchResult;
+import javax.naming.ldap.Control;
+import javax.naming.ldap.InitialLdapContext;
+
+class DummyLdapContext extends InitialLdapContext {
+  class ResultEnum<T> implements NamingEnumeration<T> {
+    private ArrayList<T> rl;
+
+    public ResultEnum() {
+      rl = new ArrayList<T>();
+    }
+
+    public ResultEnum(ArrayList<T> al) {
+      rl = al;
+    }
+
+    public boolean hasMoreElements() {
+      return !rl.isEmpty();
+    }
+
+    public T nextElement() {
+      T t = rl.get(0);
+      rl.remove(0);
+      return t;
+    }
+
+    public boolean hasMore() throws NamingException {
+      return !rl.isEmpty();
+    }
+
+    public T next() throws NamingException {
+      T t = rl.get(0);
+      rl.remove(0);
+      return t;
+    }
+
+    public void close() throws NamingException {
+    }
+  }
+
+  public DummyLdapContext() throws NamingException {
+  }
+
+  public DummyLdapContext(Hashtable<?, ?> environment, Control[] connCtls)
+      throws NamingException {
+  }
+
+  public NamingEnumeration<SearchResult> search(String name,
+      Attributes matchingAttributes, String[] attributesToReturn)
+      throws NamingException {
+    System.out.println("Searching Dummy LDAP Server Results:");
+    if (!"ou=proxyroles,dc=mycompany,dc=com".equalsIgnoreCase(name)) {
+      System.out.println("baseName mismatch");
+      return new ResultEnum<SearchResult>();
+    }
+    if (!"cn=127.0.0.1".equals((String) matchingAttributes.get("uniqueMember")
+        .get())) {
+      System.out.println("Ip address mismatch");
+      return new ResultEnum<SearchResult>();
+    }
+    BasicAttributes attrs = new BasicAttributes();
+    BasicAttribute uidAttr = new BasicAttribute("uid", "testuser");
+    attrs.put(uidAttr);
+    BasicAttribute groupAttr = new BasicAttribute("userClass", "testgroup");
+    attrs.put(groupAttr);
+    BasicAttribute locAttr = new BasicAttribute("documentLocation", "/testdir");
+    attrs.put(locAttr);
+    SearchResult sr = new SearchResult(null, null, attrs);
+    ArrayList<SearchResult> al = new ArrayList<SearchResult>();
+    al.add(sr);
+    NamingEnumeration<SearchResult> ne = new ResultEnum<SearchResult>(al);
+    return ne;
+  }
+
+  @SuppressWarnings("unchecked")
+  public static void main(String[] args) throws Exception {
+    DummyLdapContext dlc = new DummyLdapContext();
+    String baseName = "ou=proxyroles,dc=mycompany,dc=com";
+    Attributes matchAttrs = new BasicAttributes(true);
+    String[] attrIDs = { "uid", "documentLocation" };
+    NamingEnumeration<SearchResult> results = dlc.search(baseName, matchAttrs,
+        attrIDs);
+    if (results.hasMore()) {
+      SearchResult sr = results.next();
+      Attributes attrs = sr.getAttributes();
+      for (NamingEnumeration ne = attrs.getAll(); ne.hasMore();) {
+        Attribute attr = (Attribute) ne.next();
+        if ("uid".equalsIgnoreCase(attr.getID())) {
+          System.out.println("User ID = " + attr.get());
+        } else if ("documentLocation".equalsIgnoreCase(attr.getID())) {
+          System.out.println("Document Location = ");
+          for (NamingEnumeration e = attr.getAll(); e.hasMore();) {
+            System.out.println(e.next());
+          }
+        }
+      }
+    }
+  }
+}

Added: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestLdapIpDirFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestLdapIpDirFilter.java?rev=788898&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestLdapIpDirFilter.java (added)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestLdapIpDirFilter.java Fri Jun 26 22:48:23 2009
@@ -0,0 +1,139 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+
+import javax.naming.NamingException;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+
+import org.apache.cactus.FilterTestCase;
+import org.apache.cactus.WebRequest;
+import org.apache.cactus.WebResponse;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class TestLdapIpDirFilter extends FilterTestCase {
+
+  public static final Log LOG = LogFactory.getLog(TestLdapIpDirFilter.class);
+
+  private class DummyFilterChain implements FilterChain {
+    public void doFilter(ServletRequest theRequest, ServletResponse theResponse)
+        throws IOException, ServletException {
+      PrintWriter writer = theResponse.getWriter();
+
+      writer.print("<p>some content</p>");
+      writer.close();
+    }
+
+    public void init(FilterConfig theConfig) {
+    }
+
+    public void destroy() {
+    }
+  }
+
+  public void testIpRestriction() throws ServletException, IOException,
+      NamingException {
+    LdapIpDirFilter filter = new LdapIpDirFilter();
+    String baseName = "ou=proxyroles,dc=mycompany,dc=com";
+    DummyLdapContext dlc = new DummyLdapContext();
+    filter.initialize(baseName, dlc);
+    request.setRemoteIPAddress("127.0.0.2");
+    request.removeAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+    FilterChain mockFilterChain = new DummyFilterChain();
+    filter.doFilter(request, response, mockFilterChain);
+    assertNull(request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID"));
+  }
+
+  public void endIpRestriction(WebResponse theResponse) {
+    assertEquals(theResponse.getStatusCode(), 403);
+    assertTrue("Text missing 'IP not authorized to access' : : ["
+        + theResponse.getText() + "]", theResponse.getText().indexOf(
+        "IP not authorized to access") > 0);
+  }
+
+  public void beginPathRestriction(WebRequest theRequest) {
+    theRequest.setURL("proxy-test:0", null, "/streamFile", null,
+        "filename=/nontestdir");
+  }
+
+  public void testPathRestriction() throws ServletException, IOException,
+      NamingException {
+    LdapIpDirFilter filter = new LdapIpDirFilter();
+    String baseName = "ou=proxyroles,dc=mycompany,dc=com";
+    DummyLdapContext dlc = new DummyLdapContext();
+    filter.initialize(baseName, dlc);
+    request.setRemoteIPAddress("127.0.0.1");
+    request.removeAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+    FilterChain mockFilterChain = new DummyFilterChain();
+    filter.doFilter(request, response, mockFilterChain);
+    assertNull(request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID"));
+  }
+
+  public void endPathRestriction(WebResponse theResponse) {
+    assertEquals(theResponse.getStatusCode(), 403);
+    assertTrue("Text missing 'User not authorized to access path' : : ["
+        + theResponse.getText() + "]", theResponse.getText().indexOf(
+        "User not authorized to access path") > 0);
+  }
+
+  public void beginDoFilter(WebRequest theRequest) {
+    theRequest.setURL("proxy-test:0", null, "/streamFile", null,
+        "filename=/testdir");
+  }
+
+  public void testDoFilter() throws ServletException, IOException,
+      NamingException {
+    LdapIpDirFilter filter = new LdapIpDirFilter();
+    String baseName = "ou=proxyroles,dc=mycompany,dc=com";
+    DummyLdapContext dlc = new DummyLdapContext();
+    filter.initialize(baseName, dlc);
+    request.setRemoteIPAddress("127.0.0.1");
+
+    ServletContext context = config.getServletContext();
+    context.removeAttribute("name.node.address");
+    context.removeAttribute("name.conf");
+    assertNull(context.getAttribute("name.node.address"));
+    assertNull(context.getAttribute("name.conf"));
+    filter.init(config);
+    assertNotNull(context.getAttribute("name.node.address"));
+    assertNotNull(context.getAttribute("name.conf"));
+
+    request.removeAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+    FilterChain mockFilterChain = new DummyFilterChain();
+    filter.doFilter(request, response, mockFilterChain);
+    assertEquals(request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID"),
+        "testuser");
+
+  }
+
+  public void endDoFilter(WebResponse theResponse) {
+    assertEquals("<p>some content</p>", theResponse.getText());
+  }
+
+}

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-default.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-default.xml?rev=788898&r1=788897&r2=788898&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-default.xml (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-default.xml Fri Jun 26 22:48:23 2009
@@ -70,5 +70,35 @@
   </description>
 </property>
 
+<property>
+  <name>hdfsproxy.ldap.initial.context.factory</name>
+  <value>com.sun.jndi.ldap.LdapCtxFactory</value>
+  <description> ldap initial context factory
+  </description>
+</property>
+
+<property>
+  <name>hdfsproxy.ldap.provider.url</name>
+  <value>ldap://ldapserver:389</value>
+  <description> ldap server address
+  </description>
+</property>
+
+<property>
+  <name>hdfsproxy.ldap.role.base</name>
+  <value>ou=proxyroles,dc=mycompany,dc=com</value>
+  <description> ldap role base
+  </description>
+</property>
+
+<property>
+  <name>fs.default.name</name>
+  <!-- cluster variant -->
+  <value>hdfs://localhost:8020</value>
+  <description>The name of the default file system.  Either the
+  literal string "local" or a host:port for NDFS.</description>
+  <final>true</final>
+</property>
+
 </configuration>
 

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/user-certs.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/user-certs.xml?rev=788898&r1=788897&r2=788898&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/user-certs.xml (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config/user-certs.xml Fri Jun 26 22:48:23 2009
@@ -75,7 +75,7 @@
 
 <property>
   <name> Admin </name>
-  <value>, 5,  ,,  3 , 9a2cf0be9ddf8280
+  <value>, 6,  ,,  3 , 9a2cf0be9ddf8280
 
 
 

Added: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/resources/tomcat-web.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/resources/tomcat-web.xml?rev=788898&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/resources/tomcat-web.xml (added)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/resources/tomcat-web.xml Fri Jun 26 22:48:23 2009
@@ -0,0 +1,154 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<!DOCTYPE web-app 
+    PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN" 
+    "http://java.sun.com/dtd/web-app_2_3.dtd">
+
+<web-app>
+
+
+    <!-- General description of your web application -->
+
+    <display-name>HDFS Proxy</display-name>
+    <description>
+      Provides read-only access to data stored on the HDFS grid
+    </description>
+
+
+    <!-- Context initialization parameters that define shared
+         String constants used within your application, which
+         can be customized by the system administrator who is
+         installing your application.  The values actually
+         assigned to these parameters can be retrieved in a
+         servlet or JSP page by calling:
+
+             String value =
+               getServletContext().getInitParameter("name");
+
+         where "name" matches the <param-name> element of
+         one of these initialization parameters.
+
+         You can define any number of context initialization
+         parameters, including zero.
+    -->
+
+    <context-param>
+      <param-name>webmaster</param-name>
+      <param-value>zhiyong1@yahoo-inc.com</param-value>
+      <description>
+        The EMAIL address of the administrator to whom questions
+        and comments about this application should be addressed.
+      </description>
+    </context-param>
+    
+     	
+
+
+    <!-- Servlet definitions for the servlets that make up
+         your web application, including initialization
+         parameters.  With Tomcat, you can also send requests
+         to servlets not listed here with a request like this:
+
+           http://localhost:8080/{context-path}/servlet/{classname}
+
+         but this usage is not guaranteed to be portable.  It also
+         makes relative references to images and other resources
+         required by your servlet more complicated, so defining
+         all of your servlets (and defining a mapping to them with
+         a servlet-mapping element) is recommended.
+
+         Servlet initialization parameters can be retrieved in a
+         servlet or JSP page by calling:
+
+             String value =
+               getServletConfig().getInitParameter("name");
+
+         where "name" matches the <param-name> element of
+         one of these initialization parameters.
+
+         You can define any number of servlets, including zero.
+    -->
+
+		 <filter>
+	        <filter-name>proxyFilter</filter-name>
+	        <filter-class>org.apache.hadoop.hdfsproxy.ProxyFilter</filter-class>
+	   </filter>
+
+    <filter-mapping>
+        <filter-name>proxyFilter</filter-name>
+        <url-pattern>/*</url-pattern>
+    </filter-mapping>
+    
+    <servlet>
+    	<servlet-name>listPaths</servlet-name>
+      <description>list paths data access</description>
+      <servlet-class>org.apache.hadoop.hdfsproxy.ProxyListPathsServlet</servlet-class>
+    </servlet>
+    
+    <servlet-mapping>
+        <servlet-name>listPaths</servlet-name>
+        <url-pattern>/listPaths/*</url-pattern>
+    </servlet-mapping>
+
+		<servlet>
+    	<servlet-name>data</servlet-name>
+      <description>data access</description>
+      <servlet-class>org.apache.hadoop.hdfsproxy.ProxyFileDataServlet</servlet-class>
+    </servlet>
+    
+	  <servlet-mapping>
+        <servlet-name>data</servlet-name>
+        <url-pattern>/data/*</url-pattern>
+    </servlet-mapping>
+    
+    <servlet>
+    	<servlet-name>streamFile</servlet-name>
+      <description>stream file access</description>
+      <servlet-class>org.apache.hadoop.hdfsproxy.ProxyStreamFile</servlet-class>
+    </servlet>
+    
+    <servlet-mapping>
+        <servlet-name>streamFile</servlet-name>
+        <url-pattern>/streamFile/*</url-pattern>
+    </servlet-mapping>
+    
+
+		<welcome-file-list>
+		  <welcome-file>index.html</welcome-file>
+		</welcome-file-list>
+
+    <!-- Define the default session timeout for your application,
+         in minutes.  From a servlet or JSP page, you can modify
+         the timeout for a particular session dynamically by using
+         HttpSession.getMaxInactiveInterval(). -->
+
+    <session-config>
+      <session-timeout>30</session-timeout>    <!-- 30 minutes -->
+    </session-config>    
+
+
+</web-app>
+
+
+
+
+
+
+
+

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HsftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HsftpFileSystem.java?rev=788898&r1=788897&r2=788898&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HsftpFileSystem.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HsftpFileSystem.java Fri Jun 26 22:48:23 2009
@@ -18,33 +18,40 @@
 
 package org.apache.hadoop.hdfs;
 
+import java.io.FileInputStream;
 import java.io.IOException;
 import java.net.HttpURLConnection;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
+import java.security.KeyStore;
 import java.security.cert.X509Certificate;
 
 import javax.net.ssl.HostnameVerifier;
 import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.KeyManager;
+import javax.net.ssl.KeyManagerFactory;
+import javax.net.ssl.SSLContext;
 import javax.net.ssl.SSLSession;
+import javax.net.ssl.TrustManager;
+import javax.net.ssl.TrustManagerFactory;
+import javax.net.ssl.X509TrustManager;
 
 import org.apache.hadoop.conf.Configuration;
 
-
-
-/** An implementation of a protocol for accessing filesystems over HTTPS.
- * The following implementation provides a limited, read-only interface
- * to a filesystem over HTTPS.
+/**
+ * An implementation of a protocol for accessing filesystems over HTTPS. The
+ * following implementation provides a limited, read-only interface to a
+ * filesystem over HTTPS.
+ * 
  * @see org.apache.hadoop.hdfs.server.namenode.ListPathsServlet
  * @see org.apache.hadoop.hdfs.server.namenode.FileDataServlet
  */
 public class HsftpFileSystem extends HftpFileSystem {
 
   private static final long MM_SECONDS_PER_DAY = 1000 * 60 * 60 * 24;
-  private volatile int ExpWarnDays = 0;  
-  
-  
+  private volatile int ExpWarnDays = 0;
+
   @Override
   public void initialize(URI name, Configuration conf) throws IOException {
     super.initialize(name, conf);
@@ -52,40 +59,76 @@
     ExpWarnDays = conf.getInt("ssl.expiration.warn.days", 30);
   }
 
-  /** Set up SSL resources */
-  private static void setupSsl(Configuration conf) {
+  /**
+   * Set up SSL resources
+   * 
+   * @throws IOException
+   */
+  private static void setupSsl(Configuration conf) throws IOException {
     Configuration sslConf = new Configuration(false);
     sslConf.addResource(conf.get("dfs.https.client.keystore.resource",
         "ssl-client.xml"));
-    System.setProperty("javax.net.ssl.trustStore", sslConf.get(
-        "ssl.client.truststore.location", ""));
-    System.setProperty("javax.net.ssl.trustStorePassword", sslConf.get(
-        "ssl.client.truststore.password", ""));
-    System.setProperty("javax.net.ssl.trustStoreType", sslConf.get(
-        "ssl.client.truststore.type", "jks"));
-    System.setProperty("javax.net.ssl.keyStore", sslConf.get(
-        "ssl.client.keystore.location", ""));
-    System.setProperty("javax.net.ssl.keyStorePassword", sslConf.get(
-        "ssl.client.keystore.password", ""));
-    System.setProperty("javax.net.ssl.keyPassword", sslConf.get(
-        "ssl.client.keystore.keypassword", ""));
-    System.setProperty("javax.net.ssl.keyStoreType", sslConf.get(
-        "ssl.client.keystore.type", "jks"));
+    FileInputStream fis = null;
+    try {
+      SSLContext sc = SSLContext.getInstance("SSL");
+      KeyManager[] kms = null;
+      TrustManager[] tms = null;
+      if (sslConf.get("ssl.client.keystore.location") != null) {
+        // initialize default key manager with keystore file and password
+        KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
+        KeyStore ks = KeyStore.getInstance(sslConf.get(
+            "ssl.client.keystore.type", "JKS"));
+        char[] ksPass = sslConf.get("ssl.client.keystore.password", "changeit")
+            .toCharArray();
+        fis = new FileInputStream(sslConf.get("ssl.client.keystore.location",
+            "keystore.jks"));
+        ks.load(fis, ksPass);
+        kmf.init(ks, sslConf.get("ssl.client.keystore.keypassword", "changeit")
+            .toCharArray());
+        kms = kmf.getKeyManagers();
+        fis.close();
+        fis = null;
+      }
+      // initialize default trust manager with truststore file and password
+      if (conf.getBoolean("ssl.client.do.not.authenticate.server", false)) {
+        // bypass trust-manager validation (trust all server certificates)
+        tms = new DummyTrustManager[] { new DummyTrustManager() };
+      } else {
+        TrustManagerFactory tmf = TrustManagerFactory.getInstance("PKIX");
+        KeyStore ts = KeyStore.getInstance(sslConf.get(
+            "ssl.client.truststore.type", "JKS"));
+        char[] tsPass = sslConf.get("ssl.client.truststore.password",
+            "changeit").toCharArray();
+        fis = new FileInputStream(sslConf.get("ssl.client.truststore.location",
+            "truststore.jks"));
+        ts.load(fis, tsPass);
+        tmf.init(ts);
+        tms = tmf.getTrustManagers();
+      }
+      sc.init(kms, tms, new java.security.SecureRandom());
+      HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
+    } catch (Exception e) {
+      throw new IOException("Could not initialize SSLContext", e);
+    } finally {
+      if (fis != null) {
+        fis.close();
+      }
+    }
   }
-  
+
   @Override
   protected HttpURLConnection openConnection(String path, String query)
       throws IOException {
     try {
-      final URL url = new URI("https", null, nnAddr.getHostName(),
-          nnAddr.getPort(), path, query, null).toURL();
-      HttpsURLConnection conn = (HttpsURLConnection)url.openConnection();
+      final URL url = new URI("https", null, nnAddr.getHostName(), nnAddr
+          .getPort(), path, query, null).toURL();
+      HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
       // bypass hostname verification
       conn.setHostnameVerifier(new DummyHostnameVerifier());
       conn.setRequestMethod("GET");
       conn.connect();
-     
-   // check cert expiration date
+
+      // check cert expiration date
       final int warnDays = ExpWarnDays;
       if (warnDays > 0) { // make sure only check once
         ExpWarnDays = 0;
@@ -100,16 +143,16 @@
               StringBuffer sb = new StringBuffer();
               sb.append("\n Client certificate "
                   + cert.getSubjectX500Principal().getName());
-              int dayOffSet = (int) ((expTime - System.currentTimeMillis())/MM_SECONDS_PER_DAY);
+              int dayOffSet = (int) ((expTime - System.currentTimeMillis()) / MM_SECONDS_PER_DAY);
               sb.append(" have " + dayOffSet + " days to expire");
               LOG.warn(sb.toString());
             }
           }
-        }        
+        }
       }
-      return (HttpURLConnection)conn;
+      return (HttpURLConnection) conn;
     } catch (URISyntaxException e) {
-      throw (IOException)new IOException().initCause(e);
+      throw (IOException) new IOException().initCause(e);
     }
   }
 
@@ -117,10 +160,10 @@
   public URI getUri() {
     try {
       return new URI("hsftp", null, nnAddr.getHostName(), nnAddr.getPort(),
-                     null, null, null);
+          null, null, null);
     } catch (URISyntaxException e) {
       return null;
-    } 
+    }
   }
 
   /**
@@ -132,4 +175,19 @@
     }
   }
 
+  /**
+   * Dummy trustmanager that is used to trust all server certificates
+   */
+  protected static class DummyTrustManager implements X509TrustManager {
+    public void checkClientTrusted(X509Certificate[] chain, String authType) {
+    }
+
+    public void checkServerTrusted(X509Certificate[] chain, String authType) {
+    }
+
+    public X509Certificate[] getAcceptedIssuers() {
+      return null;
+    }
+  }
+
 }



Mime
View raw message