hadoop-common-commits mailing list archives

From: cdoug...@apache.org
Subject: svn commit: r712305 [2/2] - in /hadoop/core/trunk: ./ src/contrib/ src/contrib/hdfsproxy/ src/contrib/hdfsproxy/bin/ src/contrib/hdfsproxy/conf/ src/contrib/hdfsproxy/lib/ src/contrib/hdfsproxy/src/ src/contrib/hdfsproxy/src/java/ src/contrib/hdfsproxy...
Date: Fri, 07 Nov 2008 23:02:32 GMT
Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/HdfsProxy.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/HdfsProxy.java?rev=712305&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/HdfsProxy.java (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/HdfsProxy.java Fri Nov  7 15:02:30 2008
@@ -0,0 +1,293 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.util.Set;
+
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.SSLSession;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.util.HostsFileReader;
+import org.apache.hadoop.util.StringUtils;
+
+/**
+ * An HTTPS/SSL proxy to HDFS that implements certificate-based access control.
+ */
+public class HdfsProxy {
+  public static final Log LOG = LogFactory.getLog(HdfsProxy.class);
+
+  private ProxyHttpServer server;
+  private InetSocketAddress sslAddr;
+  
+  /** Construct a proxy from the given configuration */
+  public HdfsProxy(Configuration conf) throws IOException {
+    try {
+      initialize(conf);
+    } catch (IOException e) {
+      this.stop();
+      throw e;
+    }
+  }
+  
+  private void initialize(Configuration conf) throws IOException {
+    sslAddr = getSslAddr(conf);
+    String nn = conf.get("hdfsproxy.dfs.namenode.address");
+    if (nn == null)
+      throw new IOException("HDFS NameNode address is not specified");
+    InetSocketAddress nnAddr = NetUtils.createSocketAddr(nn);
+    LOG.info("HDFS NameNode is at: " + nnAddr.getHostName() + ":" + nnAddr.getPort());
+
+    this.server = new ProxyHttpServer();
+    this.server.setAttribute("proxy.https.port", sslAddr.getPort());
+    this.server.setAttribute("name.node.address", nnAddr);
+    this.server.setAttribute("name.conf", new Configuration());
+    this.server.addGlobalFilter("ProxyFilter", ProxyFilter.class.getName(), null);
+    this.server.addServlet("listPaths", "/listPaths/*", ProxyListPathsServlet.class);
+    this.server.addServlet("data", "/data/*", ProxyFileDataServlet.class);
+    this.server.addServlet("streamFile", "/streamFile/*", ProxyStreamFile.class);
+  }
+  
+  /** add an SSL listener */
+  private void addSslListener(Configuration conf) throws IOException {
+    Configuration sslConf = new Configuration(false);
+    sslConf.addResource(conf.get("hdfsproxy.https.server.keystore.resource",
+        "ssl-server.xml"));
+    server.addSslListener(sslAddr, sslConf);
+  }
+  
+  /** add an http listener, only for testing purposes */
+  void addListener(InetSocketAddress addr, boolean findPort)
+      throws IOException {
+    this.server.addListener(addr, findPort);
+    LOG.warn("An HTTP listener is attached to the proxy server. " +
+        "It should only be used for testing purposes.");
+  }
+  
+  /** return the http port if any, only for testing purposes */
+  int getPort() throws IOException {
+    return server.getPort();
+  }
+  
+  /**
+   * Start the server.
+   */
+  public void start() throws IOException {
+    this.server.start();
+    LOG.info("HdfsProxy server up at: " + sslAddr.getHostName() + ":"
+        + sslAddr.getPort());
+  }
+  
+  /**
+   * Stop all server threads and wait for all to finish.
+   */
+  public void stop() {
+    try {
+      if (server != null) {
+        server.stop();
+        server.join();
+      }
+    } catch (InterruptedException ie) {
+    }
+  }
+  
+  /**
+   * Wait for service to finish.
+   * (Normally, it runs forever.)
+   */
+  public void join() {
+    try {
+      this.server.join();
+    } catch (InterruptedException ie) {
+    }
+  }
+  
+  private static enum StartupOption {
+    RELOAD("-reloadPermFiles"), CLEAR("-clearUgiCache"), REGULAR("-regular");
+
+    private String name = null;
+
+    private StartupOption(String arg) {
+      this.name = arg;
+    }
+
+    public String getName() {
+      return name;
+    }
+  }
+
+  private static void printUsage() {
+    System.err.println("Usage: hdfsproxy ["
+        + StartupOption.RELOAD.getName() + "] | ["
+        + StartupOption.CLEAR.getName() + "]");
+  }
+
+  private static StartupOption parseArguments(String args[]) {
+    int argsLen = (args == null) ? 0 : args.length;
+    StartupOption startOpt = StartupOption.REGULAR;
+    for (int i = 0; i < argsLen; i++) {
+      String cmd = args[i];
+      if (StartupOption.RELOAD.getName().equalsIgnoreCase(cmd)) {
+        startOpt = StartupOption.RELOAD;
+      } else if (StartupOption.CLEAR.getName().equalsIgnoreCase(cmd)) {
+        startOpt = StartupOption.CLEAR;
+      } else if (StartupOption.REGULAR.getName().equalsIgnoreCase(cmd)) {
+        startOpt = StartupOption.REGULAR;
+      } else
+        return null;
+    }
+    return startOpt;
+  }
+
+  /**
+   * Dummy hostname verifier that is used to bypass hostname checking
+   */
+  private static class DummyHostnameVerifier implements HostnameVerifier {
+    public boolean verify(String hostname, SSLSession session) {
+      return true;
+    }
+  }
+
+  private static HttpsURLConnection openConnection(String hostname, int port,
+      String path) throws IOException {
+    try {
+      final URL url = new URI("https", null, hostname, port, path, null, null)
+          .toURL();
+      HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
+      // bypass hostname verification
+      conn.setHostnameVerifier(new DummyHostnameVerifier());
+      conn.setRequestMethod("GET");
+      return conn;
+    } catch (URISyntaxException e) {
+      throw (IOException) new IOException().initCause(e);
+    }
+  }
+
+  private static void setupSslProps(Configuration conf) {
+    Configuration sslConf = new Configuration(false);
+    sslConf.addResource(conf.get("hdfsproxy.https.server.keystore.resource",
+        "ssl-server.xml"));
+    System.setProperty("javax.net.ssl.trustStore", sslConf
+        .get("ssl.server.truststore.location"));
+    System.setProperty("javax.net.ssl.trustStorePassword", sslConf.get(
+        "ssl.server.truststore.password", ""));
+    System.setProperty("javax.net.ssl.trustStoreType", sslConf.get(
+        "ssl.server.truststore.type", "jks"));
+    System.setProperty("javax.net.ssl.keyStore", sslConf
+        .get("ssl.server.keystore.location"));
+    System.setProperty("javax.net.ssl.keyStorePassword", sslConf.get(
+        "ssl.server.keystore.password", ""));
+    System.setProperty("javax.net.ssl.keyPassword", sslConf.get(
+        "ssl.server.keystore.keypassword", ""));
+    System.setProperty("javax.net.ssl.keyStoreType", sslConf.get(
+        "ssl.server.keystore.type", "jks"));
+  }
+
+  private static InetSocketAddress getSslAddr(Configuration conf) throws IOException {
+    String addr = conf.get("hdfsproxy.https.address");
+    if (addr == null)
+      throw new IOException("HdfsProxy address is not specified");
+    return NetUtils.createSocketAddr(addr);
+  }
+
+  private static boolean sendCommand(Configuration conf, String path)
+      throws IOException {
+    setupSslProps(conf);
+    int sslPort = getSslAddr(conf).getPort();
+    int err = 0;
+    StringBuilder b = new StringBuilder();
+    HostsFileReader hostsReader = new HostsFileReader(conf.get("hdfsproxy.hosts",
+        "hdfsproxy-hosts"), "");
+    Set<String> hostsList = hostsReader.getHosts();
+    for (String hostname : hostsList) {
+      HttpsURLConnection connection = null;
+      try {
+        connection = openConnection(hostname, sslPort, path);
+        connection.connect();
+        if (connection.getResponseCode() != HttpServletResponse.SC_OK) {
+          b.append("\n\t" + hostname + ": " + connection.getResponseCode()
+              + " " + connection.getResponseMessage());
+          err++;
+        }
+      } catch (IOException e) {
+        b.append("\n\t" + hostname + ": " + e.getLocalizedMessage());
+        err++;
+      } finally {
+        if (connection != null)
+          connection.disconnect();
+      }
+    }
+    if (err > 0) {
+      System.err.print("Command failed on the following "
+          + err + " host" + (err==1?":":"s:") + b.toString() + "\n");
+      return true;
+    }
+    return false;
+  }
+
+  public static HdfsProxy createHdfsProxy(String argv[], Configuration conf)
+      throws IOException {
+    if (conf == null) {
+      conf = new Configuration(false);
+      conf.addResource("hdfsproxy-default.xml");
+    }
+    StartupOption startOpt = parseArguments(argv);
+    if (startOpt == null) {
+      printUsage();
+      return null;
+    }
+
+    switch (startOpt) {
+    case RELOAD:
+      boolean error = sendCommand(conf, "/reloadPermFiles");
+      System.exit(error ? 1 : 0);
+    case CLEAR:
+      error = sendCommand(conf, "/clearUgiCache");
+      System.exit(error ? 1 : 0);
+    default:
+    }
+
+    StringUtils.startupShutdownMessage(HdfsProxy.class, argv, LOG);
+    HdfsProxy proxy = new HdfsProxy(conf);
+    proxy.addSslListener(conf);
+    proxy.start();
+    return proxy;
+  }
+
+  public static void main(String[] argv) throws Exception {
+    try {
+      HdfsProxy proxy = createHdfsProxy(argv, null);
+      if (proxy != null)
+        proxy.join();
+    } catch (Throwable e) {
+      LOG.error(StringUtils.stringifyException(e));
+      System.exit(-1);
+    }
+  }
+}
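
For reference, a minimal sketch of launching the proxy programmatically, mirroring main()/createHdfsProxy() above. It assumes an hdfsproxy-default.xml and the ssl-server.xml keystore resource are on the classpath with the keys read in initialize() and addSslListener(); the class name and the name node address are made-up placeholders, not part of this commit.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfsproxy.HdfsProxy;

    // Hypothetical helper class, not part of this commit.
    public class StartProxySketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(false);
        conf.addResource("hdfsproxy-default.xml");      // must supply hdfsproxy.https.address etc.
        conf.set("hdfsproxy.dfs.namenode.address", "namenode.example.com:8020");  // placeholder
        // Parses startup options, attaches the SSL listener and starts the Jetty server.
        HdfsProxy proxy = HdfsProxy.createHdfsProxy(args, conf);
        if (proxy != null) {
          proxy.join();                                 // normally runs forever
        }
      }
    }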

Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java?rev=712305&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java Fri Nov  7 15:02:30 2008
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfsproxy;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.hdfs.protocol.ClientProtocol;
+import org.apache.hadoop.hdfs.server.namenode.FileDataServlet;
+import org.apache.hadoop.security.UnixUserGroupInformation;
+
+/** {@inheritDoc} */
+public class ProxyFileDataServlet extends FileDataServlet {
+  /** For java.io.Serializable */
+  private static final long serialVersionUID = 1L;
+
+  /** {@inheritDoc} */
+  @Override
+  protected URI createUri(FileStatus i, UnixUserGroupInformation ugi,
+      ClientProtocol nnproxy, HttpServletRequest request) throws IOException,
+      URISyntaxException {
+    return new URI(request.getScheme(), null, request.getServerName(), request
+        .getServerPort(), "/streamFile", "filename=" + i.getPath() + "&ugi="
+        + ugi, null);
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
+    return (UnixUserGroupInformation) request.getAttribute("authorized.ugi");
+  }
+}
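
With made-up host, port, file and user values, the redirect URI assembled by createUri() above comes out roughly as:

    https://proxy.example.com:8443/streamFile?filename=/user/alice/part-00000&ugi=alice,users

so the data transfer itself is served by ProxyStreamFile under the same authorized.ugi attribute that ProxyFilter set on the request.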

Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java?rev=712305&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java Fri Nov  7 15:02:30 2008
@@ -0,0 +1,330 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfsproxy;
+
+import java.io.IOException;
+import java.math.BigInteger;
+import java.security.cert.X509Certificate;
+import java.security.cert.CertificateExpiredException;
+import java.security.cert.CertificateNotYetValidException;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.regex.Pattern;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UnixUserGroupInformation;
+
+public class ProxyFilter implements Filter {
+  public static final Log LOG = LogFactory.getLog(ProxyFilter.class);
+
+  /** Pattern for triggering reload of user permissions */
+  protected static final Pattern RELOAD_PATTERN = Pattern
+      .compile("^(/reloadPermFiles)$");
+  /** Pattern for triggering clearing of the ugi cache */
+  protected static final Pattern CLEAR_PATTERN = Pattern
+      .compile("^(/clearUgiCache)$");
+  /** Pattern for a filter to find out if a request is an HFTP/HSFTP request */
+  protected static final Pattern HFTP_PATTERN = Pattern
+      .compile("^(/listPaths|/data|/streamFile)$");
+  /**
+   * Pattern for a filter to find out if an HFTP/HSFTP request stores its file
+   * path in the extra path information associated with the URL; if not, the
+   * file path is stored in request parameter "filename"
+   */
+  protected static final Pattern FILEPATH_PATTERN = Pattern
+      .compile("^(/listPaths|/data)$");
+
+  private static volatile Map<String, Set<Path>> permsMap;
+  private static volatile Map<String, Set<BigInteger>> certsMap;
+  static {
+    Configuration conf = new Configuration(false);
+    conf.addResource("hdfsproxy-default.xml");
+    Map<String, Set<Path>> pMap = getPermMap(conf);
+    permsMap = pMap != null ? pMap : new HashMap<String, Set<Path>>();
+    Map<String, Set<BigInteger>> cMap = getCertsMap(conf);
+    certsMap = cMap != null ? cMap : new HashMap<String, Set<BigInteger>>();
+  }
+
+  /** {@inheritDoc} */
+  public void init(FilterConfig filterConfig) throws ServletException {
+  }
+
+  private static Map<String, Set<Path>> getPermMap(Configuration conf) {
+    String permLoc = conf.get("hdfsproxy.user.permissions.file.location",
+        "user-permissions.xml");
+    if (conf.getResource(permLoc) == null) {
+      LOG.warn("HdfsProxy user permissions file not found");
+      return null;
+    }
+    Configuration permConf = new Configuration(false);
+    permConf.addResource(permLoc);
+    Map<String, Set<Path>> map = new HashMap<String, Set<Path>>();
+    for (Map.Entry<String, String> e : permConf) {
+      String k = e.getKey();
+      String v = e.getValue();
+      if (k != null && k.length() != 0 && v != null && v.length() != 0) {
+        Set<Path> pathSet = new HashSet<Path>();
+        String[] paths = v.split(",\\s*");
+        for (String p : paths) {
+          if (p.length() != 0) {
+            pathSet.add(new Path(p));
+          }
+        }
+        map.put(k, pathSet);
+      }
+    }
+    return map;
+  }
+
+  private static Map<String, Set<BigInteger>> getCertsMap(Configuration conf) {
+    String certsLoc = conf.get("hdfsproxy.user.certs.file.location",
+        "user-certs.xml");
+    if (conf.getResource(certsLoc) == null) {
+      LOG.warn("HdfsProxy user certs file not found");
+      return null;
+    }
+    Configuration certsConf = new Configuration(false);
+    certsConf.addResource(certsLoc);
+    Map<String, Set<BigInteger>> map = new HashMap<String, Set<BigInteger>>();
+    for (Map.Entry<String, String> e : certsConf) {
+      String k = e.getKey();
+      String v = e.getValue().trim();
+      if (k != null && k.length() != 0 && v != null && v.length() != 0) {
+        Set<BigInteger> numSet = new HashSet<BigInteger>();
+        String[] serialnumbers = v.split("\\s*,\\s*");
+        for (String num : serialnumbers) {
+          if (num.length() != 0) {
+            numSet.add(new BigInteger(num, 16));
+          }
+        }
+        map.put(k, numSet);
+      }
+    }
+    return map;
+  }
+
+  /** {@inheritDoc} */
+  public void destroy() {
+  }
+
+  /** {@inheritDoc} */
+  public void doFilter(ServletRequest request, ServletResponse response,
+      FilterChain chain) throws IOException, ServletException {
+
+    HttpServletRequest rqst = (HttpServletRequest) request;
+    HttpServletResponse rsp = (HttpServletResponse) response;
+
+    if (LOG.isDebugEnabled()) {
+      StringBuilder b = new StringBuilder("Request from ").append(
+          rqst.getRemoteHost()).append("/").append(rqst.getRemoteAddr())
+          .append(":").append(rqst.getRemotePort());
+
+      @SuppressWarnings("unchecked")
+      Enumeration<String> e = rqst.getAttributeNames();
+      for (; e.hasMoreElements();) {
+        String attribute = e.nextElement();
+        b.append("\n  " + attribute + " => " + rqst.getAttribute(attribute));
+      }
+
+      X509Certificate[] userCerts = (X509Certificate[]) rqst
+          .getAttribute("javax.servlet.request.X509Certificate");
+      if (userCerts != null)
+        for (X509Certificate cert : userCerts)
+          b.append("\n Client certificate Subject Name is "
+              + cert.getSubjectX500Principal().getName());
+
+      b.append("\n The Scheme is " + rqst.getScheme());
+      b.append("\n The Auth Type is " + rqst.getAuthType());
+      b.append("\n The Path Info is " + rqst.getPathInfo());
+      b.append("\n The Translated Path Info is " + rqst.getPathTranslated());
+      b.append("\n The Context Path is " + rqst.getContextPath());
+      b.append("\n The Query String is " + rqst.getQueryString());
+      b.append("\n The Remote User is " + rqst.getRemoteUser());
+      b.append("\n The User Principal is " + rqst.getUserPrincipal());
+      b.append("\n The Request URI is " + rqst.getRequestURI());
+      b.append("\n The Request URL is " + rqst.getRequestURL());
+      b.append("\n The Servlet Path is " + rqst.getServletPath());
+
+      LOG.debug(b.toString());
+    }
+
+    if (rqst.getScheme().equalsIgnoreCase("https")) {
+      boolean isAuthorized = false;
+      X509Certificate[] certs = (X509Certificate[]) rqst
+          .getAttribute("javax.servlet.request.X509Certificate");
+      if (certs == null || certs.length == 0) {
+        rsp.sendError(HttpServletResponse.SC_BAD_REQUEST,
+            "No client SSL certificate received");
+        return;
+      }
+      for (X509Certificate cert : certs) {
+        try {
+          cert.checkValidity();
+        } catch (CertificateExpiredException e) {
+          LOG.info("Received cert for "
+              + cert.getSubjectX500Principal().getName() + " expired");
+          rsp
+              .sendError(HttpServletResponse.SC_FORBIDDEN,
+                  "Certificate expired");
+          return;
+        } catch (CertificateNotYetValidException e) {
+          LOG.info("Received cert for "
+              + cert.getSubjectX500Principal().getName() + " is not yet valid");
+          rsp.sendError(HttpServletResponse.SC_FORBIDDEN,
+              "Certificate is not yet valid");
+          return;
+        }
+      }
+
+      String[] tokens = certs[0].getSubjectX500Principal().getName().split(
+          "\\s*,\\s*");
+      String userID = null;
+      for (String s : tokens) {
+        if (s.startsWith("CN=")) {
+          userID = s;
+          break;
+        }
+      }
+      if (userID == null || userID.length() < 4) {
+        LOG.info("Can't retrieve user ID from SSL certificate");
+        rsp.sendError(HttpServletResponse.SC_FORBIDDEN,
+            "Can't retrieve user ID from SSL certificate");
+        return;
+      }
+      userID = userID.substring(3);
+
+      String servletPath = rqst.getServletPath();
+      if (HFTP_PATTERN.matcher(servletPath).matches()) {
+        // request is an HSFTP request
+        if (FILEPATH_PATTERN.matcher(servletPath).matches()) {
+          // file path as part of the URL
+          isAuthorized = checkPath(userID, certs[0],
+              rqst.getPathInfo() != null ? rqst.getPathInfo() : "/");
+        } else {
+          // file path is stored in "filename" parameter
+          isAuthorized = checkPath(userID, certs[0], rqst
+              .getParameter("filename"));
+        }
+      } else if (RELOAD_PATTERN.matcher(servletPath).matches()
+          && checkUser("Admin", certs[0])) {
+        Configuration conf = new Configuration(false);
+        conf.addResource("hdfsproxy-default.xml");
+        Map<String, Set<Path>> permsMap = getPermMap(conf);
+        Map<String, Set<BigInteger>> certsMap = getCertsMap(conf);
+        if (permsMap == null || certsMap == null) {
+          LOG.warn("Permission files reloading failed");
+          rsp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
+              "Permission files reloading failed");
+          return;
+        }
+        ProxyFilter.permsMap = permsMap;
+        ProxyFilter.certsMap = certsMap;
+        LOG.info("User permissions and user certs files reloaded");
+        rsp.setStatus(HttpServletResponse.SC_OK);
+        return;
+      } else if (CLEAR_PATTERN.matcher(servletPath).matches()
+          && checkUser("Admin", certs[0])) {
+        ProxyUgiManager.clearCache();
+        LOG.info("Ugi cache cleared");
+        rsp.setStatus(HttpServletResponse.SC_OK);
+        return;
+      }
+
+      if (!isAuthorized) {
+        rsp.sendError(HttpServletResponse.SC_FORBIDDEN, "Unauthorized access");
+        return;
+      }
+      // request is authorized, set ugi for servlets
+      UnixUserGroupInformation ugi = ProxyUgiManager
+          .getUgiForUser(userID);
+      if (ugi == null) {
+        LOG.info("Can't retrieve ugi for user " + userID);
+        rsp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
+            "Can't retrieve ugi for user " + userID);
+        return;
+      }
+      rqst.setAttribute("authorized.ugi", ugi);
+    } else { // http request, set ugi for servlets, only for testing purposes
+      String ugi = rqst.getParameter("ugi");
+      rqst.setAttribute("authorized.ugi", new UnixUserGroupInformation(ugi
+          .split(",")));
+    }
+
+    chain.doFilter(request, response);
+  }
+
+  /** check that client's cert is listed in the user certs file */
+  private boolean checkUser(String userID, X509Certificate cert) {
+    Set<BigInteger> numSet = certsMap.get(userID);
+    if (numSet == null) {
+      LOG.info("User " + userID + " is not configured in the user certs file");
+      return false;
+    }
+    if (!numSet.contains(cert.getSerialNumber())) {
+      LOG.info("Cert with serial number " + cert.getSerialNumber()
+          + " is not listed for user " + userID);
+      return false;
+    }
+    return true;
+  }
+
+  /** check that the requested path is listed in the user permissions file */
+  private boolean checkPath(String userID, X509Certificate cert, String pathInfo) {
+    if (!checkUser(userID, cert)) {
+      return false;
+    }
+
+    Set<Path> pathSet = permsMap.get(userID);
+    if (pathSet == null) {
+      LOG.info("User " + userID
+              + " is not listed in the user permissions file");
+      return false;
+    }
+    if (pathInfo == null || pathInfo.length() == 0) {
+      LOG.info("Can't get file path from HTTPS request; user is " + userID);
+      return false;
+    }
+
+    Path userPath = new Path(pathInfo);
+    while (userPath != null) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("\n Checking file path " + userPath);
+      }
+      if (pathSet.contains(userPath))
+        return true;
+      userPath = userPath.getParent();
+    }
+    LOG.info("User " + userID + " is not authorized to access " + pathInfo);
+    return false;
+  }
+}
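
The parent walk at the bottom of checkPath() means that an entry such as /data in the user permissions file also authorizes every path underneath it. A small sketch of how org.apache.hadoop.fs.Path behaves in that loop; the class name and requested path are made up and not part of this commit.

    import org.apache.hadoop.fs.Path;

    // Hypothetical demo class: shows the ancestor walk performed by checkPath().
    public class PathWalkSketch {
      public static void main(String[] args) {
        Path p = new Path("/data/2008/11/07");   // made-up requested path
        while (p != null) {
          System.out.println(p);   // /data/2008/11/07, /data/2008/11, /data/2008, /data, /
          p = p.getParent();       // returns null once the root has been reached
        }
      }
    }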

Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyHttpServer.java?rev=712305&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyHttpServer.java (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyHttpServer.java Fri Nov  7 15:02:30 2008
@@ -0,0 +1,252 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.util.Map;
+
+import javax.servlet.http.HttpServlet;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+
+import org.mortbay.http.SocketListener;
+import org.mortbay.http.SslListener;
+import org.mortbay.jetty.servlet.Dispatcher;
+import org.mortbay.jetty.servlet.FilterHolder;
+import org.mortbay.jetty.servlet.WebApplicationContext;
+import org.mortbay.jetty.servlet.WebApplicationHandler;
+
+/**
+ * Create a Jetty embedded server to answer http/https requests.
+ */
+public class ProxyHttpServer {
+  public static final Log LOG = LogFactory.getLog(ProxyHttpServer.class);
+
+  protected final org.mortbay.jetty.Server webServer;
+  protected final WebApplicationContext webAppContext;
+  protected SslListener sslListener;
+  protected SocketListener listener;
+  protected boolean findPort;
+
+  /**
+   * Create the embedded Jetty server. Listeners are attached separately via
+   * {@link #addListener(InetSocketAddress, boolean)} and
+   * {@link #addSslListener(InetSocketAddress, Configuration)}.
+   */
+  public ProxyHttpServer() throws IOException {
+    webServer = new org.mortbay.jetty.Server();
+    webAppContext = webServer.addWebApplication("/", "/");
+  }
+
+  /**
+   * Add a servlet to the server.
+   * 
+   * @param name
+   *            The name of the servlet (can be passed as null)
+   * @param pathSpec
+   *            The path spec for the servlet
+   * @param clazz
+   *            The servlet class
+   */
+  public void addServlet(String name, String pathSpec,
+      Class<? extends HttpServlet> clazz) {
+    try {
+      if (name == null) {
+        webAppContext.addServlet(pathSpec, clazz.getName());
+      } else {
+        webAppContext.addServlet(name, pathSpec, clazz.getName());
+      }
+    } catch (ClassNotFoundException cnfe) {
+      throw new RuntimeException("Problem instantiating class", cnfe);
+    } catch (InstantiationException ie) {
+      throw new RuntimeException("Problem instantiating class", ie);
+    } catch (IllegalAccessException iae) {
+      throw new RuntimeException("Problem instantiating class", iae);
+    }
+  }
+
+  /** add a global filter */
+  public void addGlobalFilter(String name, String classname,
+      Map<String, String> parameters) {
+    final String[] ALL_URLS = { "/*" };
+    defineFilter(webAppContext, name, classname, parameters, ALL_URLS);
+    LOG.info("Added global filter" + name + " (class=" + classname + ")");
+  }
+
+  /**
+   * Define a filter for a context and set up default url mappings.
+   */
+  protected void defineFilter(WebApplicationContext ctx, String name,
+      String classname, Map<String, String> parameters, String[] urls) {
+    WebApplicationHandler handler = ctx.getWebApplicationHandler();
+    FilterHolder holder = handler.defineFilter(name, classname);
+    if (parameters != null) {
+      for (Map.Entry<String, String> e : parameters.entrySet()) {
+        holder.setInitParameter(e.getKey(), e.getValue());
+      }
+    }
+    for (String url : urls) {
+      handler.addFilterPathMapping(url, name, Dispatcher.__ALL);
+    }
+  }
+
+  /**
+   * Set a value in the webapp context.
+   * 
+   * @param name
+   *            The name of the attribute
+   * @param value
+   *            The value of the attribute
+   */
+  public void setAttribute(String name, Object value) {
+    webAppContext.setAttribute(name, value);
+  }
+
+  /**
+   * Get the value in the webapp context.
+   * 
+   * @param name
+   *            The name of the attribute
+   * @return The value of the attribute
+   */
+  public Object getAttribute(String name) {
+    return webAppContext.getAttribute(name);
+  }
+
+  /** return the http port that the server is on */
+  public int getPort() throws IOException {
+    if (listener == null)
+      throw new IOException("No http listerner found");
+    return listener.getPort();
+  }
+
+  public void setThreads(int min, int max) {
+    sslListener.setMinThreads(min);
+    sslListener.setMaxThreads(max);
+  }
+
+  /**
+   * Configure an http listener on the server
+   * 
+   * @param addr
+   *            address to listen on
+   * @param findPort
+   *            whether the listener should bind the given port and increment by
+   *            1 until it finds a free port
+   */
+  public void addListener(InetSocketAddress addr, boolean findPort)
+      throws IOException {
+    if (listener != null || webServer.isStarted()) {
+      throw new IOException("Failed to add listener");
+    }
+    this.findPort = findPort;
+    listener = new SocketListener();
+    listener.setHost(addr.getHostName());
+    listener.setPort(addr.getPort());
+    webServer.addListener(listener);
+  }
+
+  /**
+   * Configure an ssl listener on the server.
+   * 
+   * @param addr
+   *            address to listen on
+   * @param sslConf
+   *            conf to retrieve SSL properties from
+   */
+  public void addSslListener(InetSocketAddress addr, Configuration sslConf)
+      throws IOException {
+    if (sslListener != null || webServer.isStarted()) {
+      throw new IOException("Failed to add ssl listener");
+    }
+    sslListener = new SslListener();
+    sslListener.setHost(addr.getHostName());
+    sslListener.setPort(addr.getPort());
+    sslListener.setKeystore(sslConf.get("ssl.server.keystore.location"));
+    sslListener.setPassword(sslConf.get("ssl.server.keystore.password", ""));
+    sslListener.setKeyPassword(sslConf.get("ssl.server.keystore.keypassword",
+        ""));
+    sslListener.setKeystoreType(sslConf.get("ssl.server.keystore.type", "jks"));
+    sslListener.setNeedClientAuth(true);
+    webServer.addListener(sslListener);
+    System.setProperty("javax.net.ssl.trustStore", sslConf
+        .get("ssl.server.truststore.location"));
+    System.setProperty("javax.net.ssl.trustStorePassword", sslConf.get(
+        "ssl.server.truststore.password", ""));
+    System.setProperty("javax.net.ssl.trustStoreType", sslConf.get(
+        "ssl.server.truststore.type", "jks"));
+  }
+
+  /**
+   * Start the server. Does not wait for the server to start.
+   */
+  public void start() throws IOException {
+    try {
+      while (true) {
+        try {
+          webServer.start();
+          break;
+        } catch (org.mortbay.util.MultiException ex) {
+          // if the multi exception contains ONLY a bind exception,
+          // then try the next port number.
+          boolean needNewPort = false;
+          if (ex.size() == 1) {
+            Exception sub = ex.getException(0);
+            if (sub instanceof java.net.BindException) {
+              if (!findPort || listener == null)
+                throw sub; // java.net.BindException
+              needNewPort = true;
+            }
+          }
+          if (!needNewPort)
+            throw ex;
+          listener.setPort(listener.getPort() + 1);
+        }
+      }
+    } catch (IOException ie) {
+      throw ie;
+    } catch (Exception e) {
+      IOException ie = new IOException("Problem starting http server");
+      ie.initCause(e);
+      throw ie;
+    }
+  }
+
+  /**
+   * stop the server
+   */
+  public void stop() throws InterruptedException {
+    webServer.stop();
+  }
+
+  /**
+   * wait for the server
+   */
+  public void join() throws InterruptedException {
+    webServer.join();
+  }
+}
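
A minimal sketch of driving ProxyHttpServer directly with the plain HTTP listener that HdfsProxy reserves for tests; the class name and address are placeholders, not part of this commit, and the name.node.address/name.conf attribute wiring done by HdfsProxy.initialize() is omitted.

    package org.apache.hadoop.hdfsproxy;        // sketch placed in the same package for brevity

    import java.net.InetSocketAddress;

    // Hypothetical demo class, not part of this commit.
    public class ProxyServerSketch {
      public static void main(String[] args) throws Exception {
        ProxyHttpServer server = new ProxyHttpServer();
        server.addGlobalFilter("ProxyFilter", ProxyFilter.class.getName(), null);
        server.addServlet("listPaths", "/listPaths/*", ProxyListPathsServlet.class);
        server.addListener(new InetSocketAddress("127.0.0.1", 0), true);  // findPort retries on bind failure
        server.start();
        System.out.println("HTTP listener on port " + server.getPort());
        server.join();
      }
    }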

Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java?rev=712305&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java Fri Nov  7 15:02:30 2008
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfsproxy;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.hadoop.hdfs.server.namenode.ListPathsServlet;
+import org.apache.hadoop.security.UnixUserGroupInformation;
+
+/** {@inheritDoc} */
+public class ProxyListPathsServlet extends ListPathsServlet {
+  /** For java.io.Serializable */
+  private static final long serialVersionUID = 1L;
+
+  /** {@inheritDoc} */
+  @Override
+  protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
+    return (UnixUserGroupInformation) request.getAttribute("authorized.ugi");
+  }
+}

Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java?rev=712305&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java Fri Nov  7 15:02:30 2008
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfsproxy;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.hadoop.hdfs.DFSClient;
+import org.apache.hadoop.hdfs.server.namenode.StreamFile;
+import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.conf.Configuration;
+
+/** {@inheritDoc} */
+public class ProxyStreamFile extends StreamFile {
+  /** For java.io.Serializable */
+  private static final long serialVersionUID = 1L;
+
+  /** {@inheritDoc} */
+  @Override
+  protected DFSClient getDFSClient(HttpServletRequest request)
+      throws IOException {
+    ServletContext context = getServletContext();
+    Configuration conf = new Configuration((Configuration) context
+        .getAttribute("name.conf"));
+    UnixUserGroupInformation.saveToConf(conf,
+        UnixUserGroupInformation.UGI_PROPERTY_NAME, getUGI(request));
+    InetSocketAddress nameNodeAddr = (InetSocketAddress) context
+        .getAttribute("name.node.address");
+    return new DFSClient(nameNodeAddr, conf);
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
+    return (UnixUserGroupInformation) request.getAttribute("authorized.ugi");
+  }
+}

Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUgiManager.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUgiManager.java?rev=712305&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUgiManager.java (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUgiManager.java Fri Nov  7 15:02:30 2008
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.regex.Pattern;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.util.Shell;
+
+/** A ugi manager that maintains a temporary ugi cache */
+public class ProxyUgiManager {
+  private static final Map<String, CachedUgi> ugiCache = new HashMap<String, CachedUgi>();
+  private static long ugiLifetime;
+  /** username may only consist of 0-9a-zA-Z and underscore, i.e. \w */
+  private static final Pattern USERNAME_PATTERN = Pattern.compile("^\\w+$");
+  static final int CLEANUP_THRESHOLD = 1000;
+
+  static {
+    Configuration conf = new Configuration(false);
+    conf.addResource("hdfsproxy-default.xml");
+    ugiLifetime = conf.getLong("hdfsproxy.ugi.cache.ugi.lifetime", 15) * 60 * 1000L;
+  }
+
+  /**
+   * Retrieve a ugi for a user. Try the cache first; if not found, get it by
+   * running a shell command.
+   */
+  public static synchronized UnixUserGroupInformation getUgiForUser(
+      String userName) {
+    long now = System.currentTimeMillis();
+    long cutoffTime = now - ugiLifetime;
+    CachedUgi cachedUgi = ugiCache.get(userName);
+    if (cachedUgi != null && cachedUgi.getInitTime() > cutoffTime)
+      return cachedUgi.getUgi();
+    UnixUserGroupInformation ugi = null;
+    try {
+      ugi = getUgi(userName);
+    } catch (IOException e) {
+      return null;
+    }
+    if (ugiCache.size() > CLEANUP_THRESHOLD) { // remove expired ugi's first
+      for (Iterator<Map.Entry<String, CachedUgi>> it = ugiCache.entrySet()
+          .iterator(); it.hasNext();) {
+        Map.Entry<String, CachedUgi> e = it.next();
+        if (e.getValue().getInitTime() < cutoffTime) {
+          it.remove();
+        }
+      }
+    }
+    ugiCache.put(ugi.getUserName(), new CachedUgi(ugi, now));
+    return ugi;
+  }
+
+  /** clear the ugi cache */
+  public static synchronized void clearCache() {
+    ugiCache.clear();
+  }
+
+  /** set ugi lifetime, only for junit testing purposes */
+  static synchronized void setUgiLifetime(long lifetime) {
+    ugiLifetime = lifetime;
+  }
+
+  /** save a ugi to the cache, only for junit testing purposes */
+  static synchronized void saveToCache(UnixUserGroupInformation ugi) {
+    ugiCache.put(ugi.getUserName(), new CachedUgi(ugi, System
+        .currentTimeMillis()));
+  }
+
+  /** get cache size, only for junit testing purposes */
+  static synchronized int getCacheSize() {
+    return ugiCache.size();
+  }
+
+  /**
+   * Get the ugi for a user by running shell command "id -Gn"
+   * 
+   * @param userName name of the user
+   * @return ugi of the user
+   * @throws IOException if any error is encountered while running the command
+   */
+  private static UnixUserGroupInformation getUgi(String userName)
+      throws IOException {
+    if (userName == null || !USERNAME_PATTERN.matcher(userName).matches())
+      throw new IOException("Invalid username=" + userName);
+    String[] cmd = new String[] { "bash", "-c", "id -Gn '" + userName + "'"};
+    String[] groups = Shell.execCommand(cmd).split("\\s+");
+    return new UnixUserGroupInformation(userName, groups);
+  }
+
+  /** cached ugi object with its associated init time */
+  private static class CachedUgi {
+    final UnixUserGroupInformation ugi;
+    final long initTime;
+
+    CachedUgi(UnixUserGroupInformation ugi, long initTime) {
+      this.ugi = ugi;
+      this.initTime = initTime;
+    }
+
+    UnixUserGroupInformation getUgi() {
+      return ugi;
+    }
+
+    long getInitTime() {
+      return initTime;
+    }
+
+    /** {@inheritDoc} */
+    public int hashCode() {
+      return ugi.hashCode();
+    }
+
+    static boolean isEqual(Object a, Object b) {
+      return a == b || (a != null && a.equals(b));
+    }
+
+    /** {@inheritDoc} */
+    public boolean equals(Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (obj != null && obj instanceof CachedUgi) {
+        CachedUgi that = (CachedUgi) obj;
+        return isEqual(this.ugi, that.ugi) && this.initTime == that.initTime;
+      }
+      return false;
+    }
+
+  }
+}
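
A short sketch of the lookup path that ProxyFilter relies on; the class name and user name are made up and not part of this commit.

    import java.util.Arrays;

    import org.apache.hadoop.hdfsproxy.ProxyUgiManager;
    import org.apache.hadoop.security.UnixUserGroupInformation;

    // Hypothetical demo class, not part of this commit.
    public class UgiLookupSketch {
      public static void main(String[] args) {
        // On a cache miss the groups are resolved with "id -Gn <user>" and the result
        // is cached for hdfsproxy.ugi.cache.ugi.lifetime minutes (15 by default).
        UnixUserGroupInformation ugi = ProxyUgiManager.getUgiForUser("alice");  // made-up user
        if (ugi == null) {
          System.out.println("unknown local user, or the id command failed");
        } else {
          System.out.println(ugi.getUserName() + " " + Arrays.toString(ugi.getGroupNames()));
        }
        ProxyUgiManager.clearCache();   // what the /clearUgiCache admin request triggers
      }
    }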

Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java?rev=712305&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java Fri Nov  7 15:02:30 2008
@@ -0,0 +1,262 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.util.Random;
+
+import junit.framework.TestCase;
+
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.impl.Log4JLogger;
+import org.apache.log4j.Level;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.server.datanode.DataNode;
+import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.tools.DistCp;
+import org.apache.hadoop.util.ToolRunner;
+
+/**
+ * A JUnit test for HdfsProxy
+ */
+public class TestHdfsProxy extends TestCase {
+  {
+    ((Log4JLogger) LogFactory.getLog("org.apache.hadoop.hdfs.StateChange"))
+        .getLogger().setLevel(Level.OFF);
+    ((Log4JLogger) DataNode.LOG).getLogger().setLevel(Level.OFF);
+    ((Log4JLogger) FSNamesystem.LOG).getLogger().setLevel(Level.OFF);
+    ((Log4JLogger) DistCp.LOG).getLogger().setLevel(Level.ALL);
+  }
+
+  static final URI LOCAL_FS = URI.create("file:///");
+
+  private static final int NFILES = 10;
+  private static String TEST_ROOT_DIR = new Path(System.getProperty(
+      "test.build.data", "/tmp")).toString().replace(' ', '+');
+
+  /**
+   * class MyFile contains enough information to recreate the contents of a
+   * single file.
+   */
+  private static class MyFile {
+    private static Random gen = new Random();
+    private static final int MAX_LEVELS = 3;
+    private static final int MAX_SIZE = 8 * 1024;
+    private static String[] dirNames = { "zero", "one", "two", "three", "four",
+        "five", "six", "seven", "eight", "nine" };
+    private final String name;
+    private int size = 0;
+    private long seed = 0L;
+
+    MyFile() {
+      this(gen.nextInt(MAX_LEVELS));
+    }
+
+    MyFile(int nLevels) {
+      String xname = "";
+      if (nLevels != 0) {
+        int[] levels = new int[nLevels];
+        for (int idx = 0; idx < nLevels; idx++) {
+          levels[idx] = gen.nextInt(10);
+        }
+        StringBuffer sb = new StringBuffer();
+        for (int idx = 0; idx < nLevels; idx++) {
+          sb.append(dirNames[levels[idx]]);
+          sb.append("/");
+        }
+        xname = sb.toString();
+      }
+      long fidx = gen.nextLong() & Long.MAX_VALUE;
+      name = xname + Long.toString(fidx);
+      reset();
+    }
+
+    void reset() {
+      final int oldsize = size;
+      do {
+        size = gen.nextInt(MAX_SIZE);
+      } while (oldsize == size);
+      final long oldseed = seed;
+      do {
+        seed = gen.nextLong() & Long.MAX_VALUE;
+      } while (oldseed == seed);
+    }
+
+    String getName() {
+      return name;
+    }
+
+    int getSize() {
+      return size;
+    }
+
+    long getSeed() {
+      return seed;
+    }
+  }
+
+  private static MyFile[] createFiles(URI fsname, String topdir)
+      throws IOException {
+    return createFiles(FileSystem.get(fsname, new Configuration()), topdir);
+  }
+
+  /**
+   * create NFILES with random names and directory hierarchies with random (but
+   * reproducible) data in them.
+   */
+  private static MyFile[] createFiles(FileSystem fs, String topdir)
+      throws IOException {
+    Path root = new Path(topdir);
+    MyFile[] files = new MyFile[NFILES];
+    for (int i = 0; i < NFILES; i++) {
+      files[i] = createFile(root, fs);
+    }
+    return files;
+  }
+
+  private static MyFile createFile(Path root, FileSystem fs, int levels)
+      throws IOException {
+    MyFile f = levels < 0 ? new MyFile() : new MyFile(levels);
+    Path p = new Path(root, f.getName());
+    FSDataOutputStream out = fs.create(p);
+    byte[] toWrite = new byte[f.getSize()];
+    new Random(f.getSeed()).nextBytes(toWrite);
+    out.write(toWrite);
+    out.close();
+    FileSystem.LOG.info("created: " + p + ", size=" + f.getSize());
+    return f;
+  }
+
+  private static MyFile createFile(Path root, FileSystem fs) throws IOException {
+    return createFile(root, fs, -1);
+  }
+
+  private static boolean checkFiles(FileSystem fs, String topdir, MyFile[] files)
+      throws IOException {
+    return checkFiles(fs, topdir, files, false);
+  }
+
+  private static boolean checkFiles(FileSystem fs, String topdir,
+      MyFile[] files, boolean existingOnly) throws IOException {
+    Path root = new Path(topdir);
+
+    for (int idx = 0; idx < files.length; idx++) {
+      Path fPath = new Path(root, files[idx].getName());
+      try {
+        fs.getFileStatus(fPath);
+        FSDataInputStream in = fs.open(fPath);
+        byte[] toRead = new byte[files[idx].getSize()];
+        byte[] toCompare = new byte[files[idx].getSize()];
+        Random rb = new Random(files[idx].getSeed());
+        rb.nextBytes(toCompare);
+        assertEquals("Cannnot read file.", toRead.length, in.read(toRead));
+        in.close();
+        for (int i = 0; i < toRead.length; i++) {
+          if (toRead[i] != toCompare[i]) {
+            return false;
+          }
+        }
+        toRead = null;
+        toCompare = null;
+      } catch (FileNotFoundException fnfe) {
+        if (!existingOnly) {
+          throw fnfe;
+        }
+      }
+    }
+
+    return true;
+  }
+
+  /** delete directory and everything underneath it. */
+  private static void deldir(FileSystem fs, String topdir) throws IOException {
+    fs.delete(new Path(topdir), true);
+  }
+
+  /** verify hdfsproxy implements the hftp interface */
+  public void testHdfsProxyInterface() throws Exception {
+    MiniDFSCluster cluster = null;
+    HdfsProxy proxy = null;
+    try {
+
+      final Configuration dfsConf = new Configuration();
+      cluster = new MiniDFSCluster(dfsConf, 2, true, null);
+      cluster.waitActive();
+
+      final DistCp distcp = new DistCp(dfsConf);
+      final FileSystem localfs = FileSystem.get(LOCAL_FS, dfsConf);
+      final FileSystem hdfs = cluster.getFileSystem();
+      final Configuration proxyConf = new Configuration(false);
+      proxyConf.set("hdfsproxy.dfs.namenode.address", hdfs.getUri().getHost() + ":"
+          + hdfs.getUri().getPort());
+      proxyConf.set("hdfsproxy.https.address", "127.0.0.1:0");
+      final String namenode = hdfs.getUri().toString();
+      if (namenode.startsWith("hdfs://")) {
+        MyFile[] files = createFiles(LOCAL_FS, TEST_ROOT_DIR + "/srcdat");
+        ToolRunner.run(distcp, new String[] { "-log", namenode + "/logs",
+            "file:///" + TEST_ROOT_DIR + "/srcdat", namenode + "/destdat" });
+        assertTrue("Source and destination directories do not match.",
+            checkFiles(hdfs, "/destdat", files));
+        assertTrue("Log directory does not exist.", hdfs.exists(new Path(
+            namenode + "/logs")));
+
+        proxy = new HdfsProxy(proxyConf);
+        InetSocketAddress proxyAddr = NetUtils.createSocketAddr("127.0.0.1:0");
+        proxy.addListener(proxyAddr, true);
+        proxy.start();
+        final String realProxyAddr = proxyAddr.getHostName() + ":"
+            + proxy.getPort();
+
+        ToolRunner.run(distcp, new String[] {
+            "hftp://" + realProxyAddr + "/destdat", namenode + "/copied1" });
+        assertTrue("Source and copied directories do not match.", checkFiles(
+            hdfs, "/copied1", files));
+
+        ToolRunner.run(distcp, new String[] {
+            "hftp://" + realProxyAddr + "/destdat",
+            "file:///" + TEST_ROOT_DIR + "/copied2" });
+        assertTrue("Source and copied directories do not match.", checkFiles(
+            localfs, TEST_ROOT_DIR + "/copied2", files));
+
+        deldir(hdfs, "/destdat");
+        deldir(hdfs, "/logs");
+        deldir(hdfs, "/copied1");
+        deldir(localfs, TEST_ROOT_DIR + "/srcdat");
+        deldir(localfs, TEST_ROOT_DIR + "/copied2");
+      }
+    } finally {
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+      if (proxy != null) {
+        proxy.stop();
+      }
+    }
+  }
+}

Added: hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyUgiManager.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyUgiManager.java?rev=712305&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyUgiManager.java (added)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyUgiManager.java Fri Nov  7 15:02:30 2008
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import org.apache.hadoop.security.UnixUserGroupInformation;
+
+import junit.framework.TestCase;
+
+/** Unit tests for ProxyUgiManager */
+public class TestProxyUgiManager extends TestCase {
+
+  private static final UnixUserGroupInformation root1Ugi = new UnixUserGroupInformation(
+      "root", new String[] { "group1" });
+  private static final UnixUserGroupInformation root2Ugi = new UnixUserGroupInformation(
+      "root", new String[] { "group2" });
+  private static final long ugiLifetime = 1000L; // milliseconds
+
+  /** Test caching functionality */
+  public void testCache() throws Exception {
+    ProxyUgiManager.saveToCache(root1Ugi);
+    UnixUserGroupInformation ugi = ProxyUgiManager.getUgiForUser(root1Ugi
+        .getUserName());
+    assertEquals(root1Ugi, ugi);
+    ProxyUgiManager.saveToCache(root2Ugi);
+    ugi = ProxyUgiManager.getUgiForUser(root2Ugi.getUserName());
+    assertEquals(root2Ugi, ugi);
+  }
+
+  /** Test clearCache method */
+  public void testClearCache() throws Exception {
+    UnixUserGroupInformation ugi = ProxyUgiManager.getUgiForUser(root1Ugi
+        .getUserName());
+    if (root1Ugi.equals(ugi)) {
+      ProxyUgiManager.saveToCache(root2Ugi);
+      ugi = ProxyUgiManager.getUgiForUser(root2Ugi.getUserName());
+      assertEquals(root2Ugi, ugi);
+      ProxyUgiManager.clearCache();
+      ugi = ProxyUgiManager.getUgiForUser(root2Ugi.getUserName());
+      assertFalse(root2Ugi.equals(ugi));
+    } else {
+      ProxyUgiManager.saveToCache(root1Ugi);
+      ugi = ProxyUgiManager.getUgiForUser(root1Ugi.getUserName());
+      assertEquals(root1Ugi, ugi);
+      ProxyUgiManager.clearCache();
+      ugi = ProxyUgiManager.getUgiForUser(root1Ugi.getUserName());
+      assertFalse(root1Ugi.equals(ugi));
+    }
+  }
+
+  /** Test cache timeout */
+  public void testTimeOut() throws Exception {
+    String[] users = new String[] { "root", "nobody", "SYSTEM",
+        "Administrator", "Administrators", "Guest" };
+    String realUser = null;
+    UnixUserGroupInformation ugi = null;
+    ProxyUgiManager.clearCache();
+    for (String user : users) {
+      ugi = ProxyUgiManager.getUgiForUser(user);
+      if (ugi != null) {
+        realUser = user;
+        break;
+      }
+    }
+    if (realUser != null) {
+      ProxyUgiManager.setUgiLifetime(ugiLifetime);
+      ProxyUgiManager.clearCache();
+      UnixUserGroupInformation[] fakedUgis = generateUgi(ProxyUgiManager.CLEANUP_THRESHOLD);
+      for (int i = 0; i < ProxyUgiManager.CLEANUP_THRESHOLD; i++) {
+        ProxyUgiManager.saveToCache(fakedUgis[i]);
+      }
+      assertTrue(ProxyUgiManager.getCacheSize() == ProxyUgiManager.CLEANUP_THRESHOLD);
+      Thread.sleep(ugiLifetime + 1000L);
+      UnixUserGroupInformation newugi = ProxyUgiManager.getUgiForUser(realUser);
+      assertTrue(ProxyUgiManager.getCacheSize() == ProxyUgiManager.CLEANUP_THRESHOLD + 1);
+      assertEquals(newugi, ugi);
+      Thread.sleep(ugiLifetime + 1000L);
+      newugi = ProxyUgiManager.getUgiForUser(realUser);
+      assertTrue(ProxyUgiManager.getCacheSize() == 1);
+      assertEquals(newugi, ugi);
+    }
+  }
+
+  private static UnixUserGroupInformation[] generateUgi(int size) {
+    UnixUserGroupInformation[] ugis = new UnixUserGroupInformation[size];
+    for (int i = 0; i < size; i++) {
+      ugis[i] = new UnixUserGroupInformation("user" + i,
+          new String[] { "group" });
+    }
+    return ugis;
+  }
+}

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java?rev=712305&r1=712304&r2=712305&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java Fri Nov  7 15:02:30 2008
@@ -23,15 +23,19 @@
 import java.io.IOException;
 
 import java.net.HttpURLConnection;
+import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
+import java.net.UnknownHostException;
 
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 
 import java.util.ArrayList;
+import java.util.Random;
+
 import javax.security.auth.login.LoginException;
 
 import org.xml.sax.Attributes;
@@ -71,6 +75,7 @@
 
   protected InetSocketAddress nnAddr;
   protected UserGroupInformation ugi; 
+  protected final Random ran = new Random();
 
   protected static final SimpleDateFormat df = ListPathsServlet.df;
 
@@ -85,14 +90,26 @@
 
     nnAddr = NetUtils.createSocketAddr(name.toString());
   }
+  
+  /** Randomly pick one of the available IP addresses of a given hostname. */
+  protected String pickOneAddress(String hostname) throws UnknownHostException {
+    if ("localhost".equals(hostname))
+      return hostname;
+    InetAddress[] addrs = InetAddress.getAllByName(hostname);
+    if (addrs.length > 1)
+      return addrs[ran.nextInt(addrs.length)].getHostAddress();
+    return addrs[0].getHostAddress();
+  }
 
   @Override
   public URI getUri() {
     try {
-      return new URI("hftp", null, nnAddr.getHostName(), nnAddr.getPort(),
+      return new URI("hftp", null, pickOneAddress(nnAddr.getHostName()), nnAddr.getPort(),
                      null, null, null);
     } catch (URISyntaxException e) {
       return null;
+    } catch (UnknownHostException e) {
+      return null;
     }
   }
 
@@ -104,7 +121,7 @@
   protected HttpURLConnection openConnection(String path, String query)
       throws IOException {
     try {
-      final URL url = new URI("http", null, nnAddr.getHostName(),
+      final URL url = new URI("http", null, pickOneAddress(nnAddr.getHostName()),
           nnAddr.getPort(), path, query, null).toURL();
       if (LOG.isTraceEnabled()) {
         LOG.trace("url=" + url);

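The pickOneAddress() change above is what lets a single hftp/hsftp URI fan out over a pool of proxy servers: when the configured hostname resolves to several A records, each connection picks one of them at random. The standalone sketch below is not part of this commit and uses proxy.example.com as a placeholder hostname; it only isolates that selection logic.

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Random;

public class PickAddressSketch {
  private static final Random ran = new Random();

  static String pickOneAddress(String hostname) throws UnknownHostException {
    if ("localhost".equals(hostname))
      return hostname;                                   // keep loopback untouched
    InetAddress[] addrs = InetAddress.getAllByName(hostname);
    if (addrs.length > 1)
      return addrs[ran.nextInt(addrs.length)].getHostAddress();
    return addrs[0].getHostAddress();
  }

  public static void main(String[] args) throws UnknownHostException {
    // A hostname backed by several proxies resolves to several addresses;
    // repeated calls spread connections across them roughly uniformly.
    for (int i = 0; i < 3; i++)
      System.out.println(pickOneAddress("proxy.example.com"));
  }
}

Because the pick happens in openConnection() and getUri() rather than once per filesystem instance, a long-lived client keeps redistributing its requests across the pool.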
Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java?rev=712305&r1=712304&r2=712305&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java Fri Nov  7 15:02:30 2008
@@ -24,6 +24,7 @@
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
+import java.net.UnknownHostException;
 
 import org.apache.hadoop.fs.Path;
 
@@ -39,7 +40,7 @@
   protected HttpURLConnection openConnection(String path, String query)
       throws IOException {
     try {
-      final URL url = new URI("https", null, nnAddr.getHostName(),
+      final URL url = new URI("https", null, pickOneAddress(nnAddr.getHostName()),
           nnAddr.getPort(), path, query, null).toURL();
       return (HttpURLConnection)url.openConnection();
     } catch (URISyntaxException e) {
@@ -50,10 +51,12 @@
   @Override
   public URI getUri() {
     try {
-      return new URI("hsftp", null, nnAddr.getHostName(), nnAddr.getPort(),
+      return new URI("hsftp", null, pickOneAddress(nnAddr.getHostName()), nnAddr.getPort(),
                      null, null, null);
     } catch (URISyntaxException e) {
       return null;
+    } catch (UnknownHostException e) {
+      return null;
     }
   }
 

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java?rev=712305&r1=712304&r2=712305&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java Fri Nov  7 15:02:30 2008
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.server.namenode;
 
 import java.io.IOException;
+import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
 
@@ -63,12 +64,12 @@
   protected ClientProtocol createNameNodeProxy(UnixUserGroupInformation ugi
       ) throws IOException {
     ServletContext context = getServletContext();
-    NameNode nn = (NameNode)context.getAttribute("name.node");
+    InetSocketAddress nnAddr = (InetSocketAddress)context.getAttribute("name.node.address");
     Configuration conf = new Configuration(
         (Configuration)context.getAttribute("name.conf"));
     UnixUserGroupInformation.saveToConf(conf,
         UnixUserGroupInformation.UGI_PROPERTY_NAME, ugi);
-    return DFSClient.createNamenode(nn.getNameNodeAddress(), conf);
+    return DFSClient.createNamenode(nnAddr, conf);
   }
 
   /** Create a URI for redirecting request */

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=712305&r1=712304&r2=712305&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java Fri Nov  7 15:02:30 2008
@@ -368,6 +368,7 @@
     this.infoServer.setAttribute("datanode.https.port",
         datanodeSslPort.getPort());
     this.infoServer.setAttribute("name.node", nn);
+    this.infoServer.setAttribute("name.node.address", nn.getNameNodeAddress());
     this.infoServer.setAttribute("name.system.image", getFSImage());
     this.infoServer.setAttribute("name.conf", conf);
     this.infoServer.addInternalServlet("fsck", "/fsck", FsckServlet.class);

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java?rev=712305&r1=712304&r2=712305&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java Fri Nov  7 15:02:30 2008
@@ -35,9 +35,11 @@
  */
 public class FileDataServlet extends DfsServlet {
 
-  private URI createUri(FileStatus i, UnixUserGroupInformation ugi,
-      ClientProtocol nnproxy, String scheme)
+  /** Create a redirection URI */
+  protected URI createUri(FileStatus i, UnixUserGroupInformation ugi,
+      ClientProtocol nnproxy, HttpServletRequest request)
       throws IOException, URISyntaxException {
+    String scheme = request.getScheme();
     final DatanodeID host = pickSrcDatanode(i, nnproxy);
     final String hostname;
     if (host instanceof DatanodeInfo) {
@@ -52,7 +54,7 @@
         "/streamFile", "filename=" + i.getPath() + "&ugi=" + ugi, null);
   }
 
-  private final static JspHelper jspHelper = new JspHelper();
+  private static JspHelper jspHelper = null;
 
   /** Select a datanode to service this request.
    * Currently, this looks at no more than the first five blocks of a file,
@@ -60,6 +62,11 @@
    */
   private static DatanodeID pickSrcDatanode(FileStatus i,
       ClientProtocol nnproxy) throws IOException {
+    // A race condition can occur when lazily initializing a static member this
+    // way. A proper fix would make JspHelper a singleton. Since the race does
+    // not affect correctness, we leave it as is for now.
+    if (jspHelper == null)
+      jspHelper = new JspHelper();
     final LocatedBlocks blks = nnproxy.getBlockLocations(
         i.getPath().toUri().getPath(), 0, 1);
     if (i.getLen() == 0 || blks.getLocatedBlocks().size() <= 0) {
@@ -87,7 +94,7 @@
       FileStatus info = nnproxy.getFileInfo(path);
       if ((info != null) && !info.isDir()) {
         response.sendRedirect(createUri(info, ugi, nnproxy,
-              request.getScheme()).toURL().toString());
+              request).toURL().toString());
       } else if (info == null){
         response.sendError(400, "cat: File not found " + path);
       } else {

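The race flagged in the comment above is benign, as noted. For reference only (this is not what the commit does), the initialization-on-demand holder idiom would give the same lazy construction without the check-then-act window:

package org.apache.hadoop.hdfs.server.namenode;

// Sketch only: a holder-class variant of the lazy jspHelper initialization above.
class FileDataServletHolderSketch {
  private static class JspHelperHolder {
    // The JVM guarantees this field is initialized exactly once, on first
    // access, without explicit locking.
    static final JspHelper HELPER = new JspHelper();
  }

  static JspHelper jspHelper() {
    return JspHelperHolder.HELPER;
  }
}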
Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java?rev=712305&r1=712304&r2=712305&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java Fri Nov  7 15:02:30 2008
@@ -36,12 +36,18 @@
       nameNodeAddr = datanode.getNameNodeAddr();
     }
   }
-  public void doGet(HttpServletRequest request, HttpServletResponse response)
-    throws ServletException, IOException {
+  
+  /** Get a DFSClient for connecting to DFS. */
+  protected DFSClient getDFSClient(HttpServletRequest request)
+      throws IOException {
     Configuration conf = new Configuration(masterConf);
     UnixUserGroupInformation.saveToConf(conf,
         UnixUserGroupInformation.UGI_PROPERTY_NAME, getUGI(request));
-
+    return new DFSClient(nameNodeAddr, conf);
+  }
+  
+  public void doGet(HttpServletRequest request, HttpServletResponse response)
+    throws ServletException, IOException {
     String filename = request.getParameter("filename");
     if (filename == null || filename.length() == 0) {
       response.setContentType("text/plain");
@@ -49,7 +55,7 @@
       out.print("Invalid input");
       return;
     }
-    DFSClient dfs = new DFSClient(nameNodeAddr, conf);
+    DFSClient dfs = getDFSClient(request);
     FSInputStream in = dfs.open(filename);
     OutputStream os = response.getOutputStream();
     response.setHeader("Content-Disposition", "attachment; filename=\"" + 

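Splitting doGet() this way pulls client construction into a protected getDFSClient(), so a subclass can decide how the DFSClient is built while reusing the streaming logic unchanged. A hypothetical subclass is sketched below; the class name and the "localhost:8020" address are placeholders, not code from this commit.

package org.apache.hadoop.hdfs.server.namenode;

import java.io.IOException;
import javax.servlet.http.HttpServletRequest;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.net.NetUtils;

// Sketch of a servlet that reuses StreamFile.doGet() but builds its client from
// its own configuration, e.g. on a proxy that runs outside the NameNode process.
public class ProxyStreamFileSketch extends StreamFile {
  @Override
  protected DFSClient getDFSClient(HttpServletRequest request) throws IOException {
    Configuration conf = new Configuration();
    // A real proxy would read the NameNode address from its own configuration.
    return new DFSClient(NetUtils.createSocketAddr("localhost:8020"), conf);
  }
}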

