hadoop-hdfs-commits mailing list archives

From t...@apache.org
Subject svn commit: r1212060 [4/8] - in /hadoop/common/trunk/hadoop-hdfs-project: ./ hadoop-hdfs-httpfs/ hadoop-hdfs-httpfs/src/ hadoop-hdfs-httpfs/src/main/ hadoop-hdfs-httpfs/src/main/conf/ hadoop-hdfs-httpfs/src/main/java/ hadoop-hdfs-httpfs/src/main/java/o...
Date Thu, 08 Dec 2011 19:25:33 GMT
Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/ProxyUserService.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/ProxyUserService.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/ProxyUserService.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/ProxyUserService.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,176 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.service.security;
+
+import org.apache.hadoop.lib.lang.XException;
+import org.apache.hadoop.lib.server.BaseService;
+import org.apache.hadoop.lib.server.ServiceException;
+import org.apache.hadoop.lib.service.Groups;
+import org.apache.hadoop.lib.service.ProxyUser;
+import org.apache.hadoop.lib.util.Check;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.security.AccessControlException;
+import java.text.MessageFormat;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+public class ProxyUserService extends BaseService implements ProxyUser {
+  private static final Logger LOG = LoggerFactory.getLogger(ProxyUserService.class);
+
+  public enum ERROR implements XException.ERROR {
+    PRXU01("Could not normalize host name [{0}], {1}"),
+    PRXU02("Missing [{0}] property");
+
+    private String template;
+
+    ERROR(String template) {
+      this.template = template;
+    }
+
+    @Override
+    public String getTemplate() {
+      return template;
+    }
+  }
+
+  private static final String PREFIX = "proxyuser";
+  private static final String GROUPS = ".groups";
+  private static final String HOSTS = ".hosts";
+
+  private Map<String, Set<String>> proxyUserHosts = new HashMap<String, Set<String>>();
+  private Map<String, Set<String>> proxyUserGroups = new HashMap<String, Set<String>>();
+
+  public ProxyUserService() {
+    super(PREFIX);
+  }
+
+  @Override
+  public Class getInterface() {
+    return ProxyUser.class;
+  }
+
+  @Override
+  public Class[] getServiceDependencies() {
+    return new Class[]{Groups.class};
+  }
+
+  @Override
+  protected void init() throws ServiceException {
+    for (Map.Entry<String, String> entry : getServiceConfig()) {
+      String key = entry.getKey();
+      if (key.endsWith(GROUPS)) {
+        String proxyUser = key.substring(0, key.lastIndexOf(GROUPS));
+        if (getServiceConfig().get(proxyUser + HOSTS) == null) {
+          throw new ServiceException(ERROR.PRXU02, getPrefixedName(proxyUser + HOSTS));
+        }
+        String value = entry.getValue().trim();
+        LOG.info("Loading proxyuser settings [{}]=[{}]", key, value);
+        Set<String> values = null;
+        if (!value.equals("*")) {
+          values = new HashSet<String>(Arrays.asList(value.split(",")));
+        }
+        proxyUserGroups.put(proxyUser, values);
+      }
+      if (key.endsWith(HOSTS)) {
+        String proxyUser = key.substring(0, key.lastIndexOf(HOSTS));
+        if (getServiceConfig().get(proxyUser + GROUPS) == null) {
+          throw new ServiceException(ERROR.PRXU02, getPrefixedName(proxyUser + GROUPS));
+        }
+        String value = entry.getValue().trim();
+        LOG.info("Loading proxyuser settings [{}]=[{}]", key, value);
+        Set<String> values = null;
+        if (!value.equals("*")) {
+          String[] hosts = value.split(",");
+          for (int i = 0; i < hosts.length; i++) {
+            String originalName = hosts[i];
+            try {
+              hosts[i] = normalizeHostname(originalName);
+            } catch (Exception ex) {
+              throw new ServiceException(ERROR.PRXU01, originalName, ex.getMessage(), ex);
+            }
+            LOG.info("  Hostname, original [{}], normalized [{}]", originalName, hosts[i]);
+          }
+          values = new HashSet<String>(Arrays.asList(hosts));
+        }
+        proxyUserHosts.put(proxyUser, values);
+      }
+    }
+  }
+
+  @Override
+  public void validate(String proxyUser, String proxyHost, String doAsUser) throws IOException,
+    AccessControlException {
+    Check.notEmpty(proxyUser, "proxyUser");
+    Check.notEmpty(proxyHost, "proxyHost");
+    Check.notEmpty(doAsUser, "doAsUser");
+    LOG.debug("Authorization check proxyuser [{}] host [{}] doAs [{}]",
+              new Object[]{proxyUser, proxyHost, doAsUser});
+    if (proxyUserHosts.containsKey(proxyUser)) {
+      proxyHost = normalizeHostname(proxyHost);
+      validateRequestorHost(proxyUser, proxyHost, proxyUserHosts.get(proxyUser));
+      validateGroup(proxyUser, doAsUser, proxyUserGroups.get(proxyUser));
+    } else {
+      throw new AccessControlException(MessageFormat.format("User [{0}] not defined as proxyuser", proxyUser));
+    }
+  }
+
+  private void validateRequestorHost(String proxyUser, String hostname, Set<String> validHosts)
+    throws IOException, AccessControlException {
+    if (validHosts != null) {
+      if (!validHosts.contains(hostname) && !validHosts.contains(normalizeHostname(hostname))) {
+        throw new AccessControlException(MessageFormat.format("Unauthorized host [{0}] for proxyuser [{1}]",
+                                                              hostname, proxyUser));
+      }
+    }
+  }
+
+  private void validateGroup(String proxyUser, String user, Set<String> validGroups) throws IOException,
+    AccessControlException {
+    if (validGroups != null) {
+      List<String> userGroups = getServer().get(Groups.class).getGroups(user);
+      for (String g : validGroups) {
+        if (userGroups.contains(g)) {
+          return;
+        }
+      }
+      throw new AccessControlException(
+        MessageFormat.format("Unauthorized proxyuser [{0}] for user [{1}], not in proxyuser groups",
+                             proxyUser, user));
+    }
+  }
+
+  private String normalizeHostname(String name) {
+    try {
+      InetAddress address = InetAddress.getByName(name);
+      return address.getCanonicalHostName();
+    } catch (IOException ex) {
+      throw new AccessControlException(MessageFormat.format("Could not resolve host [{0}], {1}", name,
+                                                            ex.getMessage()));
+    }
+  }
+
+}
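
For illustration, a minimal sketch of how this service is driven: it reads
"proxyuser"-prefixed properties from the server configuration and enforces
them in validate(). The full property prefix and the server wiring below are
assumptions, not part of this commit.

    <!-- hypothetical server configuration; the actual prefix depends on
         how the server exposes the "proxyuser" service config -->
    <property>
      <name>httpfs.proxyuser.hue.hosts</name>
      <value>gateway1.example.com,gateway2.example.com</value>
    </property>
    <property>
      <name>httpfs.proxyuser.hue.groups</name>
      <value>staff,admin</value>
    </property>

    // from request-handling code ('server' is an assumed Server instance):
    ProxyUser proxyUser = server.get(ProxyUser.class);
    // throws AccessControlException if "hue" may not impersonate "alice"
    // from that host; "*" in the configuration wildcards hosts or groups
    proxyUser.validate("hue", "gateway1.example.com", "alice");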

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,110 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.servlet;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.lib.service.FileSystemAccess;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import java.io.IOException;
+
+/**
+ * The <code>FileSystemReleaseFilter</code> releases a <code>FileSystem</code>
+ * instance back to the {@link FileSystemAccess} service.
+ * <p/>
+ * This filter is useful in situations where a servlet request
+ * is streaming out HDFS data and the corresponding filesystem
+ * instance has to be closed after the streaming completes.
+ */
+public abstract class FileSystemReleaseFilter implements Filter {
+  private static final ThreadLocal<FileSystem> FILE_SYSTEM_TL = new ThreadLocal<FileSystem>();
+
+  /**
+   * Initializes the filter.
+   * <p/>
+   * This implementation is a NOP.
+   *
+   * @param filterConfig filter configuration.
+   *
+   * @throws ServletException thrown if the filter could not be initialized.
+   */
+  @Override
+  public void init(FilterConfig filterConfig) throws ServletException {
+  }
+
+  /**
+   * It delegates the incoming request to the <code>FilterChain</code>, and
+   * at its completion (in a finally block) releases the filesystem instance
+   * back to the {@link FileSystemAccess} service.
+   *
+   * @param servletRequest servlet request.
+   * @param servletResponse servlet response.
+   * @param filterChain filter chain.
+   *
+   * @throws IOException thrown if an IO error occurs.
+   * @throws ServletException thrown if a servlet error occurs.
+   */
+  @Override
+  public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain)
+    throws IOException, ServletException {
+    try {
+      filterChain.doFilter(servletRequest, servletResponse);
+    } finally {
+      FileSystem fs = FILE_SYSTEM_TL.get();
+      if (fs != null) {
+        FILE_SYSTEM_TL.remove();
+        getFileSystemAccess().releaseFileSystem(fs);
+      }
+    }
+  }
+
+  /**
+   * Destroys the filter.
+   * <p/>
+   * This implementation is a NOP.
+   */
+  @Override
+  public void destroy() {
+  }
+
+  /**
+   * Static method that sets the <code>FileSystem</code> to release back to
+   * the {@link FileSystemAccess} service on servlet request completion.
+   *
+   * @param fs filesystem instance.
+   */
+  public static void setFileSystem(FileSystem fs) {
+    FILE_SYSTEM_TL.set(fs);
+  }
+
+  /**
+   * Abstract method to be implemented by concrete implementations of the
+   * filter, returning the {@link FileSystemAccess} service to which the
+   * filesystem will be released.
+   *
+   * @return the FileSystemAccess service.
+   */
+  protected abstract FileSystemAccess getFileSystemAccess();
+
+}
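
A concrete filter only has to supply the FileSystemAccess service. A minimal
sketch, assuming a singleton accessor on the web app (hypothetical name):

    public class MyFileSystemReleaseFilter extends FileSystemReleaseFilter {
      @Override
      protected FileSystemAccess getFileSystemAccess() {
        // MyServerWebApp.get() is an assumed singleton accessor
        return MyServerWebApp.get().get(FileSystemAccess.class);
      }
    }

A servlet streaming HDFS data would call
FileSystemReleaseFilter.setFileSystem(fs) before writing the response; the
filter then releases that instance when the request completes.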

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.servlet;
+
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import java.io.IOException;
+import java.net.InetAddress;
+
+/**
+ * Filter that resolves the requester hostname.
+ */
+public class HostnameFilter implements Filter {
+  static final ThreadLocal<String> HOSTNAME_TL = new ThreadLocal<String>();
+
+  /**
+   * Initializes the filter.
+   * <p/>
+   * This implementation is a NOP.
+   *
+   * @param config filter configuration.
+   *
+   * @throws ServletException thrown if the filter could not be initialized.
+   */
+  @Override
+  public void init(FilterConfig config) throws ServletException {
+  }
+
+  /**
+   * Resolves the requester hostname and delegates the request to the chain.
+   * <p/>
+   * The requester hostname is available via the {@link #get} method.
+   *
+   * @param request servlet request.
+   * @param response servlet response.
+   * @param chain filter chain.
+   *
+   * @throws IOException thrown if an IO error occurs.
+   * @throws ServletException thrown if a servlet error occurs.
+   */
+  @Override
+  public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
+    throws IOException, ServletException {
+    try {
+      String hostname = InetAddress.getByName(request.getRemoteAddr()).getCanonicalHostName();
+      HOSTNAME_TL.set(hostname);
+      chain.doFilter(request, response);
+    } finally {
+      HOSTNAME_TL.remove();
+    }
+  }
+
+  /**
+   * Returns the requester hostname.
+   *
+   * @return the requester hostname.
+   */
+  public static String get() {
+    return HOSTNAME_TL.get();
+  }
+
+  /**
+   * Destroys the filter.
+   * <p/>
+   * This implementation is a NOP.
+   */
+  @Override
+  public void destroy() {
+  }
+}
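
Once the filter is in the chain, any code running on the request thread can
read the resolved hostname; a small usage sketch:

    // returns null if the filter is not configured for this request
    String requester = HostnameFilter.get();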

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.servlet;
+
+import org.slf4j.MDC;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import java.io.IOException;
+import java.security.Principal;
+
+/**
+ * Filter that sets request contextual information for the slf4j MDC.
+ * <p/>
+ * It sets the following values:
+ * <ul>
+ * <li>hostname: if the {@link HostnameFilter} is present and configured
+ * before this filter</li>
+ * <li>user: the <code>HttpServletRequest.getUserPrincipal().getName()</code></li>
+ * <li>method: the HTTP method of the request (GET, POST, ...)</li>
+ * <li>path: the path of the request URL</li>
+ * </ul>
+ */
+public class MDCFilter implements Filter {
+
+  /**
+   * Initializes the filter.
+   * <p/>
+   * This implementation is a NOP.
+   *
+   * @param config filter configuration.
+   *
+   * @throws ServletException thrown if the filter could not be initialized.
+   */
+  @Override
+  public void init(FilterConfig config) throws ServletException {
+  }
+
+  /**
+   * Sets the slf4j <code>MDC</code> and delegates the request to the chain.
+   *
+   * @param request servlet request.
+   * @param response servlet response.
+   * @param chain filter chain.
+   *
+   * @throws IOException thrown if an IO error occurs.
+   * @throws ServletException thrown if a servlet error occurs.
+   */
+  @Override
+  public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
+    throws IOException, ServletException {
+    try {
+      MDC.clear();
+      String hostname = HostnameFilter.get();
+      if (hostname != null) {
+        MDC.put("hostname", HostnameFilter.get());
+      }
+      Principal principal = ((HttpServletRequest) request).getUserPrincipal();
+      String user = (principal != null) ? principal.getName() : null;
+      if (user != null) {
+        MDC.put("user", user);
+      }
+      MDC.put("method", ((HttpServletRequest) request).getMethod());
+      MDC.put("path", ((HttpServletRequest) request).getPathInfo());
+      chain.doFilter(request, response);
+    } finally {
+      MDC.clear();
+    }
+  }
+
+  /**
+   * Destroys the filter.
+   * <p/>
+   * This implementation is a NOP.
+   */
+  @Override
+  public void destroy() {
+  }
+}
+
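
The MDC keys set by this filter become available to the logging layout; a
hypothetical log4j pattern using them (not part of this commit):

    log4j.appender.access.layout=org.apache.log4j.PatternLayout
    log4j.appender.access.layout.ConversionPattern=%d{ISO8601} %X{hostname} %X{user} %X{method} %X{path} - %m%n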

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,159 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.servlet;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.lib.server.Server;
+import org.apache.hadoop.lib.server.ServerException;
+
+import javax.servlet.ServletContextEvent;
+import javax.servlet.ServletContextListener;
+import java.text.MessageFormat;
+
+/**
+ * {@link Server} subclass that implements <code>ServletContextListener</code>
+ * and uses its lifecycle to start and stop the server.
+ */
+public abstract class ServerWebApp extends Server implements ServletContextListener {
+
+  private static final String HOME_DIR = ".home.dir";
+  private static final String CONFIG_DIR = ".config.dir";
+  private static final String LOG_DIR = ".log.dir";
+  private static final String TEMP_DIR = ".temp.dir";
+
+  private static final ThreadLocal<String> HOME_DIR_TL = new ThreadLocal<String>();
+
+  /**
+   * Method for testing purposes.
+   */
+  public static void setHomeDirForCurrentThread(String homeDir) {
+    HOME_DIR_TL.set(homeDir);
+  }
+
+  /**
+   * Constructor for testing purposes.
+   */
+  protected ServerWebApp(String name, String homeDir, String configDir, String logDir, String tempDir,
+                         Configuration config) {
+    super(name, homeDir, configDir, logDir, tempDir, config);
+  }
+
+  /**
+   * Constructor for testing purposes.
+   */
+  protected ServerWebApp(String name, String homeDir, Configuration config) {
+    super(name, homeDir, config);
+  }
+
+  /**
+   * Constructor. Subclasses must have a default constructor specifying
+   * the server name.
+   * <p/>
+   * The server name is used to resolve the Java System properties that define
+   * the server home, config, log and temp directories.
+   * <p/>
+   * The home directory is looked up in the Java System property
+   * <code>#SERVER_NAME#.home.dir</code>.
+   * <p/>
+   * The config directory is looked up in the Java System property
+   * <code>#SERVER_NAME#.config.dir</code>; if not defined, it resolves to
+   * the <code>#SERVER_HOME_DIR#/conf</code> directory.
+   * <p/>
+   * The log directory is looked up in the Java System property
+   * <code>#SERVER_NAME#.log.dir</code>; if not defined, it resolves to
+   * the <code>#SERVER_HOME_DIR#/log</code> directory.
+   * <p/>
+   * The temp directory is looked up in the Java System property
+   * <code>#SERVER_NAME#.temp.dir</code>; if not defined, it resolves to
+   * the <code>#SERVER_HOME_DIR#/temp</code> directory.
+   *
+   * @param name server name.
+   */
+  public ServerWebApp(String name) {
+    super(name, getHomeDir(name),
+          getDir(name, CONFIG_DIR, getHomeDir(name) + "/conf"),
+          getDir(name, LOG_DIR, getHomeDir(name) + "/log"),
+          getDir(name, TEMP_DIR, getHomeDir(name) + "/temp"), null);
+  }
+
+  /**
+   * Returns the server home directory.
+   * <p/>
+   * It is looked up in the Java System property
+   * <code>#SERVER_NAME#.home.dir</code>.
+   *
+   * @param name the server name.
+   *
+   * @return the server home directory.
+   */
+  static String getHomeDir(String name) {
+    String homeDir = HOME_DIR_TL.get();
+    if (homeDir == null) {
+      String sysProp = name + HOME_DIR;
+      homeDir = System.getProperty(sysProp);
+      if (homeDir == null) {
+        throw new IllegalArgumentException(MessageFormat.format("System property [{0}] not defined", sysProp));
+      }
+    }
+    return homeDir;
+  }
+
+  /**
+   * Convenience method that looks for a Java System property defining a
+   * directory and, if not present, defaults to the specified directory.
+   *
+   * @param name server name, used as prefix of the Java System property.
+   * @param dirType dir type, used as suffix of the Java System property.
+   * @param defaultDir the default directory to return if the Java System
+   * property <code>name + dirType</code> is not defined.
+   *
+   * @return the directory defined in the Java System property, or the
+   *         default directory if the Java System property is not defined.
+   */
+  static String getDir(String name, String dirType, String defaultDir) {
+    String sysProp = name + dirType;
+    return System.getProperty(sysProp, defaultDir);
+  }
+
+  /**
+   * Initializes the <code>ServletContextListener</code> which initializes
+   * the Server.
+   *
+   * @param event servlet context event.
+   */
+  public void contextInitialized(ServletContextEvent event) {
+    try {
+      init();
+    } catch (ServerException ex) {
+      event.getServletContext().log("ERROR: " + ex.getMessage());
+      throw new RuntimeException(ex);
+    }
+  }
+
+  /**
+   * Destroys the <code>ServletContextListener</code> which destroys
+   * the Server.
+   *
+   * @param event servlet context event.
+   */
+  public void contextDestroyed(ServletContextEvent event) {
+    destroy();
+  }
+
+}
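
A concrete web app needs only a default constructor passing the server name;
a minimal sketch, assuming the name "myserver":

    public class MyServerWebApp extends ServerWebApp {
      public MyServerWebApp() {
        super("myserver");
      }
    }

    // launched with, e.g.: -Dmyserver.home.dir=/var/myserver
    // myserver.config.dir then defaults to /var/myserver/conf,
    // myserver.log.dir to /var/myserver/log, myserver.temp.dir to /var/myserver/temp

The subclass would typically be registered as a <listener> in web.xml so the
servlet container's lifecycle starts and stops the server.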

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/Check.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/Check.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/Check.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/Check.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,199 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.util;
+
+import java.text.MessageFormat;
+import java.util.List;
+import java.util.regex.Pattern;
+
+/**
+ * Utility methods to check preconditions.
+ * <p/>
+ * Commonly used to check method argument preconditions.
+ */
+public class Check {
+
+  /**
+   * Verifies a variable is not NULL.
+   *
+   * @param obj the variable to check.
+   * @param name the name to use in the exception message.
+   *
+   * @return the variable.
+   *
+   * @throws IllegalArgumentException if the variable is NULL.
+   */
+  public static <T> T notNull(T obj, String name) {
+    if (obj == null) {
+      throw new IllegalArgumentException(name + " cannot be null");
+    }
+    return obj;
+  }
+
+  /**
+   * Verifies a list does not have any NULL elements.
+   *
+   * @param list the list to check.
+   * @param name the name to use in the exception message.
+   *
+   * @return the list.
+   *
+   * @throws IllegalArgumentException if the list has NULL elements.
+   */
+  public static <T> List<T> notNullElements(List<T> list, String name) {
+    notNull(list, name);
+    for (int i = 0; i < list.size(); i++) {
+      notNull(list.get(i), MessageFormat.format("list [{0}] element [{1}]", name, i));
+    }
+    return list;
+  }
+
+  /**
+   * Verifies a string is not NULL and not empty.
+   *
+   * @param str the variable to check.
+   * @param name the name to use in the exception message.
+   *
+   * @return the variable.
+   *
+   * @throws IllegalArgumentException if the variable is NULL or empty.
+   */
+  public static String notEmpty(String str, String name) {
+    if (str == null) {
+      throw new IllegalArgumentException(name + " cannot be null");
+    }
+    if (str.length() == 0) {
+      throw new IllegalArgumentException(name + " cannot be empty");
+    }
+    return str;
+  }
+
+  /**
+   * Verifies a string list is not NULL and its elements are not NULL or empty.
+   *
+   * @param list the list to check.
+   * @param name the name to use in the exception message.
+   *
+   * @return the list.
+   *
+   * @throws IllegalArgumentException if the string list has NULL or empty
+   * elements.
+   */
+  public static List<String> notEmptyElements(List<String> list, String name) {
+    notNull(list, name);
+    for (int i = 0; i < list.size(); i++) {
+      notEmpty(list.get(i), MessageFormat.format("list [{0}] element [{1}]", name, i));
+    }
+    return list;
+  }
+
+  private static final String IDENTIFIER_PATTERN_STR = "[a-zA-Z_][a-zA-Z0-9_\\-]*";
+
+  private static final Pattern IDENTIFIER_PATTERN = Pattern.compile("^" + IDENTIFIER_PATTERN_STR + "$");
+
+  /**
+   * Verifies a value is a valid identifier,
+   * <code>[a-zA-Z_][a-zA-Z0-9_\-]*</code>, up to a maximum length.
+   *
+   * @param value string to check if it is a valid identifier.
+   * @param maxLen maximum length.
+   * @param name the name to use in the exception message.
+   *
+   * @return the value.
+   *
+   * @throws IllegalArgumentException if the string is not a valid identifier.
+   */
+  public static String validIdentifier(String value, int maxLen, String name) {
+    Check.notEmpty(value, name);
+    if (value.length() > maxLen) {
+      throw new IllegalArgumentException(
+        MessageFormat.format("[{0}] = [{1}] exceeds max len [{2}]", name, value, maxLen));
+    }
+    if (!IDENTIFIER_PATTERN.matcher(value).find()) {
+      throw new IllegalArgumentException(
+        MessageFormat.format("[{0}] = [{1}] must be '{2}'", name, value, IDENTIFIER_PATTERN_STR));
+    }
+    return value;
+  }
+
+  /**
+   * Verifies an integer is greater than zero.
+   *
+   * @param value integer value.
+   * @param name the name to use in the exception message.
+   *
+   * @return the value.
+   *
+   * @throws IllegalArgumentException if the integer is zero or less.
+   */
+  public static int gt0(int value, String name) {
+    return (int) gt0((long) value, name);
+  }
+
+  /**
+   * Verifies a long is greater than zero.
+   *
+   * @param value long value.
+   * @param name the name to use in the exception message.
+   *
+   * @return the value.
+   *
+   * @throws IllegalArgumentException if the long is zero or less.
+   */
+  public static long gt0(long value, String name) {
+    if (value <= 0) {
+      throw new IllegalArgumentException(
+        MessageFormat.format("parameter [{0}] = [{1}] must be greater than zero", name, value));
+    }
+    return value;
+  }
+
+  /**
+   * Verifies an integer is greater than or equal to zero.
+   *
+   * @param value integer value.
+   * @param name the name to use in the exception message.
+   *
+   * @return the value.
+   *
+   * @throws IllegalArgumentException if the integer is less than zero.
+   */
+  public static int ge0(int value, String name) {
+    return (int) ge0((long) value, name);
+  }
+
+  /**
+   * Verifies a long is greater than or equal to zero.
+   *
+   * @param value long value.
+   * @param name the name to use in the exception message.
+   *
+   * @return the value.
+   *
+   * @throws IllegalArgumentException if the long is less than zero.
+   */
+  public static long ge0(long value, String name) {
+    if (value < 0) {
+      throw new IllegalArgumentException(MessageFormat.format(
+        "parameter [{0}] = [{1}] must be greater than or equals zero", name, value));
+    }
+    return value;
+  }
+
+}
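
Typical use is one call per argument at method entry, relying on the checks
returning their argument; an illustrative snippet (names are made up):

    import org.apache.hadoop.lib.util.Check;
    import java.util.List;

    public class CheckUsage {
      public static int copies(String user, List<String> paths, int replication) {
        Check.notEmpty(user, "user");           // rejects null and ""
        Check.notNullElements(paths, "paths");  // rejects null list or null elements
        return Check.gt0(replication, "replication"); // rejects zero and negatives
      }
    }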

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/ConfigurationUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/ConfigurationUtils.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/ConfigurationUtils.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/ConfigurationUtils.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,157 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.util;
+
+import org.apache.hadoop.conf.Configuration;
+import org.w3c.dom.DOMException;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.w3c.dom.Text;
+import org.xml.sax.SAXException;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Map;
+
+/**
+ * Configuration utilities.
+ */
+public abstract class ConfigurationUtils {
+
+  /**
+   * Copies configuration key/value pairs from one configuration to another;
+   * if a property exists in the target, it gets replaced.
+   *
+   * @param source source configuration.
+   * @param target target configuration.
+   */
+  public static void copy(Configuration source, Configuration target) {
+    Check.notNull(source, "source");
+    Check.notNull(target, "target");
+    for (Map.Entry<String, String> entry : source) {
+      target.set(entry.getKey(), entry.getValue());
+    }
+  }
+
+  /**
+   * Injects configuration key/value pairs from one configuration to another if the key does not exist in the target
+   * configuration.
+   *
+   * @param source source configuration.
+   * @param target target configuration.
+   */
+  public static void injectDefaults(Configuration source, Configuration target) {
+    Check.notNull(source, "source");
+    Check.notNull(target, "target");
+    for (Map.Entry<String, String> entry : source) {
+      if (target.get(entry.getKey()) == null) {
+        target.set(entry.getKey(), entry.getValue());
+      }
+    }
+  }
+
+  /**
+   * Returns a new Configuration instance with all its property values resolved
+   * (variable substitution applied).
+   *
+   * @param conf configuration to resolve.
+   *
+   * @return a new Configuration instance with all its property values resolved.
+   */
+  public static Configuration resolve(Configuration conf) {
+    Configuration resolved = new Configuration(false);
+    for (Map.Entry<String, String> entry : conf) {
+      resolved.set(entry.getKey(), conf.get(entry.getKey()));
+    }
+    return resolved;
+  }
+
+  // Cannibalized from Hadoop's <code>Configuration.loadResource()</code>.
+
+  /**
+   * Loads a configuration from an InputStream into the given Configuration.
+   * <p/>
+   * Cannibalized from <code>Configuration.loadResource()</code>.
+   *
+   * @param conf configuration to load the properties into.
+   * @param is input stream to read the configuration from.
+   *
+   * @throws IOException thrown if the configuration could not be read.
+   */
+  public static void load(Configuration conf, InputStream is) throws IOException {
+    try {
+      DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
+      // ignore all comments inside the xml file
+      docBuilderFactory.setIgnoringComments(true);
+      DocumentBuilder builder = docBuilderFactory.newDocumentBuilder();
+      Document doc = builder.parse(is);
+      parseDocument(conf, doc);
+    } catch (SAXException e) {
+      throw new IOException(e);
+    } catch (ParserConfigurationException e) {
+      throw new IOException(e);
+    }
+  }
+
+  // Cannibalized from Hadoop's <code>Configuration.loadResource()</code>.
+  private static void parseDocument(Configuration conf, Document doc) throws IOException {
+    try {
+      Element root = doc.getDocumentElement();
+      if (!"configuration".equals(root.getTagName())) {
+        throw new IOException("bad conf file: top-level element not <configuration>");
+      }
+      NodeList props = root.getChildNodes();
+      for (int i = 0; i < props.getLength(); i++) {
+        Node propNode = props.item(i);
+        if (!(propNode instanceof Element)) {
+          continue;
+        }
+        Element prop = (Element) propNode;
+        if (!"property".equals(prop.getTagName())) {
+          throw new IOException("bad conf file: element not <property>");
+        }
+        NodeList fields = prop.getChildNodes();
+        String attr = null;
+        String value = null;
+        for (int j = 0; j < fields.getLength(); j++) {
+          Node fieldNode = fields.item(j);
+          if (!(fieldNode instanceof Element)) {
+            continue;
+          }
+          Element field = (Element) fieldNode;
+          if ("name".equals(field.getTagName()) && field.hasChildNodes()) {
+            attr = ((Text) field.getFirstChild()).getData().trim();
+          }
+          if ("value".equals(field.getTagName()) && field.hasChildNodes()) {
+            value = ((Text) field.getFirstChild()).getData();
+          }
+        }
+
+        if (attr != null && value != null) {
+          conf.set(attr, value);
+        }
+      }
+
+    } catch (DOMException e) {
+      throw new IOException(e);
+    }
+  }
+
+}
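
An illustrative load/override sequence (file names are assumptions):

    Configuration defaults = new Configuration(false);
    ConfigurationUtils.load(defaults, new FileInputStream("myserver-default.xml"));
    Configuration conf = new Configuration(false);
    ConfigurationUtils.load(conf, new FileInputStream("myserver-site.xml"));
    // site values win; default values only fill keys the site file omits
    ConfigurationUtils.injectDefaults(defaults, conf);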

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+import java.text.MessageFormat;
+
+public abstract class BooleanParam extends Param<Boolean> {
+
+  public BooleanParam(String name, String str) {
+    value = parseParam(name, str);
+  }
+
+  @Override
+  protected Boolean parse(String str) throws Exception {
+    if (str.equalsIgnoreCase("true")) {
+      return true;
+    }
+    if (str.equalsIgnoreCase("false")) {
+      return false;
+    }
+    throw new IllegalArgumentException(MessageFormat.format("Invalid value [{0}], must be a boolean", str));
+  }
+
+  @Override
+  protected String getDomain() {
+    return "a boolean";
+  }
+}
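
Concrete parameters subclass one of these typed Param classes; a minimal
sketch of a boolean query parameter (the Param base class is not shown in
this portion of the diff; the parameter name here is made up):

    public class OverwriteParam extends BooleanParam {
      public static final String NAME = "overwrite";

      public OverwriteParam(String str) {
        super(NAME, str);
      }
    }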

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+public abstract class ByteParam extends Param<Byte> {
+
+  public ByteParam(String name, String str) {
+    value = parseParam(name, str);
+  }
+
+  @Override
+  protected Byte parse(String str) throws Exception {
+    return Byte.parseByte(str);
+  }
+
+  @Override
+  protected String getDomain() {
+    return "a byte";
+  }
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+import org.apache.hadoop.util.StringUtils;
+
+import java.util.Arrays;
+
+public abstract class EnumParam<E extends Enum<E>> extends Param<E> {
+  Class<E> klass;
+
+  public EnumParam(String label, String str, Class<E> e) {
+    klass = e;
+    value = parseParam(label, str);
+  }
+
+  @Override
+  protected E parse(String str) throws Exception {
+    return Enum.valueOf(klass, str.toUpperCase());
+  }
+
+  @Override
+  protected String getDomain() {
+    return StringUtils.join(",", Arrays.asList(klass.getEnumConstants()));
+  }
+
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ExceptionProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ExceptionProvider.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ExceptionProvider.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ExceptionProvider.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+public class ExceptionProvider implements ExceptionMapper<Throwable> {
+  private static final Logger LOG = LoggerFactory.getLogger(ExceptionProvider.class);
+
+  private static final String ENTER = System.getProperty("line.separator");
+
+  protected Response createResponse(Response.Status status, Throwable throwable) {
+    Map<String, Object> json = new LinkedHashMap<String, Object>();
+    json.put(HttpFSFileSystem.ERROR_MESSAGE_JSON, getOneLineMessage(throwable));
+    json.put(HttpFSFileSystem.ERROR_EXCEPTION_JSON, throwable.getClass().getSimpleName());
+    json.put(HttpFSFileSystem.ERROR_CLASSNAME_JSON, throwable.getClass().getName());
+    Map<String, Object> response = new LinkedHashMap<String, Object>();
+    response.put(HttpFSFileSystem.ERROR_JSON, json);
+    log(status, throwable);
+    return Response.status(status).type(MediaType.APPLICATION_JSON).entity(response).build();
+  }
+
+  protected String getOneLineMessage(Throwable throwable) {
+    String message = throwable.getMessage();
+    if (message != null) {
+      int i = message.indexOf(ENTER);
+      if (i > -1) {
+        message = message.substring(0, i);
+      }
+    }
+    return message;
+  }
+
+  protected void log(Response.Status status, Throwable throwable) {
+    LOG.debug("{}", throwable.getMessage(), throwable);
+  }
+
+  @Override
+  public Response toResponse(Throwable throwable) {
+    return createResponse(Response.Status.BAD_REQUEST, throwable);
+  }
+
+}
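
Subclasses would typically map exception types onto HTTP statuses by
overriding toResponse; a sketch (the mappings are illustrative):

    @Provider
    public class MyExceptionProvider extends ExceptionProvider {
      @Override
      public Response toResponse(Throwable throwable) {
        if (throwable instanceof java.io.FileNotFoundException) {
          return createResponse(Response.Status.NOT_FOUND, throwable);
        }
        if (throwable instanceof SecurityException) {
          return createResponse(Response.Status.UNAUTHORIZED, throwable);
        }
        return createResponse(Response.Status.INTERNAL_SERVER_ERROR, throwable);
      }
    }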

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/InputStreamEntity.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/InputStreamEntity.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/InputStreamEntity.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/InputStreamEntity.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+import org.apache.hadoop.io.IOUtils;
+
+import javax.ws.rs.core.StreamingOutput;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+public class InputStreamEntity implements StreamingOutput {
+  private InputStream is;
+  private long offset;
+  private long len;
+
+  public InputStreamEntity(InputStream is, long offset, long len) {
+    this.is = is;
+    this.offset = offset;
+    this.len = len;
+  }
+
+  public InputStreamEntity(InputStream is) {
+    this(is, 0, -1);
+  }
+
+  @Override
+  public void write(OutputStream os) throws IOException {
+    is.skip(offset);
+    if (len == -1) {
+      IOUtils.copyBytes(is, os, 4096, true);
+    } else {
+      IOUtils.copyBytes(is, os, len, true);
+    }
+  }
+}
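
A JAX-RS resource would return this entity to stream a byte range of an open
stream; a sketch ('is', 'offset' and 'len' assumed in scope). Note that
write() relies on InputStream.skip(), which may skip fewer bytes than
requested for some stream types.

    InputStreamEntity entity = new InputStreamEntity(is, offset, len);
    return Response.ok(entity).type(MediaType.APPLICATION_OCTET_STREAM).build();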

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+public abstract class IntegerParam extends Param<Integer> {
+
+  public IntegerParam(String name, String str) {
+    value = parseParam(name, str);
+  }
+
+  @Override
+  protected Integer parse(String str) throws Exception {
+    return Integer.parseInt(str);
+  }
+
+  @Override
+  protected String getDomain() {
+    return "an integer";
+  }
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONMapProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONMapProvider.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONMapProvider.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONMapProvider.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.ext.MessageBodyWriter;
+import javax.ws.rs.ext.Provider;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Type;
+import java.util.Map;
+
+@Provider
+@Produces(MediaType.APPLICATION_JSON)
+public class JSONMapProvider implements MessageBodyWriter<Map> {
+  private static final String ENTER = System.getProperty("line.separator");
+
+  @Override
+  public boolean isWriteable(Class<?> aClass, Type type, Annotation[] annotations, MediaType mediaType) {
+    return Map.class.isAssignableFrom(aClass);
+  }
+
+  @Override
+  public long getSize(Map map, Class<?> aClass, Type type, Annotation[] annotations, MediaType mediaType) {
+    return -1;
+  }
+
+  @Override
+  public void writeTo(Map map, Class<?> aClass, Type type, Annotation[] annotations,
+                      MediaType mediaType, MultivaluedMap<String, Object> stringObjectMultivaluedMap,
+                      OutputStream outputStream) throws IOException, WebApplicationException {
+    Writer writer = new OutputStreamWriter(outputStream);
+    JSONObject.writeJSONString(map, writer);
+    writer.write(ENTER);
+    writer.flush();
+  }
+
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONProvider.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONProvider.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONProvider.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+import org.json.simple.JSONStreamAware;
+
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.ext.MessageBodyWriter;
+import javax.ws.rs.ext.Provider;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Type;
+
+@Provider
+@Produces(MediaType.APPLICATION_JSON)
+public class JSONProvider implements MessageBodyWriter<JSONStreamAware> {
+  private static final String ENTER = System.getProperty("line.separator");
+
+  @Override
+  public boolean isWriteable(Class<?> aClass, Type type, Annotation[] annotations, MediaType mediaType) {
+    return JSONStreamAware.class.isAssignableFrom(aClass);
+  }
+
+  @Override
+  public long getSize(JSONStreamAware jsonStreamAware, Class<?> aClass, Type type, Annotation[] annotations,
+                      MediaType mediaType) {
+    return -1;
+  }
+
+  @Override
+  public void writeTo(JSONStreamAware jsonStreamAware, Class<?> aClass, Type type, Annotation[] annotations,
+                      MediaType mediaType, MultivaluedMap<String, Object> stringObjectMultivaluedMap,
+                      OutputStream outputStream) throws IOException, WebApplicationException {
+    Writer writer = new OutputStreamWriter(outputStream, "UTF-8");  // JSON must be UTF-8, not the platform default charset
+    jsonStreamAware.writeJSONString(writer);
+    writer.write(ENTER);
+    writer.flush();
+  }
+
+}
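
Similarly, any json-simple type implementing JSONStreamAware (such as
JSONObject or JSONArray) can be returned directly; a sketch with invented
names:

    // Hypothetical helper: JSONObject implements JSONStreamAware, so
    // JSONProvider.writeTo() can call its writeJSONString() directly.
    import org.json.simple.JSONObject;
    import org.json.simple.JSONStreamAware;

    public class JsonResponses {

      @SuppressWarnings("unchecked")  // JSONObject extends the raw HashMap
      public static JSONStreamAware single(String key, Object value) {
        JSONObject json = new JSONObject();
        json.put(key, value);
        return json;
      }
    }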

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+public abstract class LongParam extends Param<Long> {
+
+  public LongParam(String name, String str) {
+    value = parseParam(name, str);
+  }
+
+  protected Long parse(String str) throws Exception {
+    return Long.parseLong(str);
+  }
+
+  @Override
+  protected String getDomain() {
+    return "a long";
+  }
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+import org.apache.hadoop.lib.util.Check;
+
+import java.text.MessageFormat;
+
+public abstract class Param<T> {
+  protected T value;
+
+  public T parseParam(String name, String str) {
+    Check.notNull(name, "name");
+    try {
+      return (str != null && str.trim().length() > 0) ? parse(str) : null;
+    } catch (Exception ex) {
+      throw new IllegalArgumentException(
+        MessageFormat.format("Parameter [{0}], invalid value [{1}], value must be [{2}]",
+                             name, str, getDomain()), ex);
+    }
+  }
+
+  public T value() {
+    return value;
+  }
+
+  protected void setValue(T value) {
+    this.value = value;
+  }
+
+  protected abstract String getDomain();
+
+  protected abstract T parse(String str) throws Exception;
+
+  public String toString() {
+    return String.valueOf(value);  // avoids NPE when the parameter was absent
+  }
+}
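
A usage sketch for the Param hierarchy; the subclass and parameter name are
illustrative only:

    // Illustrative concrete parameter: binds the query-string name "offset"
    // to Long parsing, with error reporting supplied by Param.parseParam().
    public class OffsetParam extends LongParam {
      public static final String NAME = "offset";

      public OffsetParam(String str) {
        super(NAME, str);
      }
    }

    // new OffsetParam("100").value() -> 100L
    // new OffsetParam(null).value()  -> null (parameter absent)
    // new OffsetParam("abc")         -> IllegalArgumentException with message
    //   "Parameter [offset], invalid value [abc], value must be [a long]"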

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+public abstract class ShortParam extends Param<Short> {
+
+  public ShortParam(String name, String str) {
+    value = parseParam(name, str);
+  }
+
+  protected Short parse(String str) throws Exception {
+    return Short.parseShort(str);
+  }
+
+  @Override
+  protected String getDomain() {
+    return "a short";
+  }
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+import org.apache.hadoop.lib.util.Check;
+
+import java.text.MessageFormat;
+import java.util.regex.Pattern;
+
+public abstract class StringParam extends Param<String> {
+  private Pattern pattern;
+
+  public StringParam(String name, String str) {
+    this(name, str, null);
+  }
+
+  public StringParam(String name, String str, Pattern pattern) {
+    this.pattern = pattern;
+    value = parseParam(name, str);
+  }
+
+  public String parseParam(String name, String str) {
+    String ret = null;
+    Check.notNull(name, "name");
+    try {
+      if (str != null) {
+        str = str.trim();
+        if (str.length() > 0) {
+          return parse(str);
+        }
+      }
+    } catch (Exception ex) {
+      throw new IllegalArgumentException(
+        MessageFormat.format("Parameter [{0}], invalid value [{1}], value must be [{2}]",
+                             name, str, getDomain()), ex);
+    }
+    return ret;
+  }
+
+  protected String parse(String str) throws Exception {
+    if (pattern != null) {
+      if (!pattern.matcher(str).matches()) {
+        throw new IllegalArgumentException("Invalid value");
+      }
+    }
+    return str;
+  }
+
+  @Override
+  protected String getDomain() {
+    return (pattern == null) ? "a string" : pattern.pattern();
+  }
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/UserProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/UserProvider.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/UserProvider.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/UserProvider.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.wsrs;
+
+import com.sun.jersey.api.core.HttpContext;
+import com.sun.jersey.core.spi.component.ComponentContext;
+import com.sun.jersey.core.spi.component.ComponentScope;
+import com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable;
+import com.sun.jersey.spi.inject.Injectable;
+import com.sun.jersey.spi.inject.InjectableProvider;
+import org.slf4j.MDC;
+
+import javax.ws.rs.core.Context;
+import javax.ws.rs.ext.Provider;
+import java.lang.reflect.Type;
+import java.security.Principal;
+import java.util.regex.Pattern;
+
+@Provider
+public class UserProvider extends AbstractHttpContextInjectable<Principal> implements
+  InjectableProvider<Context, Type> {
+
+  public static final String USER_NAME_PARAM = "user.name";
+
+  public static final Pattern USER_PATTERN = Pattern.compile("[_a-zA-Z0-9]+");
+
+  private static class UserParam extends StringParam {
+
+    public UserParam(String user) {
+      super(USER_NAME_PARAM, user, USER_PATTERN);
+    }
+  }
+
+  @Override
+  public Principal getValue(HttpContext httpContext) {
+    Principal principal = httpContext.getRequest().getUserPrincipal();
+    if (principal == null) {
+      final String user = httpContext.getRequest().getQueryParameters().getFirst(USER_NAME_PARAM);
+      if (user != null) {
+        principal = new Principal() {
+          @Override
+          public String getName() {
+            return new UserParam(user).value();
+          }
+        };
+      }
+    }
+    if (principal != null) {
+      MDC.put("user", principal.getName());
+    }
+    return principal;
+  }
+
+  @Override
+  public ComponentScope getScope() {
+    return ComponentScope.PerRequest;
+  }
+
+  @Override
+  public Injectable getInjectable(ComponentContext componentContext, Context context, Type type) {
+    return (type.equals(Principal.class)) ? this : null;
+  }
+}
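
With this provider registered, a resource method can receive the caller
identity as an injected Principal; a hypothetical example (class and path
invented):

    // Hypothetical resource: @Context Principal is resolved by UserProvider
    // from the authenticated request or the 'user.name' query parameter.
    import java.security.Principal;
    import javax.ws.rs.GET;
    import javax.ws.rs.Path;
    import javax.ws.rs.core.Context;

    @Path("whoami")
    public class WhoAmIResource {

      @GET
      public String get(@Context Principal user) {
        // e.g. GET /whoami?user.name=alice -> "alice"
        return (user == null) ? "anonymous" : user.getName();
      }
    }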

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh Thu Dec  8 19:25:28 2011
@@ -0,0 +1,167 @@
+#!/bin/bash
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+# resolve links - $0 may be a softlink
+PRG="${0}"
+
+while [ -h "${PRG}" ]; do
+  ls=`ls -ld "${PRG}"`
+  link=`expr "$ls" : '.*-> \(.*\)$'`
+  if expr "$link" : '/.*' > /dev/null; then
+    PRG="$link"
+  else
+    PRG=`dirname "${PRG}"`/"$link"
+  fi
+done
+
+BASEDIR=`dirname ${PRG}`
+BASEDIR=`cd ${BASEDIR}/..;pwd`
+
+
+function print() {
+  if [ "${HTTPFS_SILENT}" != "true" ]; then
+    echo "$@"
+  fi
+}
+
+# if HTTPFS_HOME is already set warn it will be ignored
+#
+if [ "${HTTPFS_HOME}" != "" ]; then
+  echo "WARNING: current setting of HTTPFS_HOME ignored"
+fi
+
+print
+
+# setting HTTPFS_HOME to the installation dir, it cannot be changed
+#
+export HTTPFS_HOME=${BASEDIR}
+httpfs_home=${HTTPFS_HOME}
+print "Setting HTTPFS_HOME:          ${HTTPFS_HOME}"
+
+# if the installation has a env file, source it
+# this is for native packages installations
+#
+if [ -e "${HTTPFS_HOME}/bin/httpfs-env.sh" ]; then
+  print "Sourcing:                    ${HTTPFS_HOME}/bin/httpfs-env.sh"
+  source ${HTTPFS_HOME}/bin/httpfs-env.sh
+  grep "^ *export " ${HTTPFS_HOME}/bin/httpfs-env.sh | sed 's/ *export/  setting/'
+fi
+
+# verify that the sourced env file didn't change HTTPFS_HOME
+# if so, warn and revert
+#
+if [ "${HTTPFS_HOME}" != "${httpfs_home}" ]; then
+  print "WARN: HTTPFS_HOME resetting to ''${HTTPFS_HOME}'' ignored"
+  export HTTPFS_HOME=${httpfs_home}
+  print "  using HTTPFS_HOME:        ${HTTPFS_HOME}"
+fi
+
+if [ "${HTTPFS_CONFIG}" = "" ]; then
+  export HTTPFS_CONFIG=${HTTPFS_HOME}/etc/hadoop
+  print "Setting HTTPFS_CONFIG:        ${HTTPFS_CONFIG}"
+else
+  print "Using   HTTPFS_CONFIG:        ${HTTPFS_CONFIG}"
+fi
+httpfs_config=${HTTPFS_CONFIG}
+
+# if the configuration dir has a env file, source it
+#
+if [ -e "${HTTPFS_CONFIG}/httpfs-env.sh" ]; then
+  print "Sourcing:                    ${HTTPFS_CONFIG}/httpfs-env.sh"
+  source ${HTTPFS_CONFIG}/httpfs-env.sh
+  grep "^ *export " ${HTTPFS_CONFIG}/httpfs-env.sh | sed 's/ *export/  setting/'
+fi
+
+# verify that the sourced env file didn't change HTTPFS_HOME
+# if so, warn and revert
+#
+if [ "${HTTPFS_HOME}" != "${httpfs_home}" ]; then
+  echo "WARN: HTTPFS_HOME resetting to ''${HTTPFS_HOME}'' ignored"
+  export HTTPFS_HOME=${httpfs_home}
+fi
+
+# verify that the sourced env file didn't change HTTPFS_CONFIG
+# if so, warn and revert
+#
+if [ "${HTTPFS_CONFIG}" != "${httpfs_config}" ]; then
+  echo "WARN: HTTPFS_CONFIG resetting to ''${HTTPFS_CONFIG}'' ignored"
+  export HTTPFS_CONFIG=${httpfs_config}
+fi
+
+if [ "${HTTPFS_LOG}" = "" ]; then
+  export HTTPFS_LOG=${HTTPFS_HOME}/logs
+  print "Setting HTTPFS_LOG:           ${HTTPFS_LOG}"
+else
+  print "Using   HTTPFS_LOG:           ${HTTPFS_LOG}"
+fi
+
+if [ ! -d "${HTTPFS_LOG}" ]; then
+  mkdir -p "${HTTPFS_LOG}"
+fi
+
+if [ "${HTTPFS_TEMP}" = "" ]; then
+  export HTTPFS_TEMP=${HTTPFS_HOME}/temp
+  print "Setting HTTPFS_TEMP:           ${HTTPFS_TEMP}"
+else
+  print "Using   HTTPFS_TEMP:           ${HTTPFS_TEMP}"
+fi
+
+if [ ! -d "${HTTPFS_TEMP}" ]; then
+  mkdir -p "${HTTPFS_TEMP}"
+fi
+
+if [ "${HTTPFS_HTTP_PORT}" = "" ]; then
+  export HTTPFS_HTTP_PORT=14000
+  print "Setting HTTPFS_HTTP_PORT:     ${HTTPFS_HTTP_PORT}"
+else
+  print "Using   HTTPFS_HTTP_PORT:     ${HTTPFS_HTTP_PORT}"
+fi
+
+if [ "${HTTPFS_ADMIN_PORT}" = "" ]; then
+  export HTTPFS_ADMIN_PORT=`expr $HTTPFS_HTTP_PORT + 1`
+  print "Setting HTTPFS_ADMIN_PORT:    ${HTTPFS_ADMIN_PORT}"
+else
+  print "Using   HTTPFS_ADMIN_PORT:    ${HTTPFS_ADMIN_PORT}"
+fi
+
+if [ "${HTTPFS_HTTP_HOSTNAME}" = "" ]; then
+  export HTTPFS_HTTP_HOSTNAME=`hostname -f`
+  print "Setting HTTPFS_HTTP_HOSTNAME: ${HTTPFS_HTTP_HOSTNAME}"
+else
+  print "Using   HTTPFS_HTTP_HOSTNAME: ${HTTPFS_HTTP_HOSTNAME}"
+fi
+
+if [ "${CATALINA_BASE}" = "" ]; then
+  export CATALINA_BASE=${HTTPFS_HOME}/share/hadoop/httpfs/tomcat
+  print "Setting CATALINA_BASE:       ${CATALINA_BASE}"
+else
+  print "Using   CATALINA_BASE:       ${CATALINA_BASE}"
+fi
+
+if [ "${CATALINA_OUT}" = "" ]; then
+  export CATALINA_OUT=${HTTPFS_LOG}/httpfs-catalina.out
+  print "Setting CATALINA_OUT:        ${CATALINA_OUT}"
+else
+  print "Using   CATALINA_OUT:        ${CATALINA_OUT}"
+fi
+
+if [ "${CATALINA_PID}" = "" ]; then
+  export CATALINA_PID=/tmp/httpfs.pid
+  print "Setting CATALINA_PID:        ${CATALINA_PID}"
+else
+  print "Using   CATALINA_PID:        ${CATALINA_PID}"
+fi
+
+print
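
As an illustration of the override hooks above, a hypothetical
${HTTPFS_CONFIG}/httpfs-env.sh might contain (values are examples only):

    #!/bin/bash
    # Example overrides; any variable not set here keeps the defaults above.
    export HTTPFS_HTTP_PORT=14100        # admin port then defaults to 14101
    export HTTPFS_LOG=/var/log/httpfs    # created by the config script if missing
    export HTTPFS_SILENT=true            # suppresses the print() banner output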

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/default-log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/default-log4j.properties?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/default-log4j.properties (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/default-log4j.properties Thu Dec  8 19:25:28 2011
@@ -0,0 +1,20 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.Target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{ABSOLUTE} %5p %c{1}:%L - %m%n
+log4j.rootLogger=INFO, console
+
+

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml Thu Dec  8 19:25:28 2011
@@ -0,0 +1,204 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<configuration>
+
+  <!-- HttpFSServer Server -->
+
+  <property>
+    <name>httpfs.buffer.size</name>
+    <value>4096</value>
+    <description>
+      The buffer size used by a read/write request when streaming data from/to
+      HDFS.
+    </description>
+  </property>
+
+  <!-- HttpFSServer Services -->
+
+  <property>
+    <name>httpfs.services</name>
+    <value>
+      org.apache.hadoop.lib.service.instrumentation.InstrumentationService,
+      org.apache.hadoop.lib.service.scheduler.SchedulerService,
+      org.apache.hadoop.lib.service.security.GroupsService,
+      org.apache.hadoop.lib.service.security.ProxyUserService,
+      org.apache.hadoop.lib.service.hadoop.FileSystemAccessService
+    </value>
+    <description>
+      Services used by the httpfs server.
+    </description>
+  </property>
+
+  <!-- Kerberos Configuration -->
+
+  <property>
+    <name>kerberos.realm</name>
+    <value>LOCALHOST</value>
+    <description>
+      Kerberos realm, used only if Kerberos authentication is used between
+      the clients and HttpFS, or between HttpFS and HDFS.
+
+      This property is only used to resolve other properties within this
+      configuration file.
+    </description>
+  </property>
+
+  <!-- HttpFSServer Security Configuration -->
+
+  <property>
+    <name>httpfs.hostname</name>
+    <value>${httpfs.http.hostname}</value>
+    <description>
+      Property used to synthesize the HTTP Kerberos principal used by httpfs.
+
+      This property is only used to resolve other properties within this
+      configuration file.
+    </description>
+  </property>
+
+  <property>
+    <name>httpfs.authentication.type</name>
+    <value>simple</value>
+    <description>
+      Defines the authentication mechanism used by httpfs for its HTTP clients.
+
+      Valid values are 'simple' and 'kerberos'.
+
+      If using 'simple', HTTP clients must specify the username with the
+      'user.name' query string parameter.
+
+      If using 'kerberos', HTTP clients must use HTTP SPNEGO.
+    </description>
+  </property>
+
+  <property>
+    <name>httpfs.authentication.kerberos.principal</name>
+    <value>HTTP/${httpfs.hostname}@${kerberos.realm}</value>
+    <description>
+      The HTTP Kerberos principal used by HttpFS in the HTTP endpoint.
+
+      The HTTP Kerberos principal MUST start with 'HTTP/' per the Kerberos
+      HTTP SPNEGO specification.
+    </description>
+  </property>
+
+  <property>
+    <name>httpfs.authentication.kerberos.keytab</name>
+    <value>${user.home}/httpfs.keytab</value>
+    <description>
+      The Kerberos keytab file with the credentials for the
+      HTTP Kerberos principal used by httpfs in the HTTP endpoint.
+    </description>
+  </property>
+
+  <!-- HttpFSServer proxy user Configuration -->
+
+  <property>
+    <name>httpfs.proxyuser.#USER#.hosts</name>
+    <value>*</value>
+    <description>
+      List of hosts from which the '#USER#' user is allowed to perform
+      'doAs' operations.
+
+      The '#USER#' must be replaced with the username of the user who is
+      allowed to perform 'doAs' operations.
+
+      The value can be the '*' wildcard or a list of hostnames.
+
+      For multiple users, copy this property and replace the user name
+      in the property name.
+    </description>
+  </property>
+
+  <property>
+    <name>httpfs.proxyuser.#USER#.groups</name>
+    <value>*</value>
+    <description>
+      List of groups whose users the '#USER#' user is allowed to
+      impersonate in order to perform 'doAs' operations.
+
+      The '#USER#' must be replaced with the username of the user who is
+      allowed to perform 'doAs' operations.
+
+      The value can be the '*' wildcard or a list of groups.
+
+      For multiple users, copy this property and replace the user name
+      in the property name.
+    </description>
+  </property>
+
+  <!-- FileSystemAccess Namenode Configuration -->
+
+  <property>
+    <name>namenode.hostname</name>
+    <value>localhost</value>
+    <description>
+      The HDFS Namenode host that the httpfs server connects to in order
+      to perform file system operations.
+
+      This property is only used to resolve other properties within this
+      configuration file.
+    </description>
+  </property>
+
+  <property>
+    <name>httpfs.hadoop.conf:fs.default.name</name>
+    <value>hdfs://${namenode.hostname}:8020</value>
+    <description>
+      The HDFS Namenode URI that the httpfs server connects to in order
+      to perform file system operations.
+    </description>
+  </property>
+
+  <!-- FileSystemAccess Namenode Security Configuration -->
+
+  <property>
+    <name>httpfs.hadoop.authentication.type</name>
+    <value>simple</value>
+    <description>
+      Defines the authentication mechanism used by httpfs to connect to
+      the HDFS Namenode.
+
+      Valid values are 'simple' and 'kerberos'.
+    </description>
+  </property>
+
+  <property>
+    <name>httpfs.hadoop.authentication.kerberos.keytab</name>
+    <value>${user.home}/httpfs.keytab</value>
+    <description>
+      The Kerberos keytab file with the credentials for the
+      Kerberos principal used by httpfs to connect to the HDFS Namenode.
+    </description>
+  </property>
+
+  <property>
+    <name>httpfs.hadoop.authentication.kerberos.principal</name>
+    <value>${user.name}/${httpfs.hostname}@${kerberos.realm}</value>
+    <description>
+      The Kerberos principal used by httpfs to connect to the HDFS Namenode.
+    </description>
+  </property>
+
+  <property>
+    <name>httpfs.hadoop.conf:dfs.namenode.kerberos.principal</name>
+    <value>hdfs/${namenode.hostname}@${kerberos.realm}</value>
+    <description>
+      The HDFS Namenode Kerberos principal.
+    </description>
+  </property>
+
+</configuration>
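
To make the proxy-user template concrete, a deployment allowing a hypothetical
'hue' service user to impersonate members of two groups from two gateway hosts
would add (all names are examples only):

    <property>
      <name>httpfs.proxyuser.hue.hosts</name>
      <value>gateway1.example.com,gateway2.example.com</value>
    </property>
    <property>
      <name>httpfs.proxyuser.hue.groups</name>
      <value>analysts,operators</value>
    </property>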

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs.properties
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs.properties?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs.properties (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs.properties Thu Dec  8 19:25:28 2011
@@ -0,0 +1,21 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+httpfs.version=${project.version}
+
+httpfs.source.repository=${httpfs.source.repository}
+httpfs.source.revision=${httpfs.source.revision}
+
+httpfs.build.username=${user.name}
+httpfs.build.timestamp=${httpfs.build.timestamp}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh Thu Dec  8 19:25:28 2011
@@ -0,0 +1,62 @@
+#!/bin/bash
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+# resolve links - $0 may be a softlink
+PRG="${0}"
+
+while [ -h "${PRG}" ]; do
+  ls=`ls -ld "${PRG}"`
+  link=`expr "$ls" : '.*-> \(.*\)$'`
+  if expr "$link" : '/.*' > /dev/null; then
+    PRG="$link"
+  else
+    PRG=`dirname "${PRG}"`/"$link"
+  fi
+done
+
+BASEDIR=`dirname ${PRG}`
+BASEDIR=`cd ${BASEDIR}/..;pwd`
+
+source ${BASEDIR}/libexec/httpfs-config.sh
+
+# The Java system property 'httpfs.http.port' is not used by HttpFS itself;
+# it is referenced from Tomcat's server.xml configuration file.
+#
+print "Using   CATALINA_OPTS:       ${CATALINA_OPTS}"
+
+catalina_opts="-Dhttpfs.home.dir=${HTTPFS_HOME}";
+catalina_opts="${catalina_opts} -Dhttpfs.config.dir=${HTTPFS_CONFIG}";
+catalina_opts="${catalina_opts} -Dhttpfs.log.dir=${HTTPFS_LOG}";
+catalina_opts="${catalina_opts} -Dhttpfs.temp.dir=${HTTPFS_TEMP}";
+catalina_opts="${catalina_opts} -Dhttpfs.admin.port=${HTTPFS_ADMIN_PORT}";
+catalina_opts="${catalina_opts} -Dhttpfs.http.port=${HTTPFS_HTTP_PORT}";
+catalina_opts="${catalina_opts} -Dhttpfs.http.hostname=${HTTPFS_HTTP_HOSTNAME}";
+
+print "Adding to CATALINA_OPTS:     ${catalina_opts}"
+
+export CATALINA_OPTS="${CATALINA_OPTS} ${catalina_opts}"
+
+# Due to a bug, the catalina.sh script does not use CATALINA_OPTS when stopping the server
+#
+if [ "${1}" = "stop" ]; then
+  export JAVA_OPTS=${CATALINA_OPTS}
+fi
+
+if [ "${HTTPFS_SILENT}" != "true" ]; then
+  ${BASEDIR}/share/hadoop/httpfs/tomcat/bin/catalina.sh "$@"
+else
+  ${BASEDIR}/share/hadoop/httpfs/tomcat/bin/catalina.sh "$@" > /dev/null
+fi
+
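
Typical invocation mirrors catalina.sh (paths illustrative):

    $ sbin/httpfs.sh start   # starts Tomcat with the assembled CATALINA_OPTS
    $ sbin/httpfs.sh stop    # same properties passed via JAVA_OPTS (see note above)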

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/ROOT/WEB-INF/web.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/ROOT/WEB-INF/web.xml?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/ROOT/WEB-INF/web.xml (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/ROOT/WEB-INF/web.xml Thu Dec  8 19:25:28 2011
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<web-app version="2.4" xmlns="http://java.sun.com/xml/ns/j2ee">
+</web-app>


