hadoop-hdfs-commits mailing list archives

From t...@apache.org
Subject svn commit: r1560505 - in /hadoop/common/branches/branch-2/hadoop-hdfs-project: hadoop-hdfs-httpfs/ hadoop-hdfs-httpfs/src/main/conf/ hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/ hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/...
Date Wed, 22 Jan 2014 20:39:54 GMT
Author: tucu
Date: Wed Jan 22 20:39:53 2014
New Revision: 1560505

URL: http://svn.apache.org/r1560505
Log:
HDFS-5703. Add support for HTTPS and swebhdfs to HttpFS. (tucu)

Added:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpsFSFileSystem.java
      - copied unchanged from r1560504, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpsFSFileSystem.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/ssl-server.xml
      - copied unchanged from r1560504, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/ssl-server.xml
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFWithSWebhdfsFileSystem.java
      - copied unchanged from r1560504, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFWithSWebhdfsFileSystem.java
Modified:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSKerberosAuthenticator.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenIdentifier.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/DelegationTokenManagerService.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/apt/ServerSetup.apt.vm
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSKerberosAuthenticationHandler.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/TestDelegationTokenManagerService.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
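
As context for the diffs below: with this change a client can reach an SSL-enabled HttpFS server through the new swebhdfs:// scheme (backed by the added HttpsFSFileSystem). The following is a minimal, hypothetical client sketch; the host name and port are placeholders, not values taken from this commit:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class SWebHdfsClientSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Placeholder authority; assumes HttpFS runs with HTTPFS_SSL_ENABLED=true.
        URI uri = new URI("swebhdfs://httpfs.example.com:14000");
        FileSystem fs = FileSystem.get(uri, conf);
        // List the root directory over HTTPS.
        for (FileStatus status : fs.listStatus(new Path("/"))) {
          System.out.println(status.getPath());
        }
        fs.close();
      }
    }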

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml?rev=1560505&r1=1560504&r2=1560505&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml Wed Jan 22 20:39:53 2014
@@ -554,6 +554,9 @@
                     <delete file="${httpfs.tomcat.dist.dir}/conf/server.xml"/>
                     <copy file="${basedir}/src/main/tomcat/server.xml"
                           toDir="${httpfs.tomcat.dist.dir}/conf"/>
+                    <delete file="${httpfs.tomcat.dist.dir}/conf/ssl-server.xml"/>
+                    <copy file="${basedir}/src/main/tomcat/ssl-server.xml"
+                          toDir="${httpfs.tomcat.dist.dir}/conf"/>
                     <delete file="${httpfs.tomcat.dist.dir}/conf/logging.properties"/>
                     <copy file="${basedir}/src/main/tomcat/logging.properties"
                           toDir="${httpfs.tomcat.dist.dir}/conf"/>

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh?rev=1560505&r1=1560504&r2=1560505&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh Wed Jan 22 20:39:53 2014
@@ -39,3 +39,15 @@
 # The hostname HttpFS server runs on
 #
 # export HTTPFS_HTTP_HOSTNAME=`hostname -f`
+
+# Indicates if HttpFS is using SSL
+#
+# export HTTPFS_SSL_ENABLED=false
+
+# The location of the SSL keystore if using SSL
+#
+# export HTTPFS_SSL_KEYSTORE_FILE=${HOME}/.keystore
+
+# The password of the SSL keystore if using SSL
+#
+# export HTTPFS_SSL_KEYSTORE_PASS=password

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java?rev=1560505&r1=1560504&r2=1560505&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java Wed Jan 22 20:39:53 2014
@@ -243,7 +243,7 @@ public class HttpFSFileSystem extends Fi
     if (makeQualified) {
       path = makeQualified(path);
     }
-    final URL url = HttpFSUtils.createHttpURL(path, params);
+    final URL url = HttpFSUtils.createURL(path, params);
     return doAsRealUserIfNecessary(new Callable<HttpURLConnection>() {
       @Override
       public HttpURLConnection call() throws Exception {

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSKerberosAuthenticator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSKerberosAuthenticator.java?rev=1560505&r1=1560504&r2=1560505&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSKerberosAuthenticator.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSKerberosAuthenticator.java Wed Jan 22 20:39:53 2014
@@ -123,7 +123,7 @@ public class HttpFSKerberosAuthenticator
     Map<String, String> params = new HashMap<String, String>();
     params.put(OP_PARAM, op.toString());
     params.put(RENEWER_PARAM,renewer);
-    URL url = HttpFSUtils.createHttpURL(new Path(fsURI), params);
+    URL url = HttpFSUtils.createURL(new Path(fsURI), params);
     AuthenticatedURL aUrl =
       new AuthenticatedURL(new HttpFSKerberosAuthenticator());
     try {
@@ -150,7 +150,7 @@ public class HttpFSKerberosAuthenticator
     params.put(OP_PARAM,
                DelegationTokenOperation.RENEWDELEGATIONTOKEN.toString());
     params.put(TOKEN_PARAM, dToken.encodeToUrlString());
-    URL url = HttpFSUtils.createHttpURL(new Path(fsURI), params);
+    URL url = HttpFSUtils.createURL(new Path(fsURI), params);
     AuthenticatedURL aUrl =
       new AuthenticatedURL(new HttpFSKerberosAuthenticator());
     try {
@@ -172,7 +172,7 @@ public class HttpFSKerberosAuthenticator
     params.put(OP_PARAM,
                DelegationTokenOperation.CANCELDELEGATIONTOKEN.toString());
     params.put(TOKEN_PARAM, dToken.encodeToUrlString());
-    URL url = HttpFSUtils.createHttpURL(new Path(fsURI), params);
+    URL url = HttpFSUtils.createURL(new Path(fsURI), params);
     AuthenticatedURL aUrl =
       new AuthenticatedURL(new HttpFSKerberosAuthenticator());
     try {

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java?rev=1560505&r1=1560504&r2=1560505&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java Wed Jan 22 20:39:53 2014
@@ -55,17 +55,21 @@ public class HttpFSUtils {
    *
    * @return a <code>URL</code> for the HttpFSServer server,
    *
-   * @throws IOException thrown if an IO error occurrs.
+   * @throws IOException thrown if an IO error occurs.
    */
-  static URL createHttpURL(Path path, Map<String, String> params)
+  static URL createURL(Path path, Map<String, String> params)
     throws IOException {
     URI uri = path.toUri();
     String realScheme;
     if (uri.getScheme().equalsIgnoreCase(HttpFSFileSystem.SCHEME)) {
       realScheme = "http";
+    } else if (uri.getScheme().equalsIgnoreCase(HttpsFSFileSystem.SCHEME)) {
+      realScheme = "https";
+
     } else {
       throw new IllegalArgumentException(MessageFormat.format(
-        "Invalid scheme [{0}] it should be 'webhdfs'", uri));
+        "Invalid scheme [{0}] it should be '" + HttpFSFileSystem.SCHEME + "' " +
+            "or '" + HttpsFSFileSystem.SCHEME + "'", uri));
     }
     StringBuilder sb = new StringBuilder();
     sb.append(realScheme).append("://").append(uri.getAuthority()).

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java?rev=1560505&r1=1560504&r2=1560505&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java Wed Jan 22 20:39:53 2014
@@ -94,11 +94,11 @@ public class HttpFSServerWebApp extends 
    */
   @Override
   public void init() throws ServerException {
-    super.init();
     if (SERVER != null) {
       throw new RuntimeException("HttpFSServer server already initialized");
     }
     SERVER = this;
+    super.init();
     adminGroup = getConfig().get(getPrefixedName(CONF_ADMIN_GROUP), "admin");
     LOG.info("Connects to Namenode [{}]",
              get().get(FileSystemAccess.class).getFileSystemConfiguration().

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenIdentifier.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenIdentifier.java?rev=1560505&r1=1560504&r2=1560505&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenIdentifier.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenIdentifier.java Wed Jan 22 20:39:53 2014
@@ -29,30 +29,33 @@ import org.apache.hadoop.security.token.
 public class DelegationTokenIdentifier
   extends AbstractDelegationTokenIdentifier {
 
-  public static final Text KIND_NAME = WebHdfsFileSystem.TOKEN_KIND;
+  private Text kind = WebHdfsFileSystem.TOKEN_KIND;
 
-  public DelegationTokenIdentifier() {
+  public DelegationTokenIdentifier(Text kind) {
+    this.kind = kind;
   }
 
   /**
    * Create a new delegation token identifier
    *
+   * @param kind token kind
    * @param owner the effective username of the token owner
    * @param renewer the username of the renewer
    * @param realUser the real username of the token owner
    */
-  public DelegationTokenIdentifier(Text owner, Text renewer, Text realUser) {
+  public DelegationTokenIdentifier(Text kind, Text owner, Text renewer,
+      Text realUser) {
     super(owner, renewer, realUser);
+    this.kind = kind;
   }
 
-
   /**
    * Returns the kind, <code>TOKEN_KIND</code>.
    * @return returns <code>TOKEN_KIND</code>.
    */
   @Override
   public Text getKind() {
-    return KIND_NAME;
+    return kind;
   }
 
 }

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/DelegationTokenManagerService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/DelegationTokenManagerService.java?rev=1560505&r1=1560504&r2=1560505&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/DelegationTokenManagerService.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/DelegationTokenManagerService.java Wed Jan 22 20:39:53 2014
@@ -19,6 +19,8 @@ package org.apache.hadoop.lib.service.se
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.http.server.HttpFSServerWebApp;
+import org.apache.hadoop.hdfs.web.SWebHdfsFileSystem;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.lib.server.BaseService;
 import org.apache.hadoop.lib.server.ServerException;
@@ -55,6 +57,8 @@ public class DelegationTokenManagerServi
 
   DelegationTokenSecretManager secretManager = null;
 
+  private Text tokenKind;
+
   public DelegationTokenManagerService() {
     super(PREFIX);
   }
@@ -70,7 +74,9 @@ public class DelegationTokenManagerServi
     long updateInterval = getServiceConfig().getLong(UPDATE_INTERVAL, DAY);
     long maxLifetime = getServiceConfig().getLong(MAX_LIFETIME, 7 * DAY);
     long renewInterval = getServiceConfig().getLong(RENEW_INTERVAL, DAY);
-    secretManager = new DelegationTokenSecretManager(updateInterval,
+    tokenKind = (HttpFSServerWebApp.get().isSslEnabled())
+                ? SWebHdfsFileSystem.TOKEN_KIND : WebHdfsFileSystem.TOKEN_KIND;
+    secretManager = new DelegationTokenSecretManager(tokenKind, updateInterval,
                                                      maxLifetime,
                                                      renewInterval, HOUR);
     try {
@@ -122,7 +128,7 @@ public class DelegationTokenManagerServi
       realUser = new Text(ugi.getRealUser().getUserName());
     }
     DelegationTokenIdentifier tokenIdentifier =
-      new DelegationTokenIdentifier(owner, new Text(renewer), realUser);
+      new DelegationTokenIdentifier(tokenKind, owner, new Text(renewer), realUser);
     Token<DelegationTokenIdentifier> token =
       new Token<DelegationTokenIdentifier>(tokenIdentifier, secretManager);
     try {
@@ -188,7 +194,7 @@ public class DelegationTokenManagerServi
     throws DelegationTokenManagerException {
     ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
     DataInputStream dis = new DataInputStream(buf);
-    DelegationTokenIdentifier id = new DelegationTokenIdentifier();
+    DelegationTokenIdentifier id = new DelegationTokenIdentifier(tokenKind);
     try {
       id.readFields(dis);
       dis.close();
@@ -203,6 +209,8 @@ public class DelegationTokenManagerServi
   private static class DelegationTokenSecretManager
     extends AbstractDelegationTokenSecretManager<DelegationTokenIdentifier> {
 
+    private Text tokenKind;
+
     /**
      * Create a secret manager
      *
@@ -215,17 +223,18 @@ public class DelegationTokenManagerServi
      * scanned
      * for expired tokens
      */
-    public DelegationTokenSecretManager(long delegationKeyUpdateInterval,
+    public DelegationTokenSecretManager(Text tokenKind, long delegationKeyUpdateInterval,
                                         long delegationTokenMaxLifetime,
                                         long delegationTokenRenewInterval,
                                         long delegationTokenRemoverScanInterval) {
       super(delegationKeyUpdateInterval, delegationTokenMaxLifetime,
             delegationTokenRenewInterval, delegationTokenRemoverScanInterval);
+      this.tokenKind = tokenKind;
     }
 
     @Override
     public DelegationTokenIdentifier createIdentifier() {
-      return new DelegationTokenIdentifier();
+      return new DelegationTokenIdentifier(tokenKind);
     }
 
   }

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java?rev=1560505&r1=1560504&r2=1560505&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java Wed Jan 22 20:39:53 2014
@@ -44,6 +44,7 @@ public abstract class ServerWebApp exten
   private static final String TEMP_DIR = ".temp.dir";
   private static final String HTTP_HOSTNAME = ".http.hostname";
   private static final String HTTP_PORT = ".http.port";
+  public static final String SSL_ENABLED = ".ssl.enabled";
 
   private static ThreadLocal<String> HOME_DIR_TL = new ThreadLocal<String>();
 
@@ -225,4 +226,12 @@ public abstract class ServerWebApp exten
   public void setAuthority(InetSocketAddress authority) {
     this.authority = authority;
   }
+
+
+  /**
+   *
+   */
+  public boolean isSslEnabled() {
+    return Boolean.valueOf(System.getProperty(getName() + SSL_ENABLED, "false"));
+  }
 }

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh?rev=1560505&r1=1560504&r2=1560505&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh Wed Jan 22 20:39:53 2014
@@ -143,6 +143,27 @@ else
   print "Using   HTTPFS_HTTP_HOSTNAME: ${HTTPFS_HTTP_HOSTNAME}"
 fi
 
+if [ "${HTTPFS_SSL_ENABLED}" = "" ]; then
+  export HTTPFS_SSL_ENABLED="false"
+  print "Setting HTTPFS_SSL_ENABLED: ${HTTPFS_SSL_ENABLED}"
+else
+  print "Using   HTTPFS_SSL_ENABLED: ${HTTPFS_SSL_ENABLED}"
+fi
+
+if [ "${HTTPFS_SSL_KEYSTORE_FILE}" = "" ]; then
+  export HTTPFS_SSL_KEYSTORE_FILE=${HOME}/.keystore
+  print "Setting HTTPFS_SSL_KEYSTORE_FILE:     ${HTTPFS_SSL_KEYSTORE_FILE}"
+else
+  print "Using   HTTPFS_SSL_KEYSTORE_FILE:     ${HTTPFS_SSL_KEYSTORE_FILE}"
+fi
+
+if [ "${HTTPFS_SSL_KEYSTORE_PASS}" = "" ]; then
+  export HTTPFS_SSL_KEYSTORE_PASS=password
+  print "Setting HTTPFS_SSL_KEYSTORE_PASS:     ${HTTPFS_SSL_KEYSTORE_PASS}"
+else
+  print "Using   HTTPFS_SSL_KEYSTORE_PASS:     ${HTTPFS_SSL_KEYSTORE_PASS}"
+fi
+
 if [ "${CATALINA_BASE}" = "" ]; then
   export CATALINA_BASE=${HTTPFS_HOME}/share/hadoop/httpfs/tomcat
   print "Setting CATALINA_BASE:       ${CATALINA_BASE}"

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh?rev=1560505&r1=1560504&r2=1560505&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh Wed Jan 22 20:39:53 2014
@@ -43,6 +43,9 @@ catalina_opts="${catalina_opts} -Dhttpfs
 catalina_opts="${catalina_opts} -Dhttpfs.admin.port=${HTTPFS_ADMIN_PORT}";
 catalina_opts="${catalina_opts} -Dhttpfs.http.port=${HTTPFS_HTTP_PORT}";
 catalina_opts="${catalina_opts} -Dhttpfs.http.hostname=${HTTPFS_HTTP_HOSTNAME}";
+catalina_opts="${catalina_opts} -Dhttpfs.ssl.enabled=${HTTPFS_SSL_ENABLED}";
+catalina_opts="${catalina_opts} -Dhttpfs.ssl.keystore.file=${HTTPFS_SSL_KEYSTORE_FILE}";
+catalina_opts="${catalina_opts} -Dhttpfs.ssl.keystore.pass=${HTTPFS_SSL_KEYSTORE_PASS}";
 
 print "Adding to CATALINA_OPTS:     ${catalina_opts}"
 

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/apt/ServerSetup.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/apt/ServerSetup.apt.vm?rev=1560505&r1=1560504&r2=1560505&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/apt/ServerSetup.apt.vm (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/apt/ServerSetup.apt.vm Wed Jan 22 20:39:53 2014
@@ -118,4 +118,46 @@ Transfer-Encoding: chunked
   HttpFS supports the following {{{./httpfs-default.html}configuration properties}}
   in the HttpFS's <<<conf/httpfs-site.xml>>> configuration file.
 
+* HttpFS over HTTPS (SSL)
+
+  To configure HttpFS to work over SSL edit the {{httpfs-env.sh}} script in the
+  configuration directory setting the {{HTTPFS_SSL_ENABLED}} to {{true}}.
+
+  In addition, the following 2 properties may be defined (shown with default
+  values):
+
+    * HTTPFS_SSL_KEYSTORE_FILE=${HOME}/.keystore
+
+    * HTTPFS_SSL_KEYSTORE_PASS=password
+
+  In the HttpFS <<<tomcat/conf>>> directory, replace the <<<server.xml>>> file
+  with the  <<<ssl-server.xml>>> file.
+
+
+  You need to create an SSL certificate for the HttpFS server. As the
+  <<<httpfs>>> Unix user, using the Java <<<keytool>>> command to create the
+  SSL certificate:
+
++---+
+$ keytool -genkey -alias tomcat -keyalg RSA
++---+
+
+  You will be asked a series of questions in an interactive prompt.  It will
+  create the keystore file, which will be named <<.keystore>> and located in the
+  <<<httpfs>>> user home directory.
+
+  The password you enter for "keystore password" must match the  value of the
+  <<<HTTPFS_SSL_KEYSTORE_PASS>>> environment variable set in the
+  <<<httpfs-env.sh>>> script in the configuration directory.
+
+  The answer to "What is your first and last name?" (i.e. "CN") must be the
+  hostname of the machine where the HttpFS Server will be running.
+
+  Start HttpFS. It should work over HTTPS.
+
+  Using the Hadoop <<<FileSystem>>> API or the Hadoop FS shell, use the
+  <<<swebhdfs://>>> scheme. Make sure the JVM is picking up the truststore
+  containing the public key of the SSL certificate if using a self-signed
+  certificate.
+
   \[ {{{./index.html}Go Back}} \]
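
The documentation added above assumes the client JVM trusts the HttpFS certificate. With a self-signed certificate, one way to arrange that is via the standard JSSE system properties; this is only a sketch with placeholder path and password, and nothing in this commit sets these for you:

    // Point the client JVM at a truststore that holds the HttpFS certificate.
    System.setProperty("javax.net.ssl.trustStore", "/home/httpfs/.truststore");
    System.setProperty("javax.net.ssl.trustStorePassword", "trustpass");

The same properties can also be passed as -D options when launching the client JVM.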

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java?rev=1560505&r1=1560504&r2=1560505&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java Wed Jan 22 20:39:53 2014
@@ -116,10 +116,14 @@ public abstract class BaseTestHttpFSWith
     return HttpFSFileSystem.class;
   }
 
+  protected String getScheme() {
+    return "webhdfs";
+  }
+
   protected FileSystem getHttpFSFileSystem() throws Exception {
     Configuration conf = new Configuration();
     conf.set("fs.webhdfs.impl", getFileSystemClass().getName());
-    URI uri = new URI("webhdfs://" +
+    URI uri = new URI(getScheme() + "://" +
                       TestJettyHelper.getJettyURL().toURI().getAuthority());
     return FileSystem.get(uri, conf);
   }
@@ -127,7 +131,7 @@ public abstract class BaseTestHttpFSWith
   protected void testGet() throws Exception {
     FileSystem fs = getHttpFSFileSystem();
     Assert.assertNotNull(fs);
-    URI uri = new URI("webhdfs://" +
+    URI uri = new URI(getScheme() + "://" +
                       TestJettyHelper.getJettyURL().toURI().getAuthority());
     Assert.assertEquals(fs.getUri(), uri);
     fs.close();

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSKerberosAuthenticationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSKerberosAuthenticationHandler.java?rev=1560505&r1=1560504&r2=1560505&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSKerberosAuthenticationHandler.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSKerberosAuthenticationHandler.java Wed Jan 22 20:39:53 2014
@@ -22,9 +22,13 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
 import org.apache.hadoop.fs.http.client.HttpFSKerberosAuthenticator;
 import org.apache.hadoop.fs.http.client.HttpFSKerberosAuthenticator.DelegationTokenOperation;
+import org.apache.hadoop.hdfs.web.SWebHdfsFileSystem;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.lib.service.DelegationTokenIdentifier;
 import org.apache.hadoop.lib.service.DelegationTokenManager;
 import org.apache.hadoop.lib.service.DelegationTokenManagerException;
+import org.apache.hadoop.lib.servlet.ServerWebApp;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.server.AuthenticationHandler;
@@ -51,7 +55,24 @@ public class TestHttpFSKerberosAuthentic
 
   @Test
   @TestDir
-  public void testManagementOperations() throws Exception {
+  public void testManagementOperationsWebHdfsFileSystem() throws Exception {
+    testManagementOperations(WebHdfsFileSystem.TOKEN_KIND);
+  }
+
+  @Test
+  @TestDir
+  public void testManagementOperationsSWebHdfsFileSystem() throws Exception {
+    try {
+      System.setProperty(HttpFSServerWebApp.NAME +
+          ServerWebApp.SSL_ENABLED, "true");
+      testManagementOperations(SWebHdfsFileSystem.TOKEN_KIND);
+    } finally {
+      System.getProperties().remove(HttpFSServerWebApp.NAME +
+          ServerWebApp.SSL_ENABLED);
+    }
+  }
+
+  private void testManagementOperations(Text expectedTokenKind) throws Exception {
     String dir = TestDirHelper.getTestDir().getAbsolutePath();
 
     Configuration httpfsConf = new Configuration(false);
@@ -67,8 +88,8 @@ public class TestHttpFSKerberosAuthentic
 
       testNonManagementOperation(handler);
       testManagementOperationErrors(handler);
-      testGetToken(handler, null);
-      testGetToken(handler, "foo");
+      testGetToken(handler, null, expectedTokenKind);
+      testGetToken(handler, "foo", expectedTokenKind);
       testCancelToken(handler);
       testRenewToken(handler);
 
@@ -112,8 +133,8 @@ public class TestHttpFSKerberosAuthentic
       Mockito.contains("requires SPNEGO"));
   }
 
-  private void testGetToken(AuthenticationHandler handler, String renewer)
-    throws Exception {
+  private void testGetToken(AuthenticationHandler handler, String renewer,
+      Text expectedTokenKind) throws Exception {
     DelegationTokenOperation op = DelegationTokenOperation.GETDELEGATIONTOKEN;
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
@@ -154,6 +175,7 @@ public class TestHttpFSKerberosAuthentic
     Token<DelegationTokenIdentifier> dt = new Token<DelegationTokenIdentifier>();
     dt.decodeFromUrlString(tokenStr);
     HttpFSServerWebApp.get().get(DelegationTokenManager.class).verifyToken(dt);
+    Assert.assertEquals(expectedTokenKind, dt.getKind());
   }
 
   private void testCancelToken(AuthenticationHandler handler)

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/TestDelegationTokenManagerService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/TestDelegationTokenManagerService.java?rev=1560505&r1=1560504&r2=1560505&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/TestDelegationTokenManagerService.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/TestDelegationTokenManagerService.java Wed Jan 22 20:39:53 2014
@@ -23,6 +23,9 @@ import org.apache.hadoop.fs.http.server.
 import org.apache.hadoop.lib.server.Server;
 import org.apache.hadoop.lib.service.DelegationTokenManager;
 import org.apache.hadoop.lib.service.DelegationTokenManagerException;
+import org.apache.hadoop.lib.service.hadoop.FileSystemAccessService;
+import org.apache.hadoop.lib.service.instrumentation.InstrumentationService;
+import org.apache.hadoop.lib.service.scheduler.SchedulerService;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.test.HTestCase;
@@ -43,9 +46,12 @@ public class TestDelegationTokenManagerS
   public void service() throws Exception {
     String dir = TestDirHelper.getTestDir().getAbsolutePath();
     Configuration conf = new Configuration(false);
-    conf.set("server.services", StringUtils.join(",",
-      Arrays.asList(DelegationTokenManagerService.class.getName())));
-    Server server = new Server("server", dir, dir, dir, dir, conf);
+    conf.set("httpfs.services", StringUtils.join(",",
+      Arrays.asList(InstrumentationService.class.getName(),
+          SchedulerService.class.getName(),
+          FileSystemAccessService.class.getName(),
+          DelegationTokenManagerService.class.getName())));
+    Server server = new HttpFSServerWebApp(dir, dir, dir, dir, conf);
     server.init();
     DelegationTokenManager tm = server.get(DelegationTokenManager.class);
     Assert.assertNotNull(tm);

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java?rev=1560505&r1=1560504&r2=1560505&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java Wed Jan 22 20:39:53 2014
@@ -28,31 +28,46 @@ import org.junit.Test;
 import org.junit.rules.MethodRule;
 import org.junit.runners.model.FrameworkMethod;
 import org.junit.runners.model.Statement;
+import org.mortbay.jetty.Connector;
 import org.mortbay.jetty.Server;
+import org.mortbay.jetty.security.SslSocketConnector;
 
 public class TestJettyHelper implements MethodRule {
-
-  @Test
-  public void dummy() {
+  private boolean ssl;
+  private String keyStoreType;
+  private String keyStore;
+  private String keyStorePassword;
+  private Server server;
+
+  public TestJettyHelper() {
+    this.ssl = false;
+  }
+
+  public TestJettyHelper(String keyStoreType, String keyStore,
+      String keyStorePassword) {
+    ssl = true;
+    this.keyStoreType = keyStoreType;
+    this.keyStore = keyStore;
+    this.keyStorePassword = keyStorePassword;
   }
 
-  private static ThreadLocal<Server> TEST_SERVLET_TL = new InheritableThreadLocal<Server>();
+  private static ThreadLocal<TestJettyHelper> TEST_JETTY_TL =
+      new InheritableThreadLocal<TestJettyHelper>();
 
   @Override
   public Statement apply(final Statement statement, final FrameworkMethod frameworkMethod, final Object o) {
     return new Statement() {
       @Override
       public void evaluate() throws Throwable {
-        Server server = null;
         TestJetty testJetty = frameworkMethod.getAnnotation(TestJetty.class);
         if (testJetty != null) {
           server = createJettyServer();
         }
         try {
-          TEST_SERVLET_TL.set(server);
+          TEST_JETTY_TL.set(TestJettyHelper.this);
           statement.evaluate();
         } finally {
-          TEST_SERVLET_TL.remove();
+          TEST_JETTY_TL.remove();
           if (server != null && server.isRunning()) {
             try {
               server.stop();
@@ -73,8 +88,19 @@ public class TestJettyHelper implements 
       int port = ss.getLocalPort();
       ss.close();
       Server server = new Server(0);
-      server.getConnectors()[0].setHost(host);
-      server.getConnectors()[0].setPort(port);
+      if (!ssl) {
+        server.getConnectors()[0].setHost(host);
+        server.getConnectors()[0].setPort(port);
+      } else {
+        SslSocketConnector c = new SslSocketConnector();
+        c.setHost(host);
+        c.setPort(port);
+        c.setNeedClientAuth(false);
+        c.setKeystore(keyStore);
+        c.setKeystoreType(keyStoreType);
+        c.setKeyPassword(keyStorePassword);
+        server.setConnectors(new Connector[] {c});
+      }
       return server;
     } catch (Exception ex) {
       throw new RuntimeException("Could not stop embedded servlet container, " + ex.getMessage(), ex);
@@ -109,11 +135,11 @@ public class TestJettyHelper implements 
    * @return a Jetty server ready to be configured and the started.
    */
   public static Server getJettyServer() {
-    Server server = TEST_SERVLET_TL.get();
-    if (server == null) {
+    TestJettyHelper helper = TEST_JETTY_TL.get();
+    if (helper == null || helper.server == null) {
       throw new IllegalStateException("This test does not use @TestJetty");
     }
-    return server;
+    return helper.server;
   }
 
   /**
@@ -123,12 +149,15 @@ public class TestJettyHelper implements 
    * @return the base URL (SCHEMA://HOST:PORT) of the test Jetty server.
    */
   public static URL getJettyURL() {
-    Server server = TEST_SERVLET_TL.get();
-    if (server == null) {
+    TestJettyHelper helper = TEST_JETTY_TL.get();
+    if (helper == null || helper.server == null) {
       throw new IllegalStateException("This test does not use @TestJetty");
     }
     try {
-      return new URL("http://" + server.getConnectors()[0].getHost() + ":" + server.getConnectors()[0].getPort());
+      String scheme = (helper.ssl) ? "https" : "http";
+      return new URL(scheme + "://" +
+          helper.server.getConnectors()[0].getHost() + ":" +
+          helper.server.getConnectors()[0].getPort());
     } catch (MalformedURLException ex) {
       throw new RuntimeException("It should never happen, " + ex.getMessage(), ex);
     }
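
The three-argument constructor added above is what an HTTPS-aware test would use to stand up the embedded Jetty with an SSL connector; getJettyURL() then reports an https:// base URL. A hypothetical construction follows (the keystore type, path, and password are placeholders; the actual wiring in the new TestHttpFSFWithSWebhdfsFileSystem may differ):

    // Hypothetical: SSL-enabled helper built from a JKS keystore prepared by the test.
    TestJettyHelper sslJetty = new TestJettyHelper("jks", "/tmp/test.keystore", "secret");

Combined with a BaseTestHttpFSWith subclass that overrides getScheme() to return "swebhdfs", this exercises the HTTPS code path end to end.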

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1560505&r1=1560504&r2=1560505&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Wed Jan 22 20:39:53 2014
@@ -34,6 +34,8 @@ Release 2.4.0 - UNRELEASED
     as a collection of storages (see breakdown of tasks below for features and
     contributors).
 
+    HDFS-5703. Add support for HTTPS and swebhdfs to HttpFS. (tucu)
+
   IMPROVEMENTS
 
     HDFS-5267. Remove volatile from LightWeightHashSet. (Junping Du via llu)


