hadoop-hdfs-commits mailing list archives

From: t...@apache.org
Subject: svn commit: r1365988 [2/3] - in /hadoop/common/trunk/hadoop-hdfs-project: hadoop-hdfs-httpfs/ hadoop-hdfs-httpfs/dev-support/ hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/ hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/...
Date: Thu, 26 Jul 2012 13:39:06 GMT
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml?rev=1365988&r1=1365987&r2=1365988&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml Thu Jul 26 13:39:05 2012
@@ -35,6 +35,7 @@
       org.apache.hadoop.lib.service.scheduler.SchedulerService,
       org.apache.hadoop.lib.service.security.GroupsService,
       org.apache.hadoop.lib.service.security.ProxyUserService,
+      org.apache.hadoop.lib.service.security.DelegationTokenManagerService,
       org.apache.hadoop.lib.service.hadoop.FileSystemAccessService
     </value>
     <description>
@@ -88,12 +89,12 @@
     <description>
       Defines the authentication mechanism used by httpfs for its HTTP clients.
 
-      Valid values are 'simple' and 'kerberos'.
+      Valid values are 'simple' or 'kerberos'.
 
       If using 'simple' HTTP clients must specify the username with the
       'user.name' query string parameter.
 
-      If using 'kerberos' HTTP clients must use HTTP SPNEGO.
+      If using 'kerberos' HTTP clients must use HTTP SPNEGO or delegation tokens.
     </description>
   </property>
 
@@ -153,6 +154,32 @@
     </description>
   </property>
 
+  <!-- HttpFS Delegation Token configuration -->
+
+  <property>
+    <name>httpfs.delegation.token.manager.update.interval</name>
+    <value>86400</value>
+    <description>
+      HttpFS delegation token update interval, default 1 day, in seconds.
+    </description>
+  </property>
+
+  <property>
+    <name>httpfs.delegation.token.manager.max.lifetime</name>
+    <value>604800</value>
+    <description>
+      HttpFS delegation token maximum lifetime, default 7 days, in seconds.
+    </description>
+  </property>
+
+  <property>
+    <name>httpfs.delegation.token.manager.renewal.interval</name>
+    <value>86400</value>
+    <description>
+      HttpFS delegation token renewal interval, default 1 day, in seconds.
+    </description>
+  </property>
+
   <!-- FileSystemAccess Namenode Security Configuration -->
 
   <property>
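
These three intervals govern the delegation tokens HttpFS now issues over its REST API. A minimal client-side sketch of the token round trip follows; the host/port are placeholders, the "Token"/"urlString" JSON keys are assumed to follow the WebHDFS schema exercised by the tests in this commit, and obtaining the token still requires an already-authenticated request (e.g. SPNEGO or the hadoop-auth cookie, as TestHttpFSServer does below):

    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    import org.json.simple.JSONObject;
    import org.json.simple.parser.JSONParser;

    public class DelegationTokenRoundTrip {
      public static void main(String[] args) throws Exception {
        // 1. Ask an (already authenticated) HttpFS endpoint for a token.
        URL url = new URL(
            "http://httpfs-host:14000/webhdfs/v1/?op=GETDELEGATIONTOKEN");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        JSONObject json = (JSONObject) new JSONParser()
            .parse(new InputStreamReader(conn.getInputStream()));
        JSONObject token = (JSONObject) json.get("Token");
        String tokenStr = (String) token.get("urlString");

        // 2. Later requests authenticate with the token alone, until it
        //    expires (max.lifetime) or is cancelled.
        url = new URL("http://httpfs-host:14000/webhdfs/v1/"
            + "?op=GETHOMEDIRECTORY&delegation=" + tokenStr);
        conn = (HttpURLConnection) url.openConnection();
        System.out.println(conn.getResponseCode()); // 200 while the token is valid
      }
    }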

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/webapp/WEB-INF/web.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/webapp/WEB-INF/web.xml?rev=1365988&r1=1365987&r2=1365988&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/webapp/WEB-INF/web.xml (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/webapp/WEB-INF/web.xml Thu Jul 26 13:39:05 2012
@@ -47,7 +47,7 @@
 
   <filter>
     <filter-name>authFilter</filter-name>
-    <filter-class>org.apache.hadoop.fs.http.server.AuthFilter</filter-class>
+    <filter-class>org.apache.hadoop.fs.http.server.HttpFSAuthenticationFilter</filter-class>
   </filter>
 
   <filter>
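
The commit only renames the filter class in web.xml; the filter implementation itself is not part of this diff. A minimal sketch of the usual hadoop-auth pattern such a filter follows — the class body, the CONF_PREFIX constant, and the use of HttpFSServerWebApp.get().getConfig() are illustrative assumptions, not the committed implementation:

    package org.apache.hadoop.fs.http.server;

    import java.util.Map;
    import java.util.Properties;
    import javax.servlet.FilterConfig;
    import javax.servlet.ServletException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.authentication.server.AuthenticationFilter;

    public class HttpFSAuthenticationFilterSketch extends AuthenticationFilter {

      private static final String CONF_PREFIX = "httpfs.authentication.";

      // Map httpfs.authentication.* onto the properties hadoop-auth expects,
      // so httpfs-site.xml can select a handler class such as the new
      // delegation-token-aware Kerberos handler.
      @Override
      protected Properties getConfiguration(String configPrefix,
          FilterConfig filterConfig) throws ServletException {
        Properties props = new Properties();
        Configuration conf = HttpFSServerWebApp.get().getConfig();
        for (Map.Entry<String, String> entry : conf) {
          String name = entry.getKey();
          if (name.startsWith(CONF_PREFIX)) {
            props.setProperty(name.substring(CONF_PREFIX.length()),
                              entry.getValue());
          }
        }
        return props;
      }
    }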

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java?rev=1365988&r1=1365987&r2=1365988&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java Thu Jul 26 13:39:05 2012
@@ -25,6 +25,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.Writer;
+import java.net.URI;
 import java.net.URL;
 import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
@@ -100,16 +101,24 @@ public class TestHttpFSFileSystem extend
     server.start();
   }
 
+  protected Class getFileSystemClass() {
+    return HttpFSFileSystem.class;
+  }
+
   protected FileSystem getHttpFileSystem() throws Exception {
     Configuration conf = new Configuration();
-    conf.set("fs.http.impl", HttpFSFileSystem.class.getName());
-    return FileSystem.get(TestJettyHelper.getJettyURL().toURI(), conf);
+    conf.set("fs.webhdfs.impl", getFileSystemClass().getName());
+    URI uri = new URI("webhdfs://" +
+                      TestJettyHelper.getJettyURL().toURI().getAuthority());
+    return FileSystem.get(uri, conf);
   }
 
   protected void testGet() throws Exception {
     FileSystem fs = getHttpFileSystem();
     Assert.assertNotNull(fs);
-    Assert.assertEquals(fs.getUri(), TestJettyHelper.getJettyURL().toURI());
+    URI uri = new URI("webhdfs://" +
+                      TestJettyHelper.getJettyURL().toURI().getAuthority());
+    Assert.assertEquals(fs.getUri(), uri);
     fs.close();
   }
 
@@ -474,8 +483,9 @@ public class TestHttpFSFileSystem extend
     for (int i = 0; i < Operation.values().length; i++) {
       ops[i] = new Object[]{Operation.values()[i]};
     }
+    //To test one or a subset of operations do:
+    //return Arrays.asList(new Object[][]{ new Object[]{Operation.OPEN}});
     return Arrays.asList(ops);
-//    return Arrays.asList(new Object[][]{ new Object[]{Operation.CREATE}});
   }
 
   private Operation operation;

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java.orig
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java.orig?rev=1365988&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java.orig (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java.orig Thu Jul 26 13:39:05 2012
@@ -0,0 +1,513 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.http.client;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Writer;
+import java.net.URL;
+import java.security.PrivilegedExceptionAction;
+import java.util.Arrays;
+import java.util.Collection;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.FileChecksum;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.http.server.HttpFSServerWebApp;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.test.HFSTestCase;
+import org.apache.hadoop.test.HadoopUsersConfTestHelper;
+import org.apache.hadoop.test.TestDir;
+import org.apache.hadoop.test.TestDirHelper;
+import org.apache.hadoop.test.TestHdfs;
+import org.apache.hadoop.test.TestHdfsHelper;
+import org.apache.hadoop.test.TestJetty;
+import org.apache.hadoop.test.TestJettyHelper;
+import org.junit.Assert;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.webapp.WebAppContext;
+
+@RunWith(value = Parameterized.class)
+public class TestHttpFSFileSystem extends HFSTestCase {
+
+  private void createHttpFSServer() throws Exception {
+    File homeDir = TestDirHelper.getTestDir();
+    Assert.assertTrue(new File(homeDir, "conf").mkdir());
+    Assert.assertTrue(new File(homeDir, "log").mkdir());
+    Assert.assertTrue(new File(homeDir, "temp").mkdir());
+    HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());
+
+    File secretFile = new File(new File(homeDir, "conf"), "secret");
+    Writer w = new FileWriter(secretFile);
+    w.write("secret");
+    w.close();
+
+    //HDFS configuration
+    String fsDefaultName = TestHdfsHelper.getHdfsConf().get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
+    Configuration conf = new Configuration(false);
+    conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fsDefaultName);
+    File hdfsSite = new File(new File(homeDir, "conf"), "hdfs-site.xml");
+    OutputStream os = new FileOutputStream(hdfsSite);
+    conf.writeXml(os);
+    os.close();
+
+    //HTTPFS configuration
+    conf = new Configuration(false);
+    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".groups",
+             HadoopUsersConfTestHelper.getHadoopProxyUserGroups());
+    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
+             HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
+    conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
+    File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
+    os = new FileOutputStream(httpfsSite);
+    conf.writeXml(os);
+    os.close();
+
+    ClassLoader cl = Thread.currentThread().getContextClassLoader();
+    URL url = cl.getResource("webapp");
+    WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs");
+    Server server = TestJettyHelper.getJettyServer();
+    server.addHandler(context);
+    server.start();
+  }
+
+  protected FileSystem getHttpFileSystem() throws Exception {
+    Configuration conf = new Configuration();
+    conf.set("fs.http.impl", HttpFSFileSystem.class.getName());
+    return FileSystem.get(TestJettyHelper.getJettyURL().toURI(), conf);
+  }
+
+  protected void testGet() throws Exception {
+    FileSystem fs = getHttpFileSystem();
+    Assert.assertNotNull(fs);
+    Assert.assertEquals(fs.getUri(), TestJettyHelper.getJettyURL().toURI());
+    fs.close();
+  }
+
+  private void testOpen() throws Exception {
+    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
+    OutputStream os = fs.create(path);
+    os.write(1);
+    os.close();
+    fs.close();
+    fs = getHttpFileSystem();
+    InputStream is = fs.open(new Path(path.toUri().getPath()));
+    Assert.assertEquals(is.read(), 1);
+    is.close();
+    fs.close();
+  }
+
+  private void testCreate(Path path, boolean override) throws Exception {
+    FileSystem fs = getHttpFileSystem();
+    FsPermission permission = new FsPermission(FsAction.READ_WRITE, FsAction.NONE, FsAction.NONE);
+    OutputStream os = fs.create(new Path(path.toUri().getPath()), permission, override, 1024,
+                                (short) 2, 100 * 1024 * 1024, null);
+    os.write(1);
+    os.close();
+    fs.close();
+
+    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    FileStatus status = fs.getFileStatus(path);
+    Assert.assertEquals(status.getReplication(), 2);
+    Assert.assertEquals(status.getBlockSize(), 100 * 1024 * 1024);
+    Assert.assertEquals(status.getPermission(), permission);
+    InputStream is = fs.open(path);
+    Assert.assertEquals(is.read(), 1);
+    is.close();
+    fs.close();
+  }
+
+  private void testCreate() throws Exception {
+    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
+    testCreate(path, false);
+    testCreate(path, true);
+    try {
+      testCreate(path, false);
+      Assert.fail();
+    } catch (IOException ex) {
+
+    } catch (Exception ex) {
+      Assert.fail();
+    }
+  }
+
+  private void testAppend() throws Exception {
+    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
+    OutputStream os = fs.create(path);
+    os.write(1);
+    os.close();
+    fs.close();
+    fs = getHttpFileSystem();
+    os = fs.append(new Path(path.toUri().getPath()));
+    os.write(2);
+    os.close();
+    fs.close();
+    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    InputStream is = fs.open(path);
+    Assert.assertEquals(is.read(), 1);
+    Assert.assertEquals(is.read(), 2);
+    Assert.assertEquals(is.read(), -1);
+    is.close();
+    fs.close();
+  }
+
+  private void testRename() throws Exception {
+    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo");
+    fs.mkdirs(path);
+    fs.close();
+    fs = getHttpFileSystem();
+    Path oldPath = new Path(path.toUri().getPath());
+    Path newPath = new Path(path.getParent(), "bar");
+    fs.rename(oldPath, newPath);
+    fs.close();
+    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    Assert.assertFalse(fs.exists(oldPath));
+    Assert.assertTrue(fs.exists(newPath));
+    fs.close();
+  }
+
+  private void testDelete() throws Exception {
+    Path foo = new Path(TestHdfsHelper.getHdfsTestDir(), "foo");
+    Path bar = new Path(TestHdfsHelper.getHdfsTestDir(), "bar");
+    Path foe = new Path(TestHdfsHelper.getHdfsTestDir(), "foe");
+    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    fs.mkdirs(foo);
+    fs.mkdirs(new Path(bar, "a"));
+    fs.mkdirs(foe);
+
+    FileSystem hoopFs = getHttpFileSystem();
+    Assert.assertTrue(hoopFs.delete(new Path(foo.toUri().getPath()), false));
+    Assert.assertFalse(fs.exists(foo));
+    try {
+      hoopFs.delete(new Path(bar.toUri().getPath()), false);
+      Assert.fail();
+    } catch (IOException ex) {
+    } catch (Exception ex) {
+      Assert.fail();
+    }
+    Assert.assertTrue(fs.exists(bar));
+    Assert.assertTrue(hoopFs.delete(new Path(bar.toUri().getPath()), true));
+    Assert.assertFalse(fs.exists(bar));
+
+    Assert.assertTrue(fs.exists(foe));
+    Assert.assertTrue(hoopFs.delete(foe, true));
+    Assert.assertFalse(fs.exists(foe));
+
+    hoopFs.close();
+    fs.close();
+  }
+
+  private void testListStatus() throws Exception {
+    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
+    OutputStream os = fs.create(path);
+    os.write(1);
+    os.close();
+    FileStatus status1 = fs.getFileStatus(path);
+    fs.close();
+
+    fs = getHttpFileSystem();
+    FileStatus status2 = fs.getFileStatus(new Path(path.toUri().getPath()));
+    fs.close();
+
+    Assert.assertEquals(status2.getPermission(), status1.getPermission());
+    Assert.assertEquals(status2.getPath().toUri().getPath(), status1.getPath().toUri().getPath());
+    Assert.assertEquals(status2.getReplication(), status1.getReplication());
+    Assert.assertEquals(status2.getBlockSize(), status1.getBlockSize());
+    Assert.assertEquals(status2.getAccessTime(), status1.getAccessTime());
+    Assert.assertEquals(status2.getModificationTime(), status1.getModificationTime());
+    Assert.assertEquals(status2.getOwner(), status1.getOwner());
+    Assert.assertEquals(status2.getGroup(), status1.getGroup());
+    Assert.assertEquals(status2.getLen(), status1.getLen());
+
+    FileStatus[] stati = fs.listStatus(path.getParent());
+    Assert.assertEquals(stati.length, 1);
+    Assert.assertEquals(stati[0].getPath().getName(), path.getName());
+  }
+
+  private void testWorkingdirectory() throws Exception {
+    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    Path workingDir = fs.getWorkingDirectory();
+    fs.close();
+
+    fs = getHttpFileSystem();
+    Path hoopWorkingDir = fs.getWorkingDirectory();
+    fs.close();
+    Assert.assertEquals(hoopWorkingDir.toUri().getPath(), workingDir.toUri().getPath());
+
+    fs = getHttpFileSystem();
+    fs.setWorkingDirectory(new Path("/tmp"));
+    workingDir = fs.getWorkingDirectory();
+    fs.close();
+    Assert.assertEquals(workingDir.toUri().getPath(), new Path("/tmp").toUri().getPath());
+  }
+
+  private void testMkdirs() throws Exception {
+    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo");
+    FileSystem fs = getHttpFileSystem();
+    fs.mkdirs(path);
+    fs.close();
+    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    Assert.assertTrue(fs.exists(path));
+    fs.close();
+  }
+
+  private void testSetTimes() throws Exception {
+    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
+    OutputStream os = fs.create(path);
+    os.write(1);
+    os.close();
+    FileStatus status1 = fs.getFileStatus(path);
+    fs.close();
+    long at = status1.getAccessTime();
+    long mt = status1.getModificationTime();
+
+    fs = getHttpFileSystem();
+    fs.setTimes(path, mt + 10, at + 20);
+    fs.close();
+
+    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    status1 = fs.getFileStatus(path);
+    fs.close();
+    long atNew = status1.getAccessTime();
+    long mtNew = status1.getModificationTime();
+    Assert.assertEquals(mtNew, mt + 10);
+    Assert.assertEquals(atNew, at + 20);
+  }
+
+  private void testSetPermission() throws Exception {
+    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foodir");
+    fs.mkdirs(path);
+
+    fs = getHttpFileSystem();
+    FsPermission permission1 = new FsPermission(FsAction.READ_WRITE, FsAction.NONE, FsAction.NONE);
+    fs.setPermission(path, permission1);
+    fs.close();
+
+    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    FileStatus status1 = fs.getFileStatus(path);
+    fs.close();
+    FsPermission permission2 = status1.getPermission();
+    Assert.assertEquals(permission2, permission1);
+
+    //sticky bit 
+    fs = getHttpFileSystem();
+    permission1 = new FsPermission(FsAction.READ_WRITE, FsAction.NONE, FsAction.NONE, true);
+    fs.setPermission(path, permission1);
+    fs.close();
+
+    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    status1 = fs.getFileStatus(path);
+    fs.close();
+    permission2 = status1.getPermission();
+    Assert.assertTrue(permission2.getStickyBit());
+    Assert.assertEquals(permission2, permission1);
+  }
+
+  private void testSetOwner() throws Exception {
+    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
+    OutputStream os = fs.create(path);
+    os.write(1);
+    os.close();
+    fs.close();
+
+    fs = getHttpFileSystem();
+    String user = HadoopUsersConfTestHelper.getHadoopUsers()[1];
+    String group = HadoopUsersConfTestHelper.getHadoopUserGroups(user)[0];
+    fs.setOwner(path, user, group);
+    fs.close();
+
+    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    FileStatus status1 = fs.getFileStatus(path);
+    fs.close();
+    Assert.assertEquals(status1.getOwner(), user);
+    Assert.assertEquals(status1.getGroup(), group);
+  }
+
+  private void testSetReplication() throws Exception {
+    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
+    OutputStream os = fs.create(path);
+    os.write(1);
+    os.close();
+    fs.close();
+    fs.setReplication(path, (short) 2);
+
+    fs = getHttpFileSystem();
+    fs.setReplication(path, (short) 1);
+    fs.close();
+
+    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    FileStatus status1 = fs.getFileStatus(path);
+    fs.close();
+    Assert.assertEquals(status1.getReplication(), (short) 1);
+  }
+
+  private void testChecksum() throws Exception {
+    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
+    OutputStream os = fs.create(path);
+    os.write(1);
+    os.close();
+    FileChecksum hdfsChecksum = fs.getFileChecksum(path);
+    fs.close();
+    fs = getHttpFileSystem();
+    FileChecksum httpChecksum = fs.getFileChecksum(path);
+    fs.close();
+    Assert.assertEquals(httpChecksum.getAlgorithmName(), hdfsChecksum.getAlgorithmName());
+    Assert.assertEquals(httpChecksum.getLength(), hdfsChecksum.getLength());
+    Assert.assertArrayEquals(httpChecksum.getBytes(), hdfsChecksum.getBytes());
+  }
+
+  private void testContentSummary() throws Exception {
+    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
+    OutputStream os = fs.create(path);
+    os.write(1);
+    os.close();
+    ContentSummary hdfsContentSummary = fs.getContentSummary(path);
+    fs.close();
+    fs = getHttpFileSystem();
+    ContentSummary httpContentSummary = fs.getContentSummary(path);
+    fs.close();
+    Assert.assertEquals(httpContentSummary.getDirectoryCount(), hdfsContentSummary.getDirectoryCount());
+    Assert.assertEquals(httpContentSummary.getFileCount(), hdfsContentSummary.getFileCount());
+    Assert.assertEquals(httpContentSummary.getLength(), hdfsContentSummary.getLength());
+    Assert.assertEquals(httpContentSummary.getQuota(), hdfsContentSummary.getQuota());
+    Assert.assertEquals(httpContentSummary.getSpaceConsumed(), hdfsContentSummary.getSpaceConsumed());
+    Assert.assertEquals(httpContentSummary.getSpaceQuota(), hdfsContentSummary.getSpaceQuota());
+  }
+
+  protected enum Operation {
+    GET, OPEN, CREATE, APPEND, RENAME, DELETE, LIST_STATUS, WORKING_DIRECTORY, MKDIRS,
+    SET_TIMES, SET_PERMISSION, SET_OWNER, SET_REPLICATION, CHECKSUM, CONTENT_SUMMARY
+  }
+
+  private void operation(Operation op) throws Exception {
+    switch (op) {
+      case GET:
+        testGet();
+        break;
+      case OPEN:
+        testOpen();
+        break;
+      case CREATE:
+        testCreate();
+        break;
+      case APPEND:
+        testAppend();
+        break;
+      case RENAME:
+        testRename();
+        break;
+      case DELETE:
+        testDelete();
+        break;
+      case LIST_STATUS:
+        testListStatus();
+        break;
+      case WORKING_DIRECTORY:
+        testWorkingdirectory();
+        break;
+      case MKDIRS:
+        testMkdirs();
+        break;
+      case SET_TIMES:
+        testSetTimes();
+        break;
+      case SET_PERMISSION:
+        testSetPermission();
+        break;
+      case SET_OWNER:
+        testSetOwner();
+        break;
+      case SET_REPLICATION:
+        testSetReplication();
+        break;
+      case CHECKSUM:
+        testChecksum();
+        break;
+      case CONTENT_SUMMARY:
+        testContentSummary();
+        break;
+    }
+  }
+
+  @Parameterized.Parameters
+  public static Collection operations() {
+    Object[][] ops = new Object[Operation.values().length][];
+    for (int i = 0; i < Operation.values().length; i++) {
+      ops[i] = new Object[]{Operation.values()[i]};
+    }
+    return Arrays.asList(ops);
+//    return Arrays.asList(new Object[][]{ new Object[]{Operation.CREATE}});
+  }
+
+  private Operation operation;
+
+  public TestHttpFSFileSystem(Operation operation) {
+    this.operation = operation;
+  }
+
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testOperation() throws Exception {
+    createHttpFSServer();
+    operation(operation);
+  }
+
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testOperationDoAs() throws Exception {
+    createHttpFSServer();
+    UserGroupInformation ugi = UserGroupInformation.createProxyUser(HadoopUsersConfTestHelper.getHadoopUsers()[0],
+                                                                    UserGroupInformation.getCurrentUser());
+    ugi.doAs(new PrivilegedExceptionAction<Void>() {
+      @Override
+      public Void run() throws Exception {
+        operation(operation);
+        return null;
+      }
+    });
+  }
+
+}

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestWebhdfsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestWebhdfsFileSystem.java?rev=1365988&r1=1365987&r2=1365988&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestWebhdfsFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestWebhdfsFileSystem.java Thu Jul 26 13:39:05 2012
@@ -36,20 +36,8 @@ public class TestWebhdfsFileSystem exten
   }
 
   @Override
-  protected FileSystem getHttpFileSystem() throws Exception {
-    Configuration conf = new Configuration();
-    conf.set("fs.webhdfs.impl", WebHdfsFileSystem.class.getName());
-    URI uri = new URI("webhdfs://" + TestJettyHelper.getJettyURL().toURI().getAuthority());
-    return FileSystem.get(uri, conf);
-  }
-
-  @Override
-  protected void testGet() throws Exception {
-    FileSystem fs = getHttpFileSystem();
-    Assert.assertNotNull(fs);
-    URI uri = new URI("webhdfs://" + TestJettyHelper.getJettyURL().toURI().getAuthority());
-    Assert.assertEquals(fs.getUri(), uri);
-    fs.close();
+  protected Class getFileSystemClass() {
+    return WebHdfsFileSystem.class;
   }
 
 }
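
With getHttpFileSystem() and testGet() pulled up into the base class, a suite for any other FileSystem binding now reduces to a constructor plus one override (the @Parameterized.Parameters method is inherited). A hypothetical further subclass, where MyFileSystem is a placeholder implementation:

    public class TestMyFileSystem extends TestHttpFSFileSystem {

      public TestMyFileSystem(TestHttpFSFileSystem.Operation operation) {
        super(operation);
      }

      @Override
      protected Class getFileSystemClass() {
        return MyFileSystem.class; // placeholder FileSystem implementation
      }
    }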

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/HttpFSKerberosAuthenticationHandlerForTesting.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/HttpFSKerberosAuthenticationHandlerForTesting.java?rev=1365988&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/HttpFSKerberosAuthenticationHandlerForTesting.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/HttpFSKerberosAuthenticationHandlerForTesting.java Thu Jul 26 13:39:05 2012
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.http.server;
+
+import javax.servlet.ServletException;
+import java.util.Properties;
+
+public class HttpFSKerberosAuthenticationHandlerForTesting
+  extends HttpFSKerberosAuthenticationHandler {
+
+  @Override
+  public void init(Properties config) throws ServletException {
+    //NOP overwrite to avoid Kerberos initialization
+  }
+
+  @Override
+  public void destroy() {
+    //NOP overwrite to avoid Kerberos initialization
+  }
+}
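
The NOP init and destroy overrides let the delegation-token code paths run without Kerberos infrastructure: TestHttpFSServer below selects this handler through the httpfs.authentication.type property, and TestHttpFSKerberosAuthenticationHandler instantiates it directly.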

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSKerberosAuthenticationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSKerberosAuthenticationHandler.java?rev=1365988&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSKerberosAuthenticationHandler.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSKerberosAuthenticationHandler.java Thu Jul 26 13:39:05 2012
@@ -0,0 +1,310 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.http.server;
+
+import junit.framework.Assert;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
+import org.apache.hadoop.fs.http.client.HttpFSKerberosAuthenticator;
+import org.apache.hadoop.fs.http.client.HttpFSKerberosAuthenticator.DelegationTokenOperation;
+import org.apache.hadoop.lib.service.DelegationTokenIdentifier;
+import org.apache.hadoop.lib.service.DelegationTokenManager;
+import org.apache.hadoop.lib.service.DelegationTokenManagerException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.hadoop.security.authentication.server.AuthenticationHandler;
+import org.apache.hadoop.security.authentication.server.AuthenticationToken;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.test.HFSTestCase;
+import org.apache.hadoop.test.TestDir;
+import org.apache.hadoop.test.TestDirHelper;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.MediaType;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+
+public class TestHttpFSKerberosAuthenticationHandler extends HFSTestCase {
+
+  @Test
+  @TestDir
+  public void testManagementOperations() throws Exception {
+    String dir = TestDirHelper.getTestDir().getAbsolutePath();
+
+    Configuration httpfsConf = new Configuration(false);
+    HttpFSServerWebApp server =
+      new HttpFSServerWebApp(dir, dir, dir, dir, httpfsConf);
+    server.setAuthority(new InetSocketAddress(InetAddress.getLocalHost(), 
+                                              14000));
+    AuthenticationHandler handler =
+      new HttpFSKerberosAuthenticationHandlerForTesting();
+    try {
+      server.init();
+      handler.init(null);
+
+      testNonManagementOperation(handler);
+      testManagementOperationErrors(handler);
+      testGetToken(handler, false, null);
+      testGetToken(handler, true, null);
+      testGetToken(handler, false, "foo");
+      testGetToken(handler, true, "foo");
+      testCancelToken(handler);
+      testRenewToken(handler);
+
+    } finally {
+      if (handler != null) {
+        handler.destroy();
+      }
+      server.destroy();
+    }
+  }
+
+  private void testNonManagementOperation(AuthenticationHandler handler)
+    throws Exception {
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    Mockito.when(request.getParameter(HttpFSFileSystem.OP_PARAM)).
+      thenReturn(null);
+    Assert.assertTrue(handler.managementOperation(null, request, null));
+    Mockito.when(request.getParameter(HttpFSFileSystem.OP_PARAM)).
+      thenReturn(HttpFSFileSystem.Operation.CREATE.toString());
+    Assert.assertTrue(handler.managementOperation(null, request, null));
+  }
+
+  private void testManagementOperationErrors(AuthenticationHandler handler)
+    throws Exception {
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
+    Mockito.when(request.getParameter(HttpFSFileSystem.OP_PARAM)).
+      thenReturn(DelegationTokenOperation.GETDELEGATIONTOKEN.toString());
+    Mockito.when(request.getMethod()).thenReturn("FOO");
+    Assert.assertFalse(handler.managementOperation(null, request, response));
+    Mockito.verify(response).sendError(
+      Mockito.eq(HttpServletResponse.SC_BAD_REQUEST),
+      Mockito.startsWith("Wrong HTTP method"));
+
+    Mockito.reset(response);
+    Mockito.when(request.getMethod()).
+      thenReturn(DelegationTokenOperation.GETDELEGATIONTOKEN.getHttpMethod());
+    Assert.assertFalse(handler.managementOperation(null, request, response));
+    Mockito.verify(response).sendError(
+      Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED),
+      Mockito.contains("requires SPNEGO"));
+  }
+
+  private void testGetToken(AuthenticationHandler handler, boolean tokens,
+                            String renewer)
+    throws Exception {
+    DelegationTokenOperation op =
+      (tokens) ? DelegationTokenOperation.GETDELEGATIONTOKENS
+               : DelegationTokenOperation.GETDELEGATIONTOKEN;
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
+    Mockito.when(request.getParameter(HttpFSFileSystem.OP_PARAM)).
+      thenReturn(op.toString());
+    Mockito.when(request.getMethod()).
+      thenReturn(op.getHttpMethod());
+
+    AuthenticationToken token = Mockito.mock(AuthenticationToken.class);
+    Mockito.when(token.getUserName()).thenReturn("user");
+    Assert.assertFalse(handler.managementOperation(null, request, response));
+    Mockito.when(request.getParameter(HttpFSKerberosAuthenticator.RENEWER_PARAM)).
+      thenReturn(renewer);
+
+    Mockito.reset(response);
+    StringWriter writer = new StringWriter();
+    PrintWriter pwriter = new PrintWriter(writer);
+    Mockito.when(response.getWriter()).thenReturn(pwriter);
+    Assert.assertFalse(handler.managementOperation(token, request, response));
+    if (renewer == null) {
+      Mockito.verify(token).getUserName();
+    } else {
+      Mockito.verify(token, Mockito.never()).getUserName();
+    }
+    Mockito.verify(response).setStatus(HttpServletResponse.SC_OK);
+    Mockito.verify(response).setContentType(MediaType.APPLICATION_JSON);
+    pwriter.close();
+    String responseOutput = writer.toString();
+    String tokenLabel = (tokens)
+                        ? HttpFSKerberosAuthenticator.DELEGATION_TOKENS_JSON
+                        : HttpFSKerberosAuthenticator.DELEGATION_TOKEN_JSON;
+    Assert.assertTrue(responseOutput.contains(tokenLabel));
+    Assert.assertTrue(responseOutput.contains(
+      HttpFSKerberosAuthenticator.DELEGATION_TOKEN_URL_STRING_JSON));
+    JSONObject json = (JSONObject) new JSONParser().parse(responseOutput);
+    json = (JSONObject) json.get(tokenLabel);
+    String tokenStr;
+    if (tokens) {
+      json = (JSONObject) ((JSONArray)
+        json.get(HttpFSKerberosAuthenticator.DELEGATION_TOKEN_JSON)).get(0);
+    }
+    tokenStr = (String)
+      json.get(HttpFSKerberosAuthenticator.DELEGATION_TOKEN_URL_STRING_JSON);
+    Token<DelegationTokenIdentifier> dt = new Token<DelegationTokenIdentifier>();
+    dt.decodeFromUrlString(tokenStr);
+    HttpFSServerWebApp.get().get(DelegationTokenManager.class).verifyToken(dt);
+  }
+
+  private void testCancelToken(AuthenticationHandler handler)
+    throws Exception {
+    DelegationTokenOperation op =
+      DelegationTokenOperation.CANCELDELEGATIONTOKEN;
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
+    Mockito.when(request.getParameter(HttpFSFileSystem.OP_PARAM)).
+      thenReturn(op.toString());
+    Mockito.when(request.getMethod()).
+      thenReturn(op.getHttpMethod());
+
+    Assert.assertFalse(handler.managementOperation(null, request, response));
+    Mockito.verify(response).sendError(
+      Mockito.eq(HttpServletResponse.SC_BAD_REQUEST),
+      Mockito.contains("requires the parameter [token]"));
+
+    Mockito.reset(response);
+    Token<DelegationTokenIdentifier> token =
+      HttpFSServerWebApp.get().get(DelegationTokenManager.class).createToken(
+        UserGroupInformation.getCurrentUser(), "foo");
+    Mockito.when(request.getParameter(HttpFSKerberosAuthenticator.TOKEN_PARAM)).
+      thenReturn(token.encodeToUrlString());
+    Assert.assertFalse(handler.managementOperation(null, request, response));
+    Mockito.verify(response).setStatus(HttpServletResponse.SC_OK);
+    try {
+      HttpFSServerWebApp.get().get(DelegationTokenManager.class).verifyToken(token);
+      Assert.fail();
+    }
+    catch (DelegationTokenManagerException ex) {
+      Assert.assertTrue(ex.toString().contains("DT01"));
+    }
+  }
+
+  private void testRenewToken(AuthenticationHandler handler)
+    throws Exception {
+    DelegationTokenOperation op =
+      DelegationTokenOperation.RENEWDELEGATIONTOKEN;
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
+    Mockito.when(request.getParameter(HttpFSFileSystem.OP_PARAM)).
+      thenReturn(op.toString());
+    Mockito.when(request.getMethod()).
+      thenReturn(op.getHttpMethod());
+
+    Assert.assertFalse(handler.managementOperation(null, request, response));
+    Mockito.verify(response).sendError(
+      Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED),
+      Mockito.contains("equires SPNEGO authentication established"));
+
+    Mockito.reset(response);
+    AuthenticationToken token = Mockito.mock(AuthenticationToken.class);
+    Mockito.when(token.getUserName()).thenReturn("user");
+    Assert.assertFalse(handler.managementOperation(token, request, response));
+    Mockito.verify(response).sendError(
+      Mockito.eq(HttpServletResponse.SC_BAD_REQUEST),
+      Mockito.contains("requires the parameter [token]"));
+
+    Mockito.reset(response);
+    StringWriter writer = new StringWriter();
+    PrintWriter pwriter = new PrintWriter(writer);
+    Mockito.when(response.getWriter()).thenReturn(pwriter);
+    Token<DelegationTokenIdentifier> dToken =
+      HttpFSServerWebApp.get().get(DelegationTokenManager.class).createToken(
+        UserGroupInformation.getCurrentUser(), "user");
+    Mockito.when(request.getParameter(HttpFSKerberosAuthenticator.TOKEN_PARAM)).
+      thenReturn(dToken.encodeToUrlString());
+    Assert.assertFalse(handler.managementOperation(token, request, response));
+    Mockito.verify(response).setStatus(HttpServletResponse.SC_OK);
+    pwriter.close();
+    Assert.assertTrue(writer.toString().contains("long"));
+    HttpFSServerWebApp.get().get(DelegationTokenManager.class).verifyToken(dToken);
+  }
+
+  @Test
+  @TestDir
+  public void testAuthenticate() throws Exception {
+    String dir = TestDirHelper.getTestDir().getAbsolutePath();
+
+    Configuration httpfsConf = new Configuration(false);
+    HttpFSServerWebApp server =
+      new HttpFSServerWebApp(dir, dir, dir, dir, httpfsConf);
+    server.setAuthority(new InetSocketAddress(InetAddress.getLocalHost(),
+                                              14000));
+    AuthenticationHandler handler =
+      new HttpFSKerberosAuthenticationHandlerForTesting();
+    try {
+      server.init();
+      handler.init(null);
+
+      testValidDelegationToken(handler);
+      testInvalidDelegationToken(handler);
+    } finally {
+      if (handler != null) {
+        handler.destroy();
+      }
+      server.destroy();
+    }
+  }
+
+  private void testValidDelegationToken(AuthenticationHandler handler)
+    throws Exception {
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
+    Token<DelegationTokenIdentifier> dToken =
+      HttpFSServerWebApp.get().get(DelegationTokenManager.class).createToken(
+        UserGroupInformation.getCurrentUser(), "user");
+    Mockito.when(request.getParameter(HttpFSKerberosAuthenticator.DELEGATION_PARAM)).
+      thenReturn(dToken.encodeToUrlString());
+
+    AuthenticationToken token = handler.authenticate(request, response);
+    Assert.assertEquals(UserGroupInformation.getCurrentUser().getShortUserName(),
+                        token.getUserName());
+    Assert.assertEquals(0, token.getExpires());
+    Assert.assertEquals(HttpFSKerberosAuthenticationHandler.TYPE,
+                        token.getType());
+    Assert.assertTrue(token.isExpired());
+  }
+
+  private void testInvalidDelegationToken(AuthenticationHandler handler)
+    throws Exception {
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
+    Mockito.when(request.getParameter(HttpFSKerberosAuthenticator.DELEGATION_PARAM)).
+      thenReturn("invalid");
+
+    try {
+      handler.authenticate(request, response);
+      Assert.fail();
+    } catch (AuthenticationException ex) {
+      //NOP
+    } catch (Exception ex) {
+      Assert.fail();
+    }
+  }
+
+}
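
A contract worth noting, since these tests pin it down: managementOperation() returns true when the request is not a token-management call, letting authentication proceed normally, and false when the handler has serviced the request itself, having written either the JSON token payload or an error status to the response.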

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java?rev=1365988&r1=1365987&r2=1365988&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java Thu Jul 26 13:39:05 2012
@@ -15,11 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.fs.http.server;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import org.junit.Assert;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -39,9 +37,13 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.http.client.HttpFSKerberosAuthenticator;
 import org.apache.hadoop.lib.server.Service;
 import org.apache.hadoop.lib.server.ServiceException;
 import org.apache.hadoop.lib.service.Groups;
+import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
+import org.apache.hadoop.security.authentication.server.AuthenticationToken;
+import org.apache.hadoop.security.authentication.util.Signer;
 import org.apache.hadoop.test.HFSTestCase;
 import org.apache.hadoop.test.HadoopUsersConfTestHelper;
 import org.apache.hadoop.test.TestDir;
@@ -50,6 +52,8 @@ import org.apache.hadoop.test.TestHdfs;
 import org.apache.hadoop.test.TestHdfsHelper;
 import org.apache.hadoop.test.TestJetty;
 import org.apache.hadoop.test.TestJettyHelper;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
 import org.junit.Test;
 import org.mortbay.jetty.Server;
 import org.mortbay.jetty.webapp.WebAppContext;
@@ -103,11 +107,13 @@ public class TestHttpFSServer extends HF
     }
 
   }
-  private void createHttpFSServer() throws Exception {
+
+  private void createHttpFSServer(boolean addDelegationTokenAuthHandler)
+    throws Exception {
     File homeDir = TestDirHelper.getTestDir();
-    assertTrue(new File(homeDir, "conf").mkdir());
-    assertTrue(new File(homeDir, "log").mkdir());
-    assertTrue(new File(homeDir, "temp").mkdir());
+    Assert.assertTrue(new File(homeDir, "conf").mkdir());
+    Assert.assertTrue(new File(homeDir, "log").mkdir());
+    Assert.assertTrue(new File(homeDir, "temp").mkdir());
     HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());
 
     File secretFile = new File(new File(homeDir, "conf"), "secret");
@@ -128,6 +134,10 @@ public class TestHttpFSServer extends HF
 
     //HTTPFS configuration
     conf = new Configuration(false);
+    if (addDelegationTokenAuthHandler) {
+     conf.set("httpfs.authentication.type",
+              HttpFSKerberosAuthenticationHandlerForTesting.class.getName());
+    }
     conf.set("httpfs.services.ext", MockGroups.class.getName());
     conf.set("httpfs.admin.group", HadoopUsersConfTestHelper.
       getHadoopUserGroups(HadoopUsersConfTestHelper.getHadoopUsers()[0])[0]);
@@ -147,6 +157,9 @@ public class TestHttpFSServer extends HF
     Server server = TestJettyHelper.getJettyServer();
     server.addHandler(context);
     server.start();
+    if (addDelegationTokenAuthHandler) {
+      HttpFSServerWebApp.get().setAuthority(TestJettyHelper.getAuthority());
+    }
   }
 
   @Test
@@ -154,28 +167,28 @@ public class TestHttpFSServer extends HF
   @TestJetty
   @TestHdfs
   public void instrumentation() throws Exception {
-    createHttpFSServer();
+    createHttpFSServer(false);
 
     URL url = new URL(TestJettyHelper.getJettyURL(),
                       MessageFormat.format("/webhdfs/v1?user.name={0}&op=instrumentation", "nobody"));
     HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_UNAUTHORIZED);
+    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_UNAUTHORIZED);
 
     url = new URL(TestJettyHelper.getJettyURL(),
                   MessageFormat.format("/webhdfs/v1?user.name={0}&op=instrumentation",
                                        HadoopUsersConfTestHelper.getHadoopUsers()[0]));
     conn = (HttpURLConnection) url.openConnection();
-    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
     BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
     String line = reader.readLine();
     reader.close();
-    assertTrue(line.contains("\"counters\":{"));
+    Assert.assertTrue(line.contains("\"counters\":{"));
 
     url = new URL(TestJettyHelper.getJettyURL(),
                   MessageFormat.format("/webhdfs/v1/foo?user.name={0}&op=instrumentation",
                                        HadoopUsersConfTestHelper.getHadoopUsers()[0]));
     conn = (HttpURLConnection) url.openConnection();
-    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_BAD_REQUEST);
+    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_BAD_REQUEST);
   }
 
   @Test
@@ -183,13 +196,13 @@ public class TestHttpFSServer extends HF
   @TestJetty
   @TestHdfs
   public void testHdfsAccess() throws Exception {
-    createHttpFSServer();
+    createHttpFSServer(false);
 
     String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
     URL url = new URL(TestJettyHelper.getJettyURL(),
                       MessageFormat.format("/webhdfs/v1/?user.name={0}&op=liststatus", user));
     HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
     BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
     reader.readLine();
     reader.close();
@@ -200,7 +213,7 @@ public class TestHttpFSServer extends HF
   @TestJetty
   @TestHdfs
   public void testGlobFilter() throws Exception {
-    createHttpFSServer();
+    createHttpFSServer(false);
 
     FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
     fs.mkdirs(new Path("/tmp"));
@@ -210,7 +223,7 @@ public class TestHttpFSServer extends HF
     URL url = new URL(TestJettyHelper.getJettyURL(),
                       MessageFormat.format("/webhdfs/v1/tmp?user.name={0}&op=liststatus&filter=f*", user));
     HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
     BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
     reader.readLine();
     reader.close();
@@ -221,7 +234,7 @@ public class TestHttpFSServer extends HF
   @TestJetty
   @TestHdfs
   public void testPutNoOperation() throws Exception {
-    createHttpFSServer();
+    createHttpFSServer(false);
 
     String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
     URL url = new URL(TestJettyHelper.getJettyURL(),
@@ -230,7 +243,87 @@ public class TestHttpFSServer extends HF
     conn.setDoInput(true);
     conn.setDoOutput(true);
     conn.setRequestMethod("PUT");
-    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_BAD_REQUEST);
+    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_BAD_REQUEST);
+  }
+
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testDelegationTokenOperations() throws Exception {
+    createHttpFSServer(true);
+
+    URL url = new URL(TestJettyHelper.getJettyURL(),
+                      "/webhdfs/v1/?op=GETHOMEDIRECTORY");
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    Assert.assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED,
+                        conn.getResponseCode());
+
+
+    AuthenticationToken token =
+      new AuthenticationToken("u", "p",
+        HttpFSKerberosAuthenticationHandlerForTesting.TYPE);
+    token.setExpires(System.currentTimeMillis() + 100000000);
+    Signer signer = new Signer("secret".getBytes());
+    String tokenSigned = signer.sign(token.toString());
+
+    url = new URL(TestJettyHelper.getJettyURL(),
+                  "/webhdfs/v1/?op=GETHOMEDIRECTORY");
+    conn = (HttpURLConnection) url.openConnection();
+    conn.setRequestProperty("Cookie",
+                            AuthenticatedURL.AUTH_COOKIE  + "=" + tokenSigned);
+    Assert.assertEquals(HttpURLConnection.HTTP_OK,
+                        conn.getResponseCode());
+
+    url = new URL(TestJettyHelper.getJettyURL(),
+                  "/webhdfs/v1/?op=GETDELEGATIONTOKEN");
+    conn = (HttpURLConnection) url.openConnection();
+    conn.setRequestProperty("Cookie",
+                            AuthenticatedURL.AUTH_COOKIE  + "=" + tokenSigned);
+    Assert.assertEquals(HttpURLConnection.HTTP_OK,
+                        conn.getResponseCode());
+
+    JSONObject json = (JSONObject)
+      new JSONParser().parse(new InputStreamReader(conn.getInputStream()));
+    json = (JSONObject)
+      json.get(HttpFSKerberosAuthenticator.DELEGATION_TOKEN_JSON);
+    String tokenStr = (String)
+        json.get(HttpFSKerberosAuthenticator.DELEGATION_TOKEN_URL_STRING_JSON);
+
+    url = new URL(TestJettyHelper.getJettyURL(),
+                  "/webhdfs/v1/?op=GETHOMEDIRECTORY&delegation=" + tokenStr);
+    conn = (HttpURLConnection) url.openConnection();
+    Assert.assertEquals(HttpURLConnection.HTTP_OK,
+                        conn.getResponseCode());
+
+    url = new URL(TestJettyHelper.getJettyURL(),
+                  "/webhdfs/v1/?op=RENEWDELEGATIONTOKEN&token=" + tokenStr);
+    conn = (HttpURLConnection) url.openConnection();
+    conn.setRequestMethod("PUT");
+    Assert.assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED,
+                        conn.getResponseCode());
+
+    url = new URL(TestJettyHelper.getJettyURL(),
+                  "/webhdfs/v1/?op=RENEWDELEGATIONTOKEN&token=" + tokenStr);
+    conn = (HttpURLConnection) url.openConnection();
+    conn.setRequestMethod("PUT");
+    conn.setRequestProperty("Cookie",
+                            AuthenticatedURL.AUTH_COOKIE  + "=" + tokenSigned);
+    Assert.assertEquals(HttpURLConnection.HTTP_OK,
+                        conn.getResponseCode());
+
+    url = new URL(TestJettyHelper.getJettyURL(),
+                  "/webhdfs/v1/?op=CANCELDELEGATIONTOKEN&token=" + tokenStr);
+    conn = (HttpURLConnection) url.openConnection();
+    conn.setRequestMethod("PUT");
+    Assert.assertEquals(HttpURLConnection.HTTP_OK,
+                        conn.getResponseCode());
+
+    url = new URL(TestJettyHelper.getJettyURL(),
+                  "/webhdfs/v1/?op=GETHOMEDIRECTORY&delegation=" + tokenStr);
+    conn = (HttpURLConnection) url.openConnection();
+    Assert.assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED,
+                        conn.getResponseCode());
   }
 
 }

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java.orig
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java.orig?rev=1365988&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java.orig (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java.orig Thu Jul 26 13:39:05 2012
@@ -0,0 +1,236 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.http.server;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.Writer;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.text.MessageFormat;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.lib.server.Service;
+import org.apache.hadoop.lib.server.ServiceException;
+import org.apache.hadoop.lib.service.Groups;
+import org.apache.hadoop.test.HFSTestCase;
+import org.apache.hadoop.test.HadoopUsersConfTestHelper;
+import org.apache.hadoop.test.TestDir;
+import org.apache.hadoop.test.TestDirHelper;
+import org.apache.hadoop.test.TestHdfs;
+import org.apache.hadoop.test.TestHdfsHelper;
+import org.apache.hadoop.test.TestJetty;
+import org.apache.hadoop.test.TestJettyHelper;
+import org.junit.Test;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.webapp.WebAppContext;
+
+public class TestHttpFSServer extends HFSTestCase {
+
+  @Test
+  @TestDir
+  @TestJetty
+  public void server() throws Exception {
+    String dir = TestDirHelper.getTestDir().getAbsolutePath();
+
+    Configuration httpfsConf = new Configuration(false);
+    HttpFSServerWebApp server = new HttpFSServerWebApp(dir, dir, dir, dir, httpfsConf);
+    server.init();
+    server.destroy();
+  }
+
+  public static class MockGroups implements Service,Groups {
+
+    @Override
+    public void init(org.apache.hadoop.lib.server.Server server) throws ServiceException {
+    }
+
+    @Override
+    public void postInit() throws ServiceException {
+    }
+
+    @Override
+    public void destroy() {
+    }
+
+    @Override
+    public Class[] getServiceDependencies() {
+      return new Class[0];
+    }
+
+    @Override
+    public Class getInterface() {
+      return Groups.class;
+    }
+
+    @Override
+    public void serverStatusChange(org.apache.hadoop.lib.server.Server.Status oldStatus,
+                                   org.apache.hadoop.lib.server.Server.Status newStatus) throws ServiceException {
+    }
+
+    @Override
+    public List<String> getGroups(String user) throws IOException {
+      return Arrays.asList(HadoopUsersConfTestHelper.getHadoopUserGroups(user));
+    }
+
+  }
+  private void createHttpFSServer() throws Exception {
+    File homeDir = TestDirHelper.getTestDir();
+    assertTrue(new File(homeDir, "conf").mkdir());
+    assertTrue(new File(homeDir, "log").mkdir());
+    assertTrue(new File(homeDir, "temp").mkdir());
+    HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());
+
+    File secretFile = new File(new File(homeDir, "conf"), "secret");
+    Writer w = new FileWriter(secretFile);
+    w.write("secret");
+    w.close();
+
+    //HDFS configuration
+    File hadoopConfDir = new File(new File(homeDir, "conf"), "hadoop-conf");
+    hadoopConfDir.mkdirs();
+    String fsDefaultName = TestHdfsHelper.getHdfsConf().get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
+    Configuration conf = new Configuration(false);
+    conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fsDefaultName);
+    File hdfsSite = new File(hadoopConfDir, "hdfs-site.xml");
+    OutputStream os = new FileOutputStream(hdfsSite);
+    conf.writeXml(os);
+    os.close();
+
+    //HTTPFS configuration
+    conf = new Configuration(false);
+    conf.set("httpfs.services.ext", MockGroups.class.getName());
+    conf.set("httpfs.admin.group", HadoopUsersConfTestHelper.
+      getHadoopUserGroups(HadoopUsersConfTestHelper.getHadoopUsers()[0])[0]);
+    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".groups",
+             HadoopUsersConfTestHelper.getHadoopProxyUserGroups());
+    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
+             HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
+    conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
+    File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
+    os = new FileOutputStream(httpfsSite);
+    conf.writeXml(os);
+    os.close();
+
+    ClassLoader cl = Thread.currentThread().getContextClassLoader();
+    URL url = cl.getResource("webapp");
+    WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs");
+    Server server = TestJettyHelper.getJettyServer();
+    server.addHandler(context);
+    server.start();
+  }
+
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void instrumentation() throws Exception {
+    createHttpFSServer();
+
+    URL url = new URL(TestJettyHelper.getJettyURL(),
+                      MessageFormat.format("/webhdfs/v1?user.name={0}&op=instrumentation", "nobody"));
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_UNAUTHORIZED);
+
+    url = new URL(TestJettyHelper.getJettyURL(),
+                  MessageFormat.format("/webhdfs/v1?user.name={0}&op=instrumentation",
+                                       HadoopUsersConfTestHelper.getHadoopUsers()[0]));
+    conn = (HttpURLConnection) url.openConnection();
+    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
+    String line = reader.readLine();
+    reader.close();
+    assertTrue(line.contains("\"counters\":{"));
+
+    url = new URL(TestJettyHelper.getJettyURL(),
+                  MessageFormat.format("/webhdfs/v1/foo?user.name={0}&op=instrumentation",
+                                       HadoopUsersConfTestHelper.getHadoopUsers()[0]));
+    conn = (HttpURLConnection) url.openConnection();
+    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_BAD_REQUEST);
+  }
+
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testHdfsAccess() throws Exception {
+    createHttpFSServer();
+
+    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
+    URL url = new URL(TestJettyHelper.getJettyURL(),
+                      MessageFormat.format("/webhdfs/v1/?user.name={0}&op=liststatus", user));
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
+    reader.readLine();
+    reader.close();
+  }
+
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testGlobFilter() throws Exception {
+    createHttpFSServer();
+
+    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    fs.mkdirs(new Path("/tmp"));
+    fs.create(new Path("/tmp/foo.txt")).close();
+
+    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
+    URL url = new URL(TestJettyHelper.getJettyURL(),
+                      MessageFormat.format("/webhdfs/v1/tmp?user.name={0}&op=liststatus&filter=f*", user));
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
+    reader.readLine();
+    reader.close();
+  }
+
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testPutNoOperation() throws Exception {
+    createHttpFSServer();
+
+    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
+    URL url = new URL(TestJettyHelper.getJettyURL(),
+                      MessageFormat.format("/webhdfs/v1/foo?user.name={0}", user));
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    conn.setDoInput(true);
+    conn.setDoOutput(true);
+    conn.setRequestMethod("PUT");
+    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_BAD_REQUEST);
+  }
+
+}
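
The createHttpFSServer() helper above persists Hadoop Configuration objects as
site XML files before starting Jetty. A standalone sketch of that pattern, with
an illustrative property value and output file name:

import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import org.apache.hadoop.conf.Configuration;

public class WriteConfSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration(false);  // false: do not load default resources
    conf.set("fs.default.name", "hdfs://localhost:8020");  // illustrative value
    OutputStream os = new FileOutputStream(new File("hdfs-site.xml"));
    conf.writeXml(os);  // serializes the set properties as a Hadoop XML config file
    os.close();
  }
}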

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java?rev=1365988&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java Thu Jul 26 13:39:05 2012
@@ -0,0 +1,291 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.http.server;
+
+import org.junit.Assert;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.DelegationTokenRenewer;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
+import org.apache.hadoop.fs.http.client.HttpFSKerberosAuthenticator;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.test.HFSTestCase;
+import org.apache.hadoop.test.KerberosTestUtils;
+import org.apache.hadoop.test.TestDir;
+import org.apache.hadoop.test.TestDirHelper;
+import org.apache.hadoop.test.TestHdfs;
+import org.apache.hadoop.test.TestHdfsHelper;
+import org.apache.hadoop.test.TestJetty;
+import org.apache.hadoop.test.TestJettyHelper;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.junit.After;
+import org.junit.Test;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.webapp.WebAppContext;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.Writer;
+import java.net.HttpURLConnection;
+import java.net.URI;
+import java.net.URL;
+import java.security.PrivilegedExceptionAction;
+import java.util.concurrent.Callable;
+
+public class TestHttpFSWithKerberos extends HFSTestCase {
+
+  @After
+  public void resetUGI() {
+    Configuration conf = new Configuration();
+    UserGroupInformation.setConfiguration(conf);
+  }
+
+  private void createHttpFSServer() throws Exception {
+    File homeDir = TestDirHelper.getTestDir();
+    Assert.assertTrue(new File(homeDir, "conf").mkdir());
+    Assert.assertTrue(new File(homeDir, "log").mkdir());
+    Assert.assertTrue(new File(homeDir, "temp").mkdir());
+    HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());
+
+    File secretFile = new File(new File(homeDir, "conf"), "secret");
+    Writer w = new FileWriter(secretFile);
+    w.write("secret");
+    w.close();
+
+    //HDFS configuration
+    File hadoopConfDir = new File(new File(homeDir, "conf"), "hadoop-conf");
+    hadoopConfDir.mkdirs();
+    String fsDefaultName = TestHdfsHelper.getHdfsConf()
+      .get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
+    Configuration conf = new Configuration(false);
+    conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fsDefaultName);
+    File hdfsSite = new File(hadoopConfDir, "hdfs-site.xml");
+    OutputStream os = new FileOutputStream(hdfsSite);
+    conf.writeXml(os);
+    os.close();
+
+    conf = new Configuration(false);
+    conf.set("httpfs.proxyuser.client.hosts", "*");
+    conf.set("httpfs.proxyuser.client.groups", "*");
+
+    conf.set("httpfs.authentication.type", "kerberos");
+
+    conf.set("httpfs.authentication.signature.secret.file",
+             secretFile.getAbsolutePath());
+    File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
+    os = new FileOutputStream(httpfsSite);
+    conf.writeXml(os);
+    os.close();
+
+    ClassLoader cl = Thread.currentThread().getContextClassLoader();
+    URL url = cl.getResource("webapp");
+    WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs");
+    Server server = TestJettyHelper.getJettyServer();
+    server.addHandler(context);
+    server.start();
+    HttpFSServerWebApp.get().setAuthority(TestJettyHelper.getAuthority());
+  }
+
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testValidHttpFSAccess() throws Exception {
+    createHttpFSServer();
+
+    KerberosTestUtils.doAsClient(new Callable<Void>() {
+      @Override
+      public Void call() throws Exception {
+        URL url = new URL(TestJettyHelper.getJettyURL(),
+                          "/webhdfs/v1/?op=GETHOMEDIRECTORY");
+        AuthenticatedURL aUrl = new AuthenticatedURL();
+        AuthenticatedURL.Token aToken = new AuthenticatedURL.Token();
+        HttpURLConnection conn = aUrl.openConnection(url, aToken);
+        Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+        return null;
+      }
+    });
+  }
+
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testInvalidHttpFSAccess() throws Exception {
+    createHttpFSServer();
+
+    URL url = new URL(TestJettyHelper.getJettyURL(),
+                      "/webhdfs/v1/?op=GETHOMEDIRECTORY");
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    Assert.assertEquals(conn.getResponseCode(),
+                        HttpURLConnection.HTTP_UNAUTHORIZED);
+  }
+
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testDelegationTokenHttpFSAccess() throws Exception {
+    createHttpFSServer();
+
+    KerberosTestUtils.doAsClient(new Callable<Void>() {
+      @Override
+      public Void call() throws Exception {
+        //get delegation token doing SPNEGO authentication
+        URL url = new URL(TestJettyHelper.getJettyURL(),
+                          "/webhdfs/v1/?op=GETDELEGATIONTOKEN");
+        AuthenticatedURL aUrl = new AuthenticatedURL();
+        AuthenticatedURL.Token aToken = new AuthenticatedURL.Token();
+        HttpURLConnection conn = aUrl.openConnection(url, aToken);
+        Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+        JSONObject json = (JSONObject) new JSONParser()
+          .parse(new InputStreamReader(conn.getInputStream()));
+        json =
+          (JSONObject) json
+            .get(HttpFSKerberosAuthenticator.DELEGATION_TOKEN_JSON);
+        String tokenStr = (String) json
+          .get(HttpFSKerberosAuthenticator.DELEGATION_TOKEN_URL_STRING_JSON);
+
+        //access httpfs using the delegation token
+        url = new URL(TestJettyHelper.getJettyURL(),
+                      "/webhdfs/v1/?op=GETHOMEDIRECTORY&delegation=" +
+                      tokenStr);
+        conn = (HttpURLConnection) url.openConnection();
+        Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+
+        //try to renew the delegation token without SPNEGO credentials
+        url = new URL(TestJettyHelper.getJettyURL(),
+                      "/webhdfs/v1/?op=RENEWDELEGATIONTOKEN&token=" + tokenStr);
+        conn = (HttpURLConnection) url.openConnection();
+        conn.setRequestMethod("PUT");
+        Assert.assertEquals(conn.getResponseCode(),
+                            HttpURLConnection.HTTP_UNAUTHORIZED);
+
+        //renew the delegation token with SPNEGO credentials
+        url = new URL(TestJettyHelper.getJettyURL(),
+                      "/webhdfs/v1/?op=RENEWDELEGATIONTOKEN&token=" + tokenStr);
+        conn = aUrl.openConnection(url, aToken);
+        conn.setRequestMethod("PUT");
+        Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+
+        //cancel delegation token, no need for SPNEGO credentials
+        url = new URL(TestJettyHelper.getJettyURL(),
+                      "/webhdfs/v1/?op=CANCELDELEGATIONTOKEN&token=" +
+                      tokenStr);
+        conn = (HttpURLConnection) url.openConnection();
+        conn.setRequestMethod("PUT");
+        Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+
+        //try to access httpfs with the canceled delegation token
+        url = new URL(TestJettyHelper.getJettyURL(),
+                      "/webhdfs/v1/?op=GETHOMEDIRECTORY&delegation=" +
+                      tokenStr);
+        conn = (HttpURLConnection) url.openConnection();
+        Assert.assertEquals(conn.getResponseCode(),
+                            HttpURLConnection.HTTP_UNAUTHORIZED);
+        return null;
+      }
+    });
+  }
+
+  @SuppressWarnings("deprecation")
+  private void testDelegationTokenWithFS(Class fileSystemClass)
+    throws Exception {
+    createHttpFSServer();
+    Configuration conf = new Configuration();
+    conf.set("fs.webhdfs.impl", fileSystemClass.getName());
+    conf.set("fs.hdfs.impl.disable.cache", "true");
+    URI uri = new URI("webhdfs://" +
+                      TestJettyHelper.getJettyURL().toURI().getAuthority());
+    FileSystem fs = FileSystem.get(uri, conf);
+    Token<?> token = fs.getDelegationToken("foo");
+    fs.close();
+    fs = FileSystem.get(uri, conf);
+    ((DelegationTokenRenewer.Renewable) fs).setDelegationToken(token);
+    fs.listStatus(new Path("/"));
+    fs.close();
+  }
+
+  private void testDelegationTokenWithinDoAs(
+    final Class fileSystemClass, boolean proxyUser) throws Exception {
+    Configuration conf = new Configuration();
+    conf.set("hadoop.security.authentication", "kerberos");
+    UserGroupInformation.setConfiguration(conf);
+    // use the shared test helpers so the principal and keytab location are
+    // not tied to a particular developer machine
+    UserGroupInformation.loginUserFromKeytab(KerberosTestUtils.getClientPrincipal(),
+                                             KerberosTestUtils.getKeytabFile());
+    UserGroupInformation ugi = UserGroupInformation.getLoginUser();
+    if (proxyUser) {
+      ugi = UserGroupInformation.createProxyUser("foo", ugi);
+    }
+    conf = new Configuration();
+    UserGroupInformation.setConfiguration(conf);
+    ugi.doAs(
+      new PrivilegedExceptionAction<Void>() {
+        @Override
+        public Void run() throws Exception {
+          testDelegationTokenWithFS(fileSystemClass);
+          return null;
+        }
+      });
+  }
+
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testDelegationTokenWithHttpFSFileSystem() throws Exception {
+    testDelegationTokenWithinDoAs(HttpFSFileSystem.class, false);
+  }
+
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testDelegationTokenWithWebhdfsFileSystem() throws Exception {
+    testDelegationTokenWithinDoAs(WebHdfsFileSystem.class, false);
+  }
+
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testDelegationTokenWithHttpFSFileSystemProxyUser()
+    throws Exception {
+    testDelegationTokenWithinDoAs(HttpFSFileSystem.class, true);
+  }
+
+  // TODO: WebHdfsFileSystem does not work with ProxyUser yet, see HDFS-3509
+  //    @Test
+  //    @TestDir
+  //    @TestJetty
+  //    @TestHdfs
+  //    public void testDelegationTokenWithWebhdfsFileSystemProxyUser()
+  //      throws Exception {
+  //      testDelegationTokenWithinDoAs(WebHdfsFileSystem.class, true);
+  //    }
+
+}
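
A hedged sketch of the login-then-doAs pattern that
testDelegationTokenWithinDoAs() relies on; the principal and keytab path here
are hypothetical stand-ins, not values from this commit:

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class ProxyUserSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.set("hadoop.security.authentication", "kerberos");
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation.loginUserFromKeytab(
        "client@EXAMPLE.COM", "/etc/security/keytabs/client.keytab");  // hypothetical
    UserGroupInformation ugi = UserGroupInformation.createProxyUser(
        "foo", UserGroupInformation.getLoginUser());
    ugi.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        // FileSystem operations here execute as 'foo', authenticated as 'client'.
        return null;
      }
    });
  }
}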

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/TestDelegationTokenManagerService.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/TestDelegationTokenManagerService.java?rev=1365988&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/TestDelegationTokenManagerService.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/TestDelegationTokenManagerService.java Thu Jul 26 13:39:05 2012
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.lib.service.security;
+
+import org.junit.Assert;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.http.server.HttpFSServerWebApp;
+import org.apache.hadoop.lib.server.Server;
+import org.apache.hadoop.lib.service.DelegationTokenManager;
+import org.apache.hadoop.lib.service.DelegationTokenManagerException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.test.HTestCase;
+import org.apache.hadoop.test.TestDir;
+import org.apache.hadoop.test.TestDirHelper;
+import org.apache.hadoop.util.StringUtils;
+import org.junit.Test;
+
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.util.Arrays;
+
+public class TestDelegationTokenManagerService extends HTestCase {
+
+  @Test
+  @TestDir
+  public void service() throws Exception {
+    String dir = TestDirHelper.getTestDir().getAbsolutePath();
+    Configuration conf = new Configuration(false);
+    conf.set("server.services", StringUtils.join(",",
+      Arrays.asList(DelegationTokenManagerService.class.getName())));
+    Server server = new Server("server", dir, dir, dir, dir, conf);
+    server.init();
+    DelegationTokenManager tm = server.get(DelegationTokenManager.class);
+    Assert.assertNotNull(tm);
+    server.destroy();
+  }
+
+  @Test
+  @TestDir
+  @SuppressWarnings("unchecked")
+  public void tokens() throws Exception {
+    String dir = TestDirHelper.getTestDir().getAbsolutePath();
+    Configuration conf = new Configuration(false);
+    conf.set("server.services", StringUtils.join(",",
+      Arrays.asList(DelegationTokenManagerService.class.getName())));
+    HttpFSServerWebApp server = new HttpFSServerWebApp(dir, dir, dir, dir, conf);
+    server.setAuthority(new InetSocketAddress(InetAddress.getLocalHost(), 14000));
+    server.init();
+    DelegationTokenManager tm = server.get(DelegationTokenManager.class);
+    Token token = tm.createToken(UserGroupInformation.getCurrentUser(), "foo");
+    Assert.assertNotNull(token);
+    tm.verifyToken(token);
+    Assert.assertTrue(tm.renewToken(token, "foo") > System.currentTimeMillis());
+    tm.cancelToken(token, "foo");
+    try {
+      tm.verifyToken(token);
+      Assert.fail();
+    } catch (DelegationTokenManagerException ex) {
+      //NOP
+    } catch (Exception ex) {
+      Assert.fail();
+    }
+    server.destroy();
+  }
+
+}
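
The tokens the manager issues travel over HTTP as URL-safe strings. A hedged
sketch of that round-trip using Hadoop's Token encoding; creating the token
through the manager is elided here, an empty token merely demonstrates the
calls:

import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class TokenEncodingSketch {
  public static void main(String[] args) throws Exception {
    Token<TokenIdentifier> token = new Token<TokenIdentifier>();  // in a test: tm.createToken(ugi, "foo")
    String urlString = token.encodeToUrlString();  // safe for a 'token=' query parameter
    Token<TokenIdentifier> copy = new Token<TokenIdentifier>();
    copy.decodeFromUrlString(urlString);  // recovers identifier, password and kind
    System.out.println("encoded: " + urlString);
  }
}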

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/KerberosTestUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/KerberosTestUtils.java?rev=1365988&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/KerberosTestUtils.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/KerberosTestUtils.java Thu Jul 26 13:39:05 2012
@@ -0,0 +1,138 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. See accompanying LICENSE file.
+ */
+package org.apache.hadoop.test;
+
+import javax.security.auth.Subject;
+import javax.security.auth.kerberos.KerberosPrincipal;
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.Configuration;
+import javax.security.auth.login.LoginContext;
+
+import org.apache.hadoop.security.authentication.util.KerberosUtil;
+
+import java.io.File;
+import java.security.Principal;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.Callable;
+
+/**
+ * Test helper class for Java Kerberos setup.
+ */
+public class KerberosTestUtils {
+  private static final String PREFIX = "httpfs.test.";
+
+  public static final String REALM = PREFIX + "kerberos.realm";
+
+  public static final String CLIENT_PRINCIPAL =
+    PREFIX + "kerberos.client.principal";
+
+  public static final String SERVER_PRINCIPAL =
+    PREFIX + "kerberos.server.principal";
+
+  public static final String KEYTAB_FILE = PREFIX + "kerberos.keytab.file";
+
+  public static String getRealm() {
+    return System.getProperty(REALM, "LOCALHOST");
+  }
+
+  public static String getClientPrincipal() {
+    return System.getProperty(CLIENT_PRINCIPAL, "client") + "@" + getRealm();
+  }
+
+  public static String getServerPrincipal() {
+    return System.getProperty(SERVER_PRINCIPAL,
+                              "HTTP/localhost") + "@" + getRealm();
+  }
+
+  public static String getKeytabFile() {
+    String keytabFile =
+      new File(System.getProperty("user.home"),
+               System.getProperty("user.name") + ".keytab").toString();
+    return System.getProperty(KEYTAB_FILE, keytabFile);
+  }
+
+  private static class KerberosConfiguration extends Configuration {
+    private String principal;
+
+    public KerberosConfiguration(String principal) {
+      this.principal = principal;
+    }
+
+    @Override
+    public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
+      Map<String, String> options = new HashMap<String, String>();
+      options.put("keyTab", KerberosTestUtils.getKeytabFile());
+      options.put("principal", principal);
+      options.put("useKeyTab", "true");
+      options.put("storeKey", "true");
+      options.put("doNotPrompt", "true");
+      options.put("useTicketCache", "true");
+      options.put("renewTGT", "true");
+      options.put("refreshKrb5Config", "true");
+      options.put("isInitiator", "true");
+      String ticketCache = System.getenv("KRB5CCNAME");
+      if (ticketCache != null) {
+        options.put("ticketCache", ticketCache);
+      }
+      options.put("debug", "true");
+
+      return new AppConfigurationEntry[]{
+        new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
+                                  AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+                                  options),};
+    }
+  }
+
+  public static <T> T doAs(String principal, final Callable<T> callable)
+    throws Exception {
+    LoginContext loginContext = null;
+    try {
+      Set<Principal> principals = new HashSet<Principal>();
+      principals.add(
+        new KerberosPrincipal(KerberosTestUtils.getClientPrincipal()));
+      Subject subject = new Subject(false, principals, new HashSet<Object>(),
+                                    new HashSet<Object>());
+      loginContext = new LoginContext("", subject, null,
+                                      new KerberosConfiguration(principal));
+      loginContext.login();
+      subject = loginContext.getSubject();
+      return Subject.doAs(subject, new PrivilegedExceptionAction<T>() {
+        @Override
+        public T run() throws Exception {
+          return callable.call();
+        }
+      });
+    } catch (PrivilegedActionException ex) {
+      throw ex.getException();
+    } finally {
+      if (loginContext != null) {
+        loginContext.logout();
+      }
+    }
+  }
+
+  public static <T> T doAsClient(Callable<T> callable) throws Exception {
+    return doAs(getClientPrincipal(), callable);
+  }
+
+  public static <T> T doAsServer(Callable<T> callable) throws Exception {
+    return doAs(getServerPrincipal(), callable);
+  }
+
+}
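
A minimal usage sketch for the helper above, mirroring how the Kerberos tests
call it; the callable body is a placeholder for SPNEGO-authenticated work:

import java.util.concurrent.Callable;
import org.apache.hadoop.test.KerberosTestUtils;

public class DoAsClientSketch {
  public static void main(String[] args) throws Exception {
    String result = KerberosTestUtils.doAsClient(new Callable<String>() {
      @Override
      public String call() throws Exception {
        // Runs with the client principal's Kerberos credentials, so an
        // AuthenticatedURL opened here could complete HTTP SPNEGO.
        return "authenticated";
      }
    });
    System.out.println(result);
  }
}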

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDirHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDirHelper.java?rev=1365988&r1=1365987&r2=1365988&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDirHelper.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDirHelper.java Thu Jul 26 13:39:05 2012
@@ -73,7 +73,7 @@ public class TestDirHelper implements Me
         System.exit(-1);
       }
 
-      TEST_DIR_ROOT = new File(TEST_DIR_ROOT, "testdir").getAbsolutePath();
+      TEST_DIR_ROOT = new File(TEST_DIR_ROOT, "test-dir").getAbsolutePath();
       System.setProperty(TEST_DIR_PROP, TEST_DIR_ROOT);
 
       File dir = new File(TEST_DIR_ROOT);
@@ -83,8 +83,6 @@ public class TestDirHelper implements Me
         System.exit(-1);
       }
 
-      System.setProperty("test.circus", "true");
-
       System.out.println(">>> " + TEST_DIR_PROP + "        : " + System.getProperty(TEST_DIR_PROP));
     } catch (IOException ex) {
       throw new RuntimeException(ex);

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java?rev=1365988&r1=1365987&r2=1365988&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java Thu Jul 26 13:39:05 2012
@@ -18,9 +18,11 @@
 package org.apache.hadoop.test;
 
 import java.net.InetAddress;
+import java.net.InetSocketAddress;
 import java.net.MalformedURLException;
 import java.net.ServerSocket;
 import java.net.URL;
+import java.net.UnknownHostException;
 
 import org.junit.Test;
 import org.junit.rules.MethodRule;
@@ -65,9 +67,9 @@ public class TestJettyHelper implements 
 
   private Server createJettyServer() {
     try {
-
-      String host = InetAddress.getLocalHost().getHostName();
-      ServerSocket ss = new ServerSocket(0);
+      InetAddress localhost = InetAddress.getByName("localhost");
+      String host = "localhost";
+      ServerSocket ss = new ServerSocket(0, 50, localhost);
       int port = ss.getLocalPort();
       ss.close();
       Server server = new Server(0);
@@ -80,6 +82,23 @@ public class TestJettyHelper implements 
   }
 
   /**
+   * Returns the authority (hostname and port) used by the Jetty server.
+   *
+   * @return an <code>InetSocketAddress</code> with the corresponding authority.
+   */
+  public static InetSocketAddress getAuthority() {
+    Server server = getJettyServer();
+    try {
+      InetAddress add =
+        InetAddress.getByName(server.getConnectors()[0].getHost());
+      int port = server.getConnectors()[0].getPort();
+      return new InetSocketAddress(add, port);
+    } catch (UnknownHostException ex) {
+      throw new RuntimeException(ex);
+    }
+  }
+
+  /**
   * Returns a Jetty server ready to be configured and then started. This server
    * is only available when the test method has been annotated with
    * {@link TestJetty}. Refer to {@link HTestCase} header for details.


