hadoop-hdfs-commits mailing list archives

From: t...@apache.org
Subject: svn commit: r1212060 [1/8] - in /hadoop/common/trunk/hadoop-hdfs-project: ./ hadoop-hdfs-httpfs/ hadoop-hdfs-httpfs/src/ hadoop-hdfs-httpfs/src/main/ hadoop-hdfs-httpfs/src/main/conf/ hadoop-hdfs-httpfs/src/main/java/ hadoop-hdfs-httpfs/src/main/java/o...
Date: Thu, 08 Dec 2011 19:25:33 GMT
Author: tucu
Date: Thu Dec  8 19:25:28 2011
New Revision: 1212060

URL: http://svn.apache.org/viewvc?rev=1212060&view=rev
Log:
HDFS-2178. Contributing Hoop to HDFS, replacement for HDFS proxy with read/write capabilities. (tucu)

Added:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/README.txt
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-log4j.properties
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-site.xml
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpKerberosAuthenticator.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpPseudoAuthenticator.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/AuthFilter.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParams.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSReleaseFilter.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/RunnableCallable.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/XException.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/BaseService.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServerException.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Service.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServiceException.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccess.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccessException.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Groups.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Instrumentation.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/ProxyUser.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Scheduler.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/GroupsService.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/ProxyUserService.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/Check.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/ConfigurationUtils.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ExceptionProvider.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/InputStreamEntity.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONMapProvider.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONProvider.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/UserProvider.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/default-log4j.properties
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs.properties
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/ROOT/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/ROOT/WEB-INF/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/ROOT/WEB-INF/web.xml
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/ROOT/index.html
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/logging.properties
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/server.xml
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/webapp/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/webapp/WEB-INF/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/webapp/WEB-INF/web.xml
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/apt/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/apt/ServerSetup.apt.vm
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/apt/UsingHttpTools.apt.vm
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/apt/index.apt.vm
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/configuration.xsl
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/site.xml
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestWebhdfsFileSystem.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/lang/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/lang/TestRunnableCallable.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/lang/TestXException.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/server/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/server/TestBaseService.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/server/TestServer.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/server/TestServerConstructor.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/hadoop/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/hadoop/TestFileSystemAccessService.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/instrumentation/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/instrumentation/TestInstrumentationService.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/scheduler/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/scheduler/TestSchedulerService.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/TestGroupsService.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/TestProxyUserService.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestHostnameFilter.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestMDCFilter.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestServerWebApp.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/util/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/util/TestCheck.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/util/TestConfigurationUtils.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestBooleanParam.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestByteParam.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestEnumParam.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestInputStreamEntity.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestIntegerParam.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestJSONMapProvider.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestJSONProvider.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestLongParam.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestShortParam.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestStringParam.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestUserProvider.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HFSTestCase.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HTestCase.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HadoopUsersConfTestHelper.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/SysPropsForTestsLoader.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDir.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDirHelper.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestException.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestExceptionHelper.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfs.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJetty.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/TestServerWebApp1.properties
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/TestServerWebApp2.properties
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/classutils.txt
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/default-log4j.properties
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/server.properties
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/testserver-default.xml
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/testserver.properties
Modified:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/trunk/hadoop-hdfs-project/pom.xml

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/README.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/README.txt?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/README.txt (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/README.txt Thu Dec  8 19:25:28 2011
@@ -0,0 +1,17 @@
+-----------------------------------------------------------------------------
+HttpFS - Hadoop HDFS over HTTP
+
+HttpFS is a server that provides a REST HTTP gateway to HDFS with full
+filesystem read & write capabilities.
+
+HttpFS can be used to transfer data between clusters running different
+versions of Hadoop (overcoming RPC versioning issues), for example using
+Hadoop DistCP.
+
+HttpFS can be used to access data in HDFS on a cluster behind a firewall
+(the HttpFS server acts as a gateway and is the only system that is allowed
+to cross the firewall into the cluster).
+
+HttpFS can be used to access data in HDFS using HTTP utilities (such as curl
+and wget) and HTTP libraries from languages other than Java (such as Perl).
+-----------------------------------------------------------------------------
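
The REST endpoints live under the /webhdfs/v1 prefix (see SERVICE_PREFIX in
HttpFSFileSystem.java below), so any plain HTTP client can drive them. As a
minimal sketch, assuming a server on httpfs-host:14000 (14000 being the
default port per httpfs-env.sh) and pseudo authentication, which passes the
caller as the user.name query parameter; the host, user, and paths are
illustrative only:

    # file status; GETFILESTATUS, OPEN and LISTSTATUS are GET operations
    # declared in HttpFSFileSystem.GetOpValues below
    curl "http://httpfs-host:14000/webhdfs/v1/user/alice/data.txt?op=GETFILESTATUS&user.name=alice"

    # read a file
    curl "http://httpfs-host:14000/webhdfs/v1/user/alice/data.txt?op=OPEN&user.name=alice"

    # list a directory
    curl "http://httpfs-host:14000/webhdfs/v1/user/alice?op=LISTSTATUS&user.name=alice"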

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml Thu Dec  8 19:25:28 2011
@@ -0,0 +1,530 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+
+
+-->
+<project>
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-project</artifactId>
+    <version>0.24.0-SNAPSHOT</version>
+    <relativePath>../../hadoop-project</relativePath>
+  </parent>
+  <groupId>org.apache.hadoop</groupId>
+  <artifactId>hadoop-hdfs-httpfs</artifactId>
+  <version>0.24.0-SNAPSHOT</version>
+  <packaging>war</packaging>
+
+  <name>Apache Hadoop HttpFS</name>
+  <description>Apache Hadoop HttpFS</description>
+
+  <properties>
+    <tomcat.version>6.0.32</tomcat.version>
+    <httpfs.source.repository>REPO NOT AVAIL</httpfs.source.repository>
+    <httpfs.source.revision>REVISION NOT AVAIL</httpfs.source.revision>
+    <maven.build.timestamp.format>yyyy-MM-dd'T'HH:mm:ssZ</maven.build.timestamp.format>
+    <httpfs.build.timestamp>${maven.build.timestamp}</httpfs.build.timestamp>
+    <httpfs.tomcat.dist.dir>
+      ${project.build.directory}/${project.artifactId}-${project.version}/share/hadoop/httpfs/tomcat
+    </httpfs.tomcat.dist.dir>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-server</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>javax.servlet</groupId>
+      <artifactId>servlet-api</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>javax.servlet.jsp</groupId>
+      <artifactId>jsp-api</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-codec</groupId>
+      <artifactId>commons-codec</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.jdom</groupId>
+      <artifactId>jdom</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.googlecode.json-simple</groupId>
+      <artifactId>json-simple</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>compile</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.xml.stream</groupId>
+          <artifactId>stax-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>commons-cli</groupId>
+          <artifactId>commons-cli</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>commons-httpclient</groupId>
+          <artifactId>commons-httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-compiler</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>servlet-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>jsp-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.servlet.jsp</groupId>
+          <artifactId>jsp-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>jetty</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>jetty-util</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>jsp-api-2.1</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>servlet-api-2.5</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>net.java.dev.jets3t</groupId>
+          <artifactId>jets3t</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>hsqldb</groupId>
+          <artifactId>hsqldb</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jdt</groupId>
+          <artifactId>core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>commons-el</groupId>
+          <artifactId>commons-el</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <scope>compile</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>commons-cli</groupId>
+          <artifactId>commons-cli</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>commons-httpclient</groupId>
+          <artifactId>commons-httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-compiler</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>servlet-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>jsp-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.servlet.jsp</groupId>
+          <artifactId>jsp-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>jetty</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>jetty-util</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>jsp-api-2.1</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>servlet-api-2.5</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>net.java.dev.jets3t</groupId>
+          <artifactId>jets3t</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>hsqldb</groupId>
+          <artifactId>hsqldb</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jdt</groupId>
+          <artifactId>core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>commons-el</groupId>
+          <artifactId>commons-el</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-log4j12</artifactId>
+      <scope>compile</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+        <filtering>true</filtering>
+        <includes>
+          <include>httpfs.properties</include>
+        </includes>
+      </resource>
+      <resource>
+        <directory>src/main/resources</directory>
+        <filtering>false</filtering>
+        <excludes>
+          <exclude>httpfs.properties</exclude>
+        </excludes>
+      </resource>
+    </resources>
+
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <threadCount>1</threadCount>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-javadoc-plugin</artifactId>
+        <executions>
+          <execution>
+            <goals>
+              <goal>javadoc</goal>
+            </goals>
+            <phase>site</phase>
+            <configuration>
+              <linksource>true</linksource>
+              <quiet>true</quiet>
+              <verbose>false</verbose>
+              <source>${maven.compile.source}</source>
+              <charset>${maven.compile.encoding}</charset>
+              <groups>
+                <group>
+                  <title>HttpFs API</title>
+                  <packages>*</packages>
+                </group>
+              </groups>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-project-info-reports-plugin</artifactId>
+        <executions>
+          <execution>
+            <configuration>
+              <dependencyLocationsEnabled>false</dependencyLocationsEnabled>
+            </configuration>
+            <goals>
+              <goal>dependencies</goal>
+            </goals>
+            <phase>site</phase>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.rat</groupId>
+        <artifactId>apache-rat-plugin</artifactId>
+        <configuration>
+          <excludes>
+          </excludes>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>create-web-xmls</id>
+            <phase>generate-test-resources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <mkdir dir="${project.build.directory}/test-classes/webapp"/>
+
+                <copy todir="${project.build.directory}/test-classes/webapp">
+                  <fileset dir="${basedir}/src/main/webapp"/>
+                </copy>
+              </target>
+            </configuration>
+          </execution>
+          <execution>
+            <id>site</id>
+            <phase>site</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <xslt in="${basedir}/src/main/resources/httpfs-default.xml"
+                      out="${project.build.directory}/site/httpfs-default.html"
+                      style="${basedir}/src/site/configuration.xsl"/>
+              </target>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-war-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-war</id>
+            <phase>package</phase>
+            <goals>
+              <goal>war</goal>
+            </goals>
+            <configuration>
+              <warName>webhdfs</warName>
+              <webappDirectory>${project.build.directory}/webhdfs</webappDirectory>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <profiles>
+    <profile>
+      <id>docs</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-site-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>docs</id>
+                <phase>prepare-package</phase>
+                <goals>
+                  <goal>site</goal>
+                </goals>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+
+    <profile>
+      <id>dist</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-assembly-plugin</artifactId>
+            <dependencies>
+              <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-assemblies</artifactId>
+                <version>${project.version}</version>
+              </dependency>
+            </dependencies>
+            <executions>
+              <execution>
+                <id>dist</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>single</goal>
+                </goals>
+                <configuration>
+                  <finalName>${project.artifactId}-${project.version}</finalName>
+                  <appendAssemblyId>false</appendAssemblyId>
+                  <attach>false</attach>
+                  <descriptorRefs>
+                    <descriptorRef>hadoop-httpfs-dist</descriptorRef>
+                  </descriptorRefs>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <!-- Downloading Tomcat TAR.GZ, using downloads/ dir to avoid downloading over and over -->
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>dist</id>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <phase>package</phase>
+                <configuration>
+                  <target>
+                    <mkdir dir="downloads"/>
+                    <get
+                        src="http://archive.apache.org/dist/tomcat/tomcat-6/v${tomcat.version}/bin/apache-tomcat-${tomcat.version}.tar.gz"
+                        dest="downloads/tomcat.tar.gz" verbose="true" skipexisting="true"/>
+                    <delete dir="${project.build.directory}/tomcat.exp"/>
+                    <mkdir dir="${project.build.directory}/tomcat.exp"/>
+
+                    <!-- Using Unix script to preserve file permissions -->
+                    <echo file="${project.build.directory}/tomcat-untar.sh">
+
+                      which cygpath 2> /dev/null
+                      if [ $? = 1 ]; then
+                      BUILD_DIR="${project.build.directory}"
+                      else
+                      BUILD_DIR=`cygpath --unix '${project.build.directory}'`
+                      fi
+                      cd $BUILD_DIR/tomcat.exp
+                      tar xzf ${basedir}/downloads/tomcat.tar.gz
+                    </echo>
+                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
+                      <arg line="./tomcat-untar.sh"/>
+                    </exec>
+
+                    <move file="${project.build.directory}/tomcat.exp/apache-tomcat-${tomcat.version}"
+                          tofile="${httpfs.tomcat.dist.dir}"/>
+                    <delete dir="${project.build.directory}/tomcat.exp"/>
+                    <delete dir="${httpfs.tomcat.dist.dir}/webapps"/>
+                    <mkdir dir="${httpfs.tomcat.dist.dir}/webapps"/>
+                    <delete file="${httpfs.tomcat.dist.dir}/conf/server.xml"/>
+                    <copy file="${basedir}/src/main/tomcat/server.xml"
+                          toDir="${httpfs.tomcat.dist.dir}/conf"/>
+                    <copy file="${basedir}/src/main/tomcat/logging.properties"
+                          toDir="${httpfs.tomcat.dist.dir}/conf"/>
+                    <copy toDir="${httpfs.tomcat.dist.dir}/webapps/ROOT">
+                      <fileset dir="${basedir}/src/main/tomcat/ROOT"/>
+                    </copy>
+                    <copy toDir="${httpfs.tomcat.dist.dir}/webapps/webhdfs">
+                      <fileset dir="${project.build.directory}/webhdfs"/>
+                    </copy>
+                  </target>
+                </configuration>
+              </execution>
+              <execution>
+                <id>tar</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <target if="tar">
+                    <!-- Using Unix script to preserve symlinks -->
+                    <echo file="${project.build.directory}/dist-maketar.sh">
+
+                      which cygpath 2> /dev/null
+                      if [ $? = 1 ]; then
+                      BUILD_DIR="${project.build.directory}"
+                      else
+                      BUILD_DIR=`cygpath --unix '${project.build.directory}'`
+                      fi
+                      cd $BUILD_DIR
+                      tar czf ${project.artifactId}-${project.version}.tar.gz ${project.artifactId}-${project.version}
+                    </echo>
+                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
+                      <arg line="./dist-maketar.sh"/>
+                    </exec>
+                  </target>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+</project>
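
The dist profile above lays out a distribution with an embedded Tomcat, and
the tar execution is guarded by an Ant if="tar" condition, so it only runs
when a "tar" property is set. A sketch of the likely invocation from this
module, inferred from the profile ids above (the exact flags are an
assumption, not part of this commit):

    # build the webhdfs war only
    mvn clean package
    # build the distribution directory and its tarball
    mvn clean package -Pdist -Dtar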

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh Thu Dec  8 19:25:28 2011
@@ -0,0 +1,41 @@
+#!/bin/bash
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License. See accompanying LICENSE file.
+#
+
+# Set httpfs specific environment variables here.
+
+# Settings for the Embedded Tomcat that runs HttpFS
+# Java System properties for HttpFS should be specified in this variable
+#
+# export CATALINA_OPTS=
+
+# HttpFS logs directory
+#
+# export HTTPFS_LOG=${HTTPFS_HOME}/logs
+
+# HttpFS temporary directory
+#
+# export HTTPFS_TEMP=${HTTPFS_HOME}/temp
+
+# The HTTP port used by HttpFS
+#
+# export HTTPFS_HTTP_PORT=14000
+
+# The Admin port used by HttpFS
+#
+# export HTTPFS_ADMIN_PORT=`expr ${HTTPFS_HTTP_PORT} + 1`
+
+# The hostname HttpFS server runs on
+#
+# export HTTPFS_HTTP_HOSTNAME=`hostname -f`
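
Every setting in httpfs-env.sh ships commented out, so the built-in defaults
apply until a line is uncommented and exported. A minimal sketch of an
override, reusing only the variables defined above (the log path is an
example, not a shipped default):

    export HTTPFS_LOG=/var/log/httpfs
    export HTTPFS_HTTP_PORT=14000
    export HTTPFS_ADMIN_PORT=`expr ${HTTPFS_HTTP_PORT} + 1`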

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-log4j.properties?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-log4j.properties (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-log4j.properties Thu Dec  8 19:25:28 2011
@@ -0,0 +1,35 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License. See accompanying LICENSE file.
+#
+
+# If the Java system property 'httpfs.log.dir' is not defined at HttpFSServer startup time,
+# the setup script sets its value to '${httpfs.home}/logs'.
+
+log4j.appender.httpfs=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.httpfs.DatePattern='.'yyyy-MM-dd
+log4j.appender.httpfs.File=${httpfs.log.dir}/httpfs.log
+log4j.appender.httpfs.Append=true
+log4j.appender.httpfs.layout=org.apache.log4j.PatternLayout
+log4j.appender.httpfs.layout.ConversionPattern=%d{ISO8601} %5p %c{1} [%X{hostname}][%X{user}:%X{doAs}] %X{op} %m%n
+
+log4j.appender.httpfsaudit=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.httpfsaudit.DatePattern='.'yyyy-MM-dd
+log4j.appender.httpfsaudit.File=${httpfs.log.dir}/httpfs-audit.log
+log4j.appender.httpfsaudit.Append=true
+log4j.appender.httpfsaudit.layout=org.apache.log4j.PatternLayout
+log4j.appender.httpfsaudit.layout.ConversionPattern=%d{ISO8601} %5p [%X{hostname}][%X{user}:%X{doAs}] %X{op} %m%n
+
+log4j.logger.httpfsaudit=INFO, httpfsaudit
+
+log4j.logger.org.apache.hadoop.fs.http.server=INFO, httpfs
+log4j.logger.org.apache.hadoop.lib=INFO, httpfs
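
Both appenders resolve ${httpfs.log.dir} from a Java system property. Since
httpfs-env.sh above routes Java system properties for the embedded Tomcat
through CATALINA_OPTS, a sketch of pinning the log directory explicitly (the
directory is illustrative only):

    # in httpfs-env.sh
    export CATALINA_OPTS="-Dhttpfs.log.dir=/var/log/httpfs"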

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-site.xml?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-site.xml (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-site.xml Thu Dec  8 19:25:28 2011
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<configuration>
+
+</configuration>

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,863 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.http.client;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileChecksum;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PositionedReadable;
+import org.apache.hadoop.fs.Seekable;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
+import org.apache.hadoop.security.authentication.client.Authenticator;
+import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.FileNotFoundException;
+import java.io.FilterInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.lang.reflect.Constructor;
+import java.net.HttpURLConnection;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.net.URLEncoder;
+import java.text.MessageFormat;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * HttpFSServer implementation of the FileSystemAccess FileSystem.
+ * <p/>
+ * This implementation allows a user to access HDFS over HTTP via an HttpFSServer.
+ */
+public class HttpFSFileSystem extends FileSystem {
+
+  public static final String SERVICE_NAME = "/webhdfs";
+
+  public static final String SERVICE_VERSION = "/v1";
+
+  public static final String SERVICE_PREFIX = SERVICE_NAME + SERVICE_VERSION;
+
+  public static final String OP_PARAM = "op";
+  public static final String DO_AS_PARAM = "doas";
+  public static final String OVERWRITE_PARAM = "overwrite";
+  public static final String REPLICATION_PARAM = "replication";
+  public static final String BLOCKSIZE_PARAM = "blocksize";
+  public static final String PERMISSION_PARAM = "permission";
+  public static final String DESTINATION_PARAM = "destination";
+  public static final String RECURSIVE_PARAM = "recursive";
+  public static final String OWNER_PARAM = "owner";
+  public static final String GROUP_PARAM = "group";
+  public static final String MODIFICATION_TIME_PARAM = "modificationtime";
+  public static final String ACCESS_TIME_PARAM = "accesstime";
+  public static final String RENEWER_PARAM = "renewer";
+
+  public static final String DEFAULT_PERMISSION = "default";
+
+  public static final String RENAME_JSON = "boolean";
+
+  public static final String DELETE_JSON = "boolean";
+
+  public static final String MKDIRS_JSON = "boolean";
+
+  public static final String HOME_DIR_JSON = "Path";
+
+  public static final String SET_REPLICATION_JSON = "boolean";
+
+  public static enum FILE_TYPE {
+    FILE, DIRECTORY, SYMLINK;
+
+    public static FILE_TYPE getType(FileStatus fileStatus) {
+      if (fileStatus.isFile()) {
+        return FILE;
+      }
+      if (fileStatus.isDirectory()) {
+        return DIRECTORY;
+      }
+      if (fileStatus.isSymlink()) {
+        return SYMLINK;
+      }
+      throw new IllegalArgumentException("Could not determine filetype for: " +
+                                         fileStatus.getPath());
+    }
+  }
+
+  public static final String FILE_STATUSES_JSON = "FileStatuses";
+  public static final String FILE_STATUS_JSON = "FileStatus";
+  public static final String PATH_SUFFIX_JSON = "pathSuffix";
+  public static final String TYPE_JSON = "type";
+  public static final String LENGTH_JSON = "length";
+  public static final String OWNER_JSON = "owner";
+  public static final String GROUP_JSON = "group";
+  public static final String PERMISSION_JSON = "permission";
+  public static final String ACCESS_TIME_JSON = "accessTime";
+  public static final String MODIFICATION_TIME_JSON = "modificationTime";
+  public static final String BLOCK_SIZE_JSON = "blockSize";
+  public static final String REPLICATION_JSON = "replication";
+
+  public static final String FILE_CHECKSUM_JSON = "FileChecksum";
+  public static final String CHECKSUM_ALGORITHM_JSON = "algorithm";
+  public static final String CHECKSUM_BYTES_JSON = "bytes";
+  public static final String CHECKSUM_LENGTH_JSON = "length";
+
+  public static final String CONTENT_SUMMARY_JSON = "ContentSummary";
+  public static final String CONTENT_SUMMARY_DIRECTORY_COUNT_JSON = "directoryCount";
+  public static final String CONTENT_SUMMARY_FILE_COUNT_JSON = "fileCount";
+  public static final String CONTENT_SUMMARY_LENGTH_JSON = "length";
+  public static final String CONTENT_SUMMARY_QUOTA_JSON = "quota";
+  public static final String CONTENT_SUMMARY_SPACE_CONSUMED_JSON = "spaceConsumed";
+  public static final String CONTENT_SUMMARY_SPACE_QUOTA_JSON = "spaceQuota";
+
+  public static final String DELEGATION_TOKEN_JSON = "Token";
+  public static final String DELEGATION_TOKEN_URL_STRING_JSON = "urlString";
+
+  public static final String ERROR_JSON = "RemoteException";
+  public static final String ERROR_EXCEPTION_JSON = "exception";
+  public static final String ERROR_CLASSNAME_JSON = "javaClassName";
+  public static final String ERROR_MESSAGE_JSON = "message";
+
+  public static final int HTTP_TEMPORARY_REDIRECT = 307;
+
+
+  /**
+   * Get operations.
+   */
+  public enum GetOpValues {
+    OPEN, GETFILESTATUS, LISTSTATUS, GETHOMEDIR, GETCONTENTSUMMARY, GETFILECHECKSUM,
+    GETDELEGATIONTOKEN, GETFILEBLOCKLOCATIONS, INSTRUMENTATION
+  }
+
+  /**
+   * Post operations.
+   */
+  public static enum PostOpValues {
+    APPEND
+  }
+
+  /**
+   * Put operations.
+   */
+  public static enum PutOpValues {
+    CREATE, MKDIRS, RENAME, SETOWNER, SETPERMISSION, SETREPLICATION, SETTIMES,
+    RENEWDELEGATIONTOKEN, CANCELDELEGATIONTOKEN
+  }
+
+  /**
+   * Delete operations.
+   */
+  public static enum DeleteOpValues {
+    DELETE
+  }
+
+  private static final String HTTP_GET = "GET";
+  private static final String HTTP_PUT = "PUT";
+  private static final String HTTP_POST = "POST";
+  private static final String HTTP_DELETE = "DELETE";
+
+  private AuthenticatedURL.Token authToken = new AuthenticatedURL.Token();
+  private URI uri;
+  private Path workingDir;
+  private String doAs;
+
+  /**
+   * Convenience method that creates an <code>HttpURLConnection</code> for the
+   * HttpFSServer file system operations.
+   * <p/>
+   * This method obtains and injects any needed authentication credentials
+   * via the {@link #getConnection(URL, String)} method
+   *
+   * @param method the HTTP method.
+   * @param params the query string parameters.
+   * @param path the file path
+   * @param makeQualified if the path should be 'makeQualified'
+   *
+   * @return an <code>HttpURLConnection</code> for the HttpFSServer server,
+   *         authenticated and ready to use for the specified path and file system operation.
+   *
+   * @throws IOException thrown if an IO error occurs.
+   */
+  private HttpURLConnection getConnection(String method, Map<String, String> params,
+                                          Path path, boolean makeQualified) throws IOException {
+    params.put(DO_AS_PARAM, doAs);
+    if (makeQualified) {
+      path = makeQualified(path);
+    }
+    URI uri = path.toUri();
+    StringBuilder sb = new StringBuilder();
+    sb.append(uri.getScheme()).append("://").append(uri.getAuthority()).
+      append(SERVICE_PREFIX).append(uri.getPath());
+
+    String separator = "?";
+    for (Map.Entry<String, String> entry : params.entrySet()) {
+      sb.append(separator).append(entry.getKey()).append("=").
+        append(URLEncoder.encode(entry.getValue(), "UTF8"));
+      separator = "&";
+    }
+    URL url = new URL(sb.toString());
+    return getConnection(url, method);
+  }
+
+  /**
+   * Convenience method that creates an <code>HttpURLConnection</code> for the specified URL.
+   * <p/>
+   * This method obtains and injects any needed authentication credentials.
+   *
+   * @param url url to connect to.
+   * @param method the HTTP method.
+   *
+   * @return an <code>HttpURLConnection</code> for the HttpFSServer server, authenticated and ready to use for
+   *         the specified path and file system operation.
+   *
+   * @throws IOException thrown if an IO error occurs.
+   */
+  private HttpURLConnection getConnection(URL url, String method) throws IOException {
+    Class<? extends Authenticator> klass =
+      getConf().getClass("httpfs.authenticator.class", HttpKerberosAuthenticator.class, Authenticator.class);
+    Authenticator authenticator = ReflectionUtils.newInstance(klass, getConf());
+    try {
+      HttpURLConnection conn = new AuthenticatedURL(authenticator).openConnection(url, authToken);
+      conn.setRequestMethod(method);
+      if (method.equals(HTTP_POST) || method.equals(HTTP_PUT)) {
+        conn.setDoOutput(true);
+      }
+      return conn;
+    } catch (Exception ex) {
+      throw new IOException(ex);
+    }
+  }
+
+  /**
+   * Convenience method that JSON-parses the <code>InputStream</code> of an <code>HttpURLConnection</code>.
+   *
+   * @param conn the <code>HttpURLConnection</code>.
+   *
+   * @return the parsed JSON object.
+   *
+   * @throws IOException thrown if the <code>InputStream</code> could not be JSON parsed.
+   */
+  private static Object jsonParse(HttpURLConnection conn) throws IOException {
+    try {
+      JSONParser parser = new JSONParser();
+      // The response body is UTF-8 JSON; specify the charset rather than
+      // relying on the platform default.
+      return parser.parse(new InputStreamReader(conn.getInputStream(), "UTF-8"));
+    } catch (ParseException ex) {
+      throw new IOException("JSON parser error, " + ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Validates the status of an <code>HttpURLConnection</code> against an expected HTTP
+   * status code. If the status code does not match the expected one, an exception is
+   * thrown whose detail message uses the server-side error information when available.
+   *
+   * @param conn the <code>HttpURLConnection</code>.
+   * @param expected the expected HTTP status code.
+   *
+   * @throws IOException thrown if the current status code does not match the expected one.
+   */
+  private static void validateResponse(HttpURLConnection conn, int expected) throws IOException {
+    int status = conn.getResponseCode();
+    if (status != expected) {
+      try {
+        JSONObject json = (JSONObject) jsonParse(conn);
+        json = (JSONObject) json.get(ERROR_JSON);
+        String message = (String) json.get(ERROR_MESSAGE_JSON);
+        String exception = (String) json.get(ERROR_EXCEPTION_JSON);
+        String className = (String) json.get(ERROR_CLASSNAME_JSON);
+
+        try {
+          ClassLoader cl = HttpFSFileSystem.class.getClassLoader();
+          Class<?> klass = cl.loadClass(className);
+          Constructor<?> constr = klass.getConstructor(String.class);
+          throw (IOException) constr.newInstance(message);
+        } catch (IOException ex) {
+          throw ex;
+        } catch (Exception ex) {
+          throw new IOException(MessageFormat.format("{0} - {1}", exception, message));
+        }
+      } catch (IOException ex) {
+        if (ex.getCause() instanceof IOException) {
+          throw (IOException) ex.getCause();
+        }
+        throw new IOException(MessageFormat.format("HTTP status [{0}], {1}", status, conn.getResponseMessage()));
+      }
+    }
+  }
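
For illustration only (not part of the patch): the parsing above assumes the server serializes failures as a JSON payload of roughly the shape sketched below. The field names are hypothetical stand-ins for the ERROR_*_JSON constants defined earlier in the class; the values are made up.

    // {
    //   "error": {
    //     "message":       "File /user/foo/data.txt does not exist",
    //     "exception":     "FileNotFoundException",
    //     "javaClassName": "java.io.FileNotFoundException"
    //   }
    // }
    // When the class name resolves to a loadable IOException subclass with a
    // String constructor, that exact exception type is re-thrown client side;
    // otherwise a generic "exception - message" IOException is thrown.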
+
+  /**
+   * Called after a new FileSystem instance is constructed.
+   *
+   * @param name a uri whose authority section names the host, port, etc. for this FileSystem
+   * @param conf the configuration
+   */
+  @Override
+  public void initialize(URI name, Configuration conf) throws IOException {
+    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+    doAs = ugi.getUserName();
+    super.initialize(name, conf);
+    try {
+      uri = new URI(name.getScheme() + "://" + name.getHost() + ":" + name.getPort());
+    } catch (URISyntaxException ex) {
+      throw new IOException(ex);
+    }
+  }
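
For illustration only (not part of the patch): a minimal sketch of obtaining an instance, assuming the client configuration binds a URI scheme to this class; the scheme and configuration key below are hypothetical, as the actual binding is outside this file.

    Configuration conf = new Configuration();
    // Hypothetical scheme binding; not shown in this patch.
    conf.setClass("fs.httpfs.impl", HttpFSFileSystem.class, FileSystem.class);
    FileSystem fs = FileSystem.get(URI.create("httpfs://httpfs-host:14000"), conf);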
+
+  /**
+   * Returns a URI whose scheme and authority identify this FileSystem.
+   *
+   * @return the URI whose scheme and authority identify this FileSystem.
+   */
+  @Override
+  public URI getUri() {
+    return uri;
+  }
+
+  /**
+   * HttpFSServer <code>InputStream</code> wrapper used to construct
+   * <code>FSDataInputStream</code> instances.
+   * <p/>
+   * This implementation does not support the
+   * <code>PositionedReadable</code> and <code>Seekable</code> methods.
+   */
+  private static class HttpFSDataInputStream extends FilterInputStream implements Seekable, PositionedReadable {
+
+    protected HttpFSDataInputStream(InputStream in, int bufferSize) {
+      super(new BufferedInputStream(in, bufferSize));
+    }
+
+    @Override
+    public int read(long position, byte[] buffer, int offset, int length) throws IOException {
+      throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public void readFully(long position, byte[] buffer, int offset, int length) throws IOException {
+      throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public void readFully(long position, byte[] buffer) throws IOException {
+      throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public void seek(long pos) throws IOException {
+      throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public long getPos() throws IOException {
+      throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public boolean seekToNewSource(long targetPos) throws IOException {
+      throw new UnsupportedOperationException();
+    }
+  }
+
+  /**
+   * Opens an FSDataInputStream at the indicated Path.
+   * <p/>
+   * IMPORTANT: the returned <code>FSDataInputStream</code> does not support the
+   * <code>PositionedReadable</code> and <code>Seekable</code> methods.
+   *
+   * @param f the file name to open
+   * @param bufferSize the size of the buffer to be used.
+   */
+  @Override
+  public FSDataInputStream open(Path f, int bufferSize) throws IOException {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, GetOpValues.OPEN.toString());
+    HttpURLConnection conn = getConnection(HTTP_GET, params, f, true);
+    validateResponse(conn, HttpURLConnection.HTTP_OK);
+    return new FSDataInputStream(new HttpFSDataInputStream(conn.getInputStream(), bufferSize));
+  }
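
For illustration only (not part of the patch): reading a file through the method above. Since the returned stream rejects seek and positioned reads, it must be consumed sequentially; the path and the fs variable are hypothetical.

    FSDataInputStream in = fs.open(new Path("/user/foo/data.txt"), 4096);
    try {
      byte[] buf = new byte[4096];
      int n;
      while ((n = in.read(buf)) != -1) {
        // consume buf[0..n) sequentially; in.seek(...) would throw
        // UnsupportedOperationException with this implementation
      }
    } finally {
      in.close();
    }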
+
+  /**
+   * HttpFSServer subclass of the <code>FSDataOutputStream</code>.
+   * <p/>
+   * This implementation validates the status of the underlying HTTP connection
+   * when the stream is closed.
+   */
+  private static class HttpFSDataOutputStream extends FSDataOutputStream {
+    private HttpURLConnection conn;
+    private int closeStatus;
+
+    public HttpFSDataOutputStream(HttpURLConnection conn, OutputStream out, int closeStatus, Statistics stats)
+      throws IOException {
+      super(out, stats);
+      this.conn = conn;
+      this.closeStatus = closeStatus;
+    }
+
+    @Override
+    public void close() throws IOException {
+      try {
+        super.close();
+      } finally {
+        validateResponse(conn, closeStatus);
+      }
+    }
+
+  }
+
+  /**
+   * Converts a <code>FsPermission</code> to a Unix octal representation.
+   *
+   * @param p the permission.
+   *
+   * @return the Unix octal string representation.
+   */
+  public static String permissionToString(FsPermission p) {
+    return (p == null) ? DEFAULT_PERMISSION : Integer.toString(p.toShort(), 8);
+  }
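
For illustration only (not part of the patch): the conversion above in action; DEFAULT_PERMISSION is a constant defined earlier in the class.

    String octal = permissionToString(new FsPermission((short) 0755)); // "755"
    String dflt  = permissionToString(null);  // DEFAULT_PERMISSION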
+
+  /*
+   * Common handling for uploading data for create and append operations.
+   */
+  private FSDataOutputStream uploadData(String method, Path f, Map<String, String> params,
+                                        int bufferSize, int expectedStatus) throws IOException {
+    HttpURLConnection conn = getConnection(method, params, f, true);
+    conn.setInstanceFollowRedirects(false);
+    boolean exceptionAlreadyHandled = false;
+    try {
+      if (conn.getResponseCode() == HTTP_TEMPORARY_REDIRECT) {
+        exceptionAlreadyHandled = true;
+        String location = conn.getHeaderField("Location");
+        if (location != null) {
+          conn = getConnection(new URL(location), method);
+          conn.setRequestProperty("Content-Type", "application/octet-stream");
+          try {
+            OutputStream os = new BufferedOutputStream(conn.getOutputStream(), bufferSize);
+            return new HttpFSDataOutputStream(conn, os, expectedStatus, statistics);
+          } catch (IOException ex) {
+            validateResponse(conn, expectedStatus);
+            throw ex;
+          }
+        } else {
+          validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
+          throw new IOException("Missing HTTP 'Location' header for [" + conn.getURL() + "]");
+        }
+      } else {
+        throw new IOException(
+          MessageFormat.format("Expected HTTP status was [307], received [{0}]",
+                               conn.getResponseCode()));
+      }
+    } catch (IOException ex) {
+      if (exceptionAlreadyHandled) {
+        throw ex;
+      } else {
+        validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
+        throw ex;
+      }
+    }
+  }
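
For illustration only (not part of the patch): the method above implements a two-step write handshake. The first request carries no body and is expected to answer with a 307 redirect whose Location header names the endpoint that actually receives the data; the bytes are then streamed to that second URL, and the final status is validated when the stream is closed (see HttpFSDataOutputStream.close() above). A rough trace, with hypothetical hosts:

    // PUT http://httpfs-host:14000/...?op=CREATE
    //   -> 307 Temporary Redirect, Location: http://dn-host:14000/...
    // PUT http://dn-host:14000/...  (Content-Type: application/octet-stream, body streamed)
    //   -> 201 Created, checked by validateResponse(...) on close()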
+
+
+  /**
+   * Opens an FSDataOutputStream at the indicated Path with write-progress
+   * reporting.
+   * <p/>
+   * IMPORTANT: The <code>Progressable</code> parameter is not used.
+   *
+   * @param f the file name to open.
+   * @param permission file permission.
+   * @param overwrite if a file with this name already exists, then if true,
+   * the file will be overwritten, and if false an error will be thrown.
+   * @param bufferSize the size of the buffer to be used.
+   * @param replication required block replication for the file.
+   * @param blockSize block size.
+   * @param progress progressable.
+   *
+   * @throws IOException
+   * @see #setPermission(Path, FsPermission)
+   */
+  @Override
+  public FSDataOutputStream create(Path f, FsPermission permission, boolean overwrite, int bufferSize,
+                                   short replication, long blockSize, Progressable progress) throws IOException {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, PutOpValues.CREATE.toString());
+    params.put(OVERWRITE_PARAM, Boolean.toString(overwrite));
+    params.put(REPLICATION_PARAM, Short.toString(replication));
+    params.put(BLOCKSIZE_PARAM, Long.toString(blockSize));
+    params.put(PERMISSION_PARAM, permissionToString(permission));
+    return uploadData(HTTP_PUT, f, params, bufferSize, HttpURLConnection.HTTP_CREATED);
+  }
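
For illustration only (not part of the patch): creating a file through the method above; the path, permission and fs variable are hypothetical, and the Progressable argument is ignored as the javadoc notes.

    FSDataOutputStream out = fs.create(new Path("/user/foo/out.txt"),
                                       new FsPermission((short) 0644),
                                       true,              // overwrite
                                       4096,              // buffer size
                                       (short) 3,         // replication
                                       64 * 1024 * 1024,  // block size
                                       null);             // progress: unused
    try {
      out.write("hello".getBytes("UTF-8"));
    } finally {
      out.close();  // HTTP 201 is validated here
    }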
+
+
+  /**
+   * Append to an existing file (optional operation).
+   * <p/>
+   * IMPORTANT: The <code>Progressable</code> parameter is not used.
+   *
+   * @param f the existing file to be appended.
+   * @param bufferSize the size of the buffer to be used.
+   * @param progress for reporting progress if it is not null.
+   *
+   * @throws IOException
+   */
+  @Override
+  public FSDataOutputStream append(Path f, int bufferSize, Progressable progress) throws IOException {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, PostOpValues.APPEND.toString());
+    return uploadData(HTTP_POST, f, params, bufferSize, HttpURLConnection.HTTP_OK);
+  }
+
+  /**
+   * Renames Path src to Path dst on the remote HttpFS server.
+   */
+  @Override
+  public boolean rename(Path src, Path dst) throws IOException {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, PutOpValues.RENAME.toString());
+    params.put(DESTINATION_PARAM, dst.toString());
+    HttpURLConnection conn = getConnection(HTTP_PUT, params, src, true);
+    validateResponse(conn, HttpURLConnection.HTTP_OK);
+    JSONObject json = (JSONObject) jsonParse(conn);
+    return (Boolean) json.get(RENAME_JSON);
+  }
+
+  /**
+   * Delete a file.
+   *
+   * @deprecated Use delete(Path, boolean) instead
+   */
+  @SuppressWarnings({"deprecation"})
+  @Deprecated
+  @Override
+  public boolean delete(Path f) throws IOException {
+    return delete(f, false);
+  }
+
+  /**
+   * Delete a file.
+   *
+   * @param f the path to delete.
+   * @param recursive if the path is a directory and recursive is true, the
+   * directory and its contents are deleted; if recursive is false, a
+   * non-empty directory causes an exception to be thrown. For a file, the
+   * value of recursive is ignored.
+   *
+   * @return true if the delete succeeded, false otherwise.
+   *
+   * @throws IOException
+   */
+  @Override
+  public boolean delete(Path f, boolean recursive) throws IOException {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, DeleteOpValues.DELETE.toString());
+    params.put(RECURSIVE_PARAM, Boolean.toString(recursive));
+    HttpURLConnection conn = getConnection(HTTP_DELETE, params, f, true);
+    validateResponse(conn, HttpURLConnection.HTTP_OK);
+    JSONObject json = (JSONObject) jsonParse(conn);
+    return (Boolean) json.get(DELETE_JSON);
+  }
+
+  /**
+   * List the statuses of the files/directories in the given path if the path is
+   * a directory.
+   *
+   * @param f given path
+   *
+   * @return the statuses of the files/directories in the given path
+   *
+   * @throws IOException
+   */
+  @Override
+  public FileStatus[] listStatus(Path f) throws IOException {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, GetOpValues.LISTSTATUS.toString());
+    HttpURLConnection conn = getConnection(HTTP_GET, params, f, true);
+    validateResponse(conn, HttpURLConnection.HTTP_OK);
+    JSONObject json = (JSONObject) jsonParse(conn);
+    json = (JSONObject) json.get(FILE_STATUSES_JSON);
+    JSONArray jsonArray = (JSONArray) json.get(FILE_STATUS_JSON);
+    FileStatus[] array = new FileStatus[jsonArray.size()];
+    f = makeQualified(f);
+    for (int i = 0; i < jsonArray.size(); i++) {
+      array[i] = createFileStatus(f, (JSONObject) jsonArray.get(i));
+    }
+    return array;
+  }
+
+  /**
+   * Set the current working directory for the given file system. All relative
+   * paths will be resolved relative to it.
+   *
+   * @param newDir new directory.
+   */
+  @Override
+  public void setWorkingDirectory(Path newDir) {
+    workingDir = newDir;
+  }
+
+  /**
+   * Get the current working directory for the given file system.
+   *
+   * @return the directory pathname
+   */
+  @Override
+  public Path getWorkingDirectory() {
+    if (workingDir == null) {
+      workingDir = getHomeDirectory();
+    }
+    return workingDir;
+  }
+
+  /**
+   * Make the given file and all non-existent parents into
+   * directories. Has the semantics of Unix 'mkdir -p'.
+   * Existence of the directory hierarchy is not an error.
+   */
+  @Override
+  public boolean mkdirs(Path f, FsPermission permission) throws IOException {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, PutOpValues.MKDIRS.toString());
+    params.put(PERMISSION_PARAM, permissionToString(permission));
+    HttpURLConnection conn = getConnection(HTTP_PUT, params, f, true);
+    validateResponse(conn, HttpURLConnection.HTTP_OK);
+    JSONObject json = (JSONObject) jsonParse(conn);
+    return (Boolean) json.get(MKDIRS_JSON);
+  }
+
+  /**
+   * Return a file status object that represents the path.
+   *
+   * @param f The path we want information from
+   *
+   * @return a FileStatus object
+   *
+   * @throws FileNotFoundException when the path does not exist;
+   * IOException see specific implementation
+   */
+  @Override
+  public FileStatus getFileStatus(Path f) throws IOException {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, GetOpValues.GETFILESTATUS.toString());
+    HttpURLConnection conn = getConnection(HTTP_GET, params, f, true);
+    validateResponse(conn, HttpURLConnection.HTTP_OK);
+    JSONObject json = (JSONObject) jsonParse(conn);
+    json = (JSONObject) json.get(FILE_STATUS_JSON);
+    f = makeQualified(f);
+    return createFileStatus(f, json);
+  }
+
+  /**
+   * Return the current user's home directory in this filesystem.
+   * The default implementation returns "/user/$USER/".
+   */
+  @Override
+  public Path getHomeDirectory() {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, GetOpValues.GETHOMEDIR.toString());
+    try {
+      HttpURLConnection conn = getConnection(HTTP_GET, params, new Path(getUri().toString(), "/"), false);
+      validateResponse(conn, HttpURLConnection.HTTP_OK);
+      JSONObject json = (JSONObject) jsonParse(conn);
+      return new Path((String) json.get(HOME_DIR_JSON));
+    } catch (IOException ex) {
+      throw new RuntimeException(ex);
+    }
+  }
+
+  /**
+   * Set owner of a path (i.e. a file or a directory).
+   * The parameters username and groupname cannot both be null.
+   *
+   * @param p The path
+   * @param username If it is null, the original username remains unchanged.
+   * @param groupname If it is null, the original groupname remains unchanged.
+   */
+  @Override
+  public void setOwner(Path p, String username, String groupname) throws IOException {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, PutOpValues.SETOWNER.toString());
+    params.put(OWNER_PARAM, username);
+    params.put(GROUP_PARAM, groupname);
+    HttpURLConnection conn = getConnection(HTTP_PUT, params, p, true);
+    validateResponse(conn, HttpURLConnection.HTTP_OK);
+  }
+
+  /**
+   * Set permission of a path.
+   *
+   * @param p path.
+   * @param permission permission.
+   */
+  @Override
+  public void setPermission(Path p, FsPermission permission) throws IOException {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, PutOpValues.SETPERMISSION.toString());
+    params.put(PERMISSION_PARAM, permissionToString(permission));
+    HttpURLConnection conn = getConnection(HTTP_PUT, params, p, true);
+    validateResponse(conn, HttpURLConnection.HTTP_OK);
+  }
+
+  /**
+   * Set the access time and modification time of a file.
+   *
+   * @param p The path
+   * @param mtime Set the modification time of this file.
+   * The number of milliseconds since Jan 1, 1970.
+   * A value of -1 means that this call should not set modification time.
+   * @param atime Set the access time of this file.
+   * The number of milliseconds since Jan 1, 1970.
+   * A value of -1 means that this call should not set access time.
+   */
+  @Override
+  public void setTimes(Path p, long mtime, long atime) throws IOException {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, PutOpValues.SETTIMES.toString());
+    params.put(MODIFICATION_TIME_PARAM, Long.toString(mtime));
+    params.put(ACCESS_TIME_PARAM, Long.toString(atime));
+    HttpURLConnection conn = getConnection(HTTP_PUT, params, p, true);
+    validateResponse(conn, HttpURLConnection.HTTP_OK);
+  }
+
+  /**
+   * Set replication for an existing file.
+   *
+   * @param src file name
+   * @param replication new replication
+   *
+   * @return true if successful;
+   *         false if file does not exist or is a directory
+   *
+   * @throws IOException
+   */
+  @Override
+  public boolean setReplication(Path src, short replication) throws IOException {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, PutOpValues.SETREPLICATION.toString());
+    params.put(REPLICATION_PARAM, Short.toString(replication));
+    HttpURLConnection conn = getConnection(HTTP_PUT, params, src, true);
+    validateResponse(conn, HttpURLConnection.HTTP_OK);
+    JSONObject json = (JSONObject) jsonParse(conn);
+    return (Boolean) json.get(SET_REPLICATION_JSON);
+  }
+
+  /**
+   * Creates a <code>FileStatus</code> object using a JSON file-status payload
+   * received from a HttpFS server.
+   *
+   * @param parent the parent path used to qualify each entry's path.
+   * @param json a JSON file-status payload received from a HttpFS server
+   *
+   * @return the corresponding <code>FileStatus</code>
+   */
+  private FileStatus createFileStatus(Path parent, JSONObject json) {
+    String pathSuffix = (String) json.get(PATH_SUFFIX_JSON);
+    Path path = (pathSuffix.equals("")) ? parent : new Path(parent, pathSuffix);
+    FILE_TYPE type = FILE_TYPE.valueOf((String) json.get(TYPE_JSON));
+    long len = (Long) json.get(LENGTH_JSON);
+    String owner = (String) json.get(OWNER_JSON);
+    String group = (String) json.get(GROUP_JSON);
+    FsPermission permission =
+      new FsPermission(Short.parseShort((String) json.get(PERMISSION_JSON), 8));
+    long aTime = (Long) json.get(ACCESS_TIME_JSON);
+    long mTime = (Long) json.get(MODIFICATION_TIME_JSON);
+    long blockSize = (Long) json.get(BLOCK_SIZE_JSON);
+    short replication = ((Long) json.get(REPLICATION_JSON)).shortValue();
+    FileStatus fileStatus = null;
+
+    switch (type) {
+      case FILE:
+      case DIRECTORY:
+        fileStatus = new FileStatus(len, (type == FILE_TYPE.DIRECTORY),
+                                    replication, blockSize, mTime, aTime,
+                                    permission, owner, group, path);
+        break;
+      case SYMLINK:
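+        // Assumption: the JSON payload carries no symlink target, so the
+        // symlink path is left null.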
+        Path symLink = null;
+        fileStatus = new FileStatus(len, false,
+                                    replication, blockSize, mTime, aTime,
+                                    permission, owner, group, symLink,
+                                    path);
+    }
+    return fileStatus;
+  }
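
For illustration only (not part of the patch): the parser above expects a per-entry JSON payload of roughly the shape sketched below. The field names are hypothetical stand-ins for the *_JSON constants referenced above; the values are made up.

    // {
    //   "pathSuffix": "data.txt", "type": "FILE", "length": 1024,
    //   "owner": "foo", "group": "supergroup", "permission": "644",
    //   "accessTime": 1322870400000, "modificationTime": 1322870400000,
    //   "blockSize": 67108864, "replication": 3
    // }
    // Note the permission arrives as an octal string while the numeric fields
    // arrive as JSON numbers, which json-simple parses as Long.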
+
+  @Override
+  public ContentSummary getContentSummary(Path f) throws IOException {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, GetOpValues.GETCONTENTSUMMARY.toString());
+    HttpURLConnection conn = getConnection(HTTP_GET, params, f, true);
+    validateResponse(conn, HttpURLConnection.HTTP_OK);
+    JSONObject json = (JSONObject) ((JSONObject) jsonParse(conn)).get(CONTENT_SUMMARY_JSON);
+    return new ContentSummary((Long) json.get(CONTENT_SUMMARY_LENGTH_JSON),
+                              (Long) json.get(CONTENT_SUMMARY_FILE_COUNT_JSON),
+                              (Long) json.get(CONTENT_SUMMARY_DIRECTORY_COUNT_JSON),
+                              (Long) json.get(CONTENT_SUMMARY_QUOTA_JSON),
+                              (Long) json.get(CONTENT_SUMMARY_SPACE_CONSUMED_JSON),
+                              (Long) json.get(CONTENT_SUMMARY_SPACE_QUOTA_JSON)
+    );
+  }
+
+  @Override
+  public FileChecksum getFileChecksum(Path f) throws IOException {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, GetOpValues.GETFILECHECKSUM.toString());
+    HttpURLConnection conn = getConnection(HTTP_GET, params, f, true);
+    validateResponse(conn, HttpURLConnection.HTTP_OK);
+    final JSONObject json = (JSONObject) ((JSONObject) jsonParse(conn)).get(FILE_CHECKSUM_JSON);
+    return new FileChecksum() {
+      @Override
+      public String getAlgorithmName() {
+        return (String) json.get(CHECKSUM_ALGORITHM_JSON);
+      }
+
+      @Override
+      public int getLength() {
+        return ((Long) json.get(CHECKSUM_LENGTH_JSON)).intValue();
+      }
+
+      @Override
+      public byte[] getBytes() {
+        return StringUtils.hexStringToByte((String) json.get(CHECKSUM_BYTES_JSON));
+      }
+
+      @Override
+      public void write(DataOutput out) throws IOException {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public void readFields(DataInput in) throws IOException {
+        throw new UnsupportedOperationException();
+      }
+    };
+  }
+
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpKerberosAuthenticator.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpKerberosAuthenticator.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpKerberosAuthenticator.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpKerberosAuthenticator.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.http.client;
+
+
+import org.apache.hadoop.security.authentication.client.Authenticator;
+import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
+
+/**
+ * A <code>KerberosAuthenticator</code> subclass that falls back to
+ * {@link HttpPseudoAuthenticator}.
+ */
+public class HttpKerberosAuthenticator extends KerberosAuthenticator {
+
+  /**
+   * Returns the fallback authenticator if the server does not use
+   * Kerberos SPNEGO HTTP authentication.
+   *
+   * @return a {@link HttpPseudoAuthenticator} instance.
+   */
+  @Override
+  protected Authenticator getFallBackAuthenticator() {
+    return new HttpPseudoAuthenticator();
+  }
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpPseudoAuthenticator.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpPseudoAuthenticator.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpPseudoAuthenticator.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpPseudoAuthenticator.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.http.client;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
+
+import java.io.IOException;
+
+/**
+ * A <code>PseudoAuthenticator</code> subclass that uses Hadoop's
+ * <code>UserGroupInformation</code> to obtain the client user name (the UGI's login user).
+ */
+public class HttpPseudoAuthenticator extends PseudoAuthenticator {
+
+  /**
+   * Return the client user name.
+   *
+   * @return the client user name.
+   */
+  @Override
+  protected String getUserName() {
+    try {
+      return UserGroupInformation.getLoginUser().getUserName();
+    } catch (IOException ex) {
+      throw new SecurityException("Could not obtain current user, " + ex.getMessage(), ex);
+    }
+  }
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/AuthFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/AuthFilter.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/AuthFilter.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/AuthFilter.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.http.server;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+
+import javax.servlet.FilterConfig;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * Subclass of Alfredo's <code>AuthenticationFilter</code> that obtains its configuration
+ * from HttpFSServer's server configuration.
+ */
+public class AuthFilter extends AuthenticationFilter {
+  private static final String CONF_PREFIX = "httpfs.authentication.";
+
+  /**
+   * Returns the Alfredo configuration from HttpFSServer's configuration.
+   * <p/>
+   * It returns all HttpFSServer's configuration properties prefixed with
+   * <code>httpfs.authentication</code>. The <code>httpfs.authentication</code>
+   * prefix is removed from the returned property names.
+   *
+   * @param configPrefix parameter not used.
+   * @param filterConfig parameter not used.
+   *
+   * @return Alfredo configuration read from HttpFSServer's configuration.
+   */
+  @Override
+  protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) {
+    Properties props = new Properties();
+    Configuration conf = HttpFSServerWebApp.get().getConfig();
+
+    props.setProperty(AuthenticationFilter.COOKIE_PATH, "/");
+    for (Map.Entry<String, String> entry : conf) {
+      String name = entry.getKey();
+      if (name.startsWith(CONF_PREFIX)) {
+        String value = conf.get(name);
+        name = name.substring(CONF_PREFIX.length());
+        props.setProperty(name, value);
+      }
+    }
+    return props;
+  }
+
+
+}
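
For illustration only (not part of the patch): with a server configuration containing, say, httpfs.authentication.type=simple and httpfs.authentication.signature.secret=secret (hypothetical values), the Properties returned above would be:

    // type             = simple
    // signature.secret = secret
    // cookie.path      = "/"   (hardcoded via AuthenticationFilter.COOKIE_PATH)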


