hadoop-common-commits mailing list archives

From sur...@apache.org
Subject svn commit: r1415815 [1/2] - in /hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common: ./ dev-support/ src/main/conf/ src/main/docs/ src/main/docs/src/documentation/content/xdocs/ src/main/java/ src/main/java/org/apache/hadoop/fs...
Date Fri, 30 Nov 2012 19:58:38 GMT
Author: suresh
Date: Fri Nov 30 19:58:09 2012
New Revision: 1415815

URL: http://svn.apache.org/viewvc?rev=1415815&view=rev
Log:
Merging trunk into branch-trunk-win

Added:
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathAccessDeniedException.java
      - copied unchanged from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathAccessDeniedException.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathExistsException.java
      - copied unchanged from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathExistsException.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathIOException.java
      - copied unchanged from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathIOException.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathIsDirectoryException.java
      - copied unchanged from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathIsDirectoryException.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathIsNotDirectoryException.java
      - copied unchanged from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathIsNotDirectoryException.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathIsNotEmptyDirectoryException.java
      - copied unchanged from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathIsNotEmptyDirectoryException.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathNotFoundException.java
      - copied unchanged from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathNotFoundException.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathOperationException.java
      - copied unchanged from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathOperationException.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathPermissionException.java
      - copied unchanged from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathPermissionException.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/NoCacheFilter.java
      - copied unchanged from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/NoCacheFilter.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPlainServer.java
      - copied unchanged from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPlainServer.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/apt/CLIMiniCluster.apt.vm
      - copied unchanged from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/CLIMiniCluster.apt.vm
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm
      - copied unchanged from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/apt/SingleCluster.apt.vm
      - copied unchanged from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/SingleCluster.apt.vm
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/resources/
      - copied from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/resources/
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/resources/css/
      - copied from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/resources/css/
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/resources/css/site.css
      - copied unchanged from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/resources/css/site.css
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/site.xml
      - copied unchanged from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/site.xml
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java
      - copied unchanged from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
      - copied unchanged from r1415786, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
Removed:
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathExceptions.java
Modified:
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/cluster_setup.xml
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Mkdir.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/MoveCommands.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/SetReplication.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Tail.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Touchz.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt Fri Nov 30 19:58:09 2012
@@ -132,6 +132,15 @@ Trunk (Unreleased)
     HADOOP-9004. Allow security unit tests to use external KDC. (Stephen Chu
     via suresh)
 
+    HADOOP-6616. Improve documentation for rack awareness. (Adam Faris via 
+    jghoman)
+
+    HADOOP-9075. FileContext#FSLinkResolver should be made static.
+    (Arpit Agarwal via suresh)
+
+    HADOOP-9093. Move all the Exception in PathExceptions to o.a.h.fs package.
+    (suresh)
+
   BUG FIXES
 
     HADOOP-8177. MBeans shouldn't try to register when it fails to create MBeanName.
@@ -277,11 +286,15 @@ Trunk (Unreleased)
 
     HADOOP-8974. TestDFVariations fails on Windows. (Chris Nauroth via suresh)
 
+    HADOOP-9037. Bug in test-patch.sh and precommit build process (Kihwal Lee
+    via jlowe)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)
 
     HADOOP-8589 ViewFs tests fail when tests and home dirs are nested (sanjay Radia)
+
 Release 2.0.3-alpha - Unreleased 
 
   INCOMPATIBLE CHANGES
@@ -291,6 +304,8 @@ Release 2.0.3-alpha - Unreleased 
     HADOOP-8597. Permit FsShell's text command to read Avro files.
     (Ivan Vladimirov Ivanov via cutting)
 
+    HADOOP-9020. Add a SASL PLAIN server (daryn via bobby)
+
   IMPROVEMENTS
 
     HADOOP-8789. Tests setLevel(Level.OFF) should be Level.ERROR.
@@ -355,6 +370,19 @@ Release 2.0.3-alpha - Unreleased 
 
     HADOOP-9015. Standardize creation of SaslRpcServers (daryn via bobby)
 
+    HADOOP-8860. Split MapReduce and YARN sections in documentation navigation.
+    (tomwhite via tucu)
+
+    HADOOP-9021. Enforce configured SASL method on the server (daryn via
+    bobby)
+
+    HADOOP-8998. Set Cache-Control no-cache header on all dynamic content. (tucu)
+
+    HADOOP-9035. Generalize setup of LoginContext (daryn via bobby)
+
+    HADOOP-9042. Add a test for umask in FileSystemContractBaseTest.
+    (Colin McCabe via eli)
+
   OPTIMIZATIONS
 
     HADOOP-8866. SampleQuantiles#query is O(N^2) instead of O(N). (Andrew Wang
@@ -413,6 +441,21 @@ Release 2.0.3-alpha - Unreleased 
 
     HADOOP-9012. IPC Client sends wrong connection context (daryn via bobby)
 
+    HADOOP-7115. Add a cache for getpwuid_r and getpwgid_r calls (tucu)
+
+    HADOOP-8999. SASL negotiation is flawed (daryn)
+
+    HADOOP-6607. Add different variants of non caching HTTP headers. (tucu)
+
+    HADOOP-9049. DelegationTokenRenewer needs to be Singleton and FileSystems
+    should register/deregister to/from. (Karthik Kambatla via tomwhite)
+
+    HADOOP-9064. Augment DelegationTokenRenewer API to cancel the tokens on 
+    calls to removeRenewAction. (kkambatl via tucu)
+
+    HADOOP-8958. ViewFs:Non absolute mount name failures when running 
+    multiple tests on Windows. (Chris Nauroth via suresh)
+
 Release 2.0.2-alpha - 2012-09-07 
 
   INCOMPATIBLE CHANGES
@@ -1103,6 +1146,27 @@ Release 2.0.0-alpha - 05-23-2012
     HADOOP-8655. Fix TextInputFormat for large deliminators. (Gelesh via
     bobby) 
 
+Release 0.23.6 - UNRELEASED
+
+  INCOMPATIBLE CHANGES
+
+  NEW FEATURES
+
+  IMPROVEMENTS
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
+    HADOOP-9072. Hadoop-Common-0.23-Build Fails to build in Jenkins 
+    (Robert Parker via tgraves)
+
+    HADOOP-8992. Enhance unit-test coverage of class HarFileSystem (Ivan A.
+    Veselovsky via bobby)
+
+    HADOOP-9038. unit-tests for AllocatorPerContext.PathIterator (Ivan A.
+    Veselovsky via bobby)
+
 Release 0.23.5 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -1132,7 +1196,13 @@ Release 0.23.5 - UNRELEASED
 
     HADOOP-8986. Server$Call object is never released after it is sent (bobby)
 
-Release 0.23.4 - UNRELEASED
+    HADOOP-9022. Hadoop distcp tool fails to copy file if -m 0 specified
+    (Jonathan Eagles via bobby)
+
+    HADOOP-9025. org.apache.hadoop.tools.TestCopyListing failing (Jonathan
+    Eagles via jlowe)
+
+Release 0.23.4
 
   INCOMPATIBLE CHANGES
 

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1407223-1415786

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml Fri Nov 30 19:58:09 2012
@@ -291,5 +291,13 @@
        <Field name="previousSnapshot" />
        <Bug pattern="IS2_INCONSISTENT_SYNC" />
      </Match>
-
+     <!--
+       The method uses a generic type T that extends two other types
+       T1 and T2. Findbugs complains of a cast from T1 to T2.
+     -->
+     <Match>
+       <Class name="org.apache.hadoop.fs.DelegationTokenRenewer" />
+       <Method name="removeRenewAction" />
+       <Bug pattern="BC_UNCONFIRMED_CAST" />
+     </Match>
  </FindBugsFilter>

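The exclusion added above covers a false positive arising from Java's intersection-type bounds. As a hedged illustration (not part of the patch), the pattern looks like the following: with a bound <T extends FileSystem & Renewable>, javac erases T to its first bound, so a call to a method of the second bound compiles to a checked cast, which FindBugs reports as BC_UNCONFIRMED_CAST even though the bound guarantees it is safe. (Renewable#getRenewToken returning Token<?> is assumed here, matching the DelegationTokenRenewer changes later in this patch.)

    import org.apache.hadoop.fs.DelegationTokenRenewer.Renewable;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.security.token.Token;

    class IntersectionCastExample {
      // Mirrors the shape of DelegationTokenRenewer#removeRenewAction: T is
      // erased to FileSystem, so fs.getRenewToken() compiles to
      // ((Renewable) fs).getRenewToken() -- the cast FindBugs flags.
      static <T extends FileSystem & Renewable> Token<?> tokenOf(T fs) {
        return fs.getRenewToken();
      }
    }
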
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties Fri Nov 30 19:58:09 2012
@@ -100,6 +100,13 @@ log4j.appender.TLA.layout=org.apache.log
 log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
 
 #
+# HDFS block state change log from block manager
+#
+# Uncomment the following to suppress normal block state change
+# messages from BlockManager in NameNode.
+#log4j.logger.BlockStateChange=WARN
+
+#
 #Security appender
 #
 hadoop.security.logger=INFO,NullAppender

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1407223-1415786

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/cluster_setup.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/cluster_setup.xml?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/cluster_setup.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/cluster_setup.xml Fri Nov 30 19:58:09 2012
@@ -1292,23 +1292,139 @@
     
     <section>
       <title>Hadoop Rack Awareness</title>
-      <p>The HDFS and the Map/Reduce components are rack-aware.</p>
-      <p>The <code>NameNode</code> and the <code>JobTracker</code> obtains the
-      <code>rack id</code> of the slaves in the cluster by invoking an API 
-      <a href="ext:api/org/apache/hadoop/net/dnstoswitchmapping/resolve
-      ">resolve</a> in an administrator configured
-      module. The API resolves the slave's DNS name (also IP address) to a 
-      rack id. What module to use can be configured using the configuration
-      item <code>net.topology.node.switch.mapping.impl</code>. The default 
-      implementation of the same runs a script/command configured using 
-      <code>net.topology.script.file.name</code>. If topology.script.file.name is
-      not set, the rack id <code>/default-rack</code> is returned for any 
-      passed IP address. The additional configuration in the Map/Reduce
-      part is <code>mapred.cache.task.levels</code> which determines the number
-      of levels (in the network topology) of caches. So, for example, if it is
-      the default value of 2, two levels of caches will be constructed - 
-      one for hosts (host -> task mapping) and another for racks 
-      (rack -> task mapping).
+      <p>
+         Both the HDFS and Map/Reduce components are rack-aware.  HDFS block placement uses rack 
+         awareness for fault tolerance by placing one block replica on a different rack.  This provides 
+         data availability in the event of a network switch failure within the cluster.  The JobTracker uses rack
+         awareness to reduce network transfers of HDFS data blocks by attempting to schedule tasks on DataNodes holding a local
+         copy of needed HDFS blocks.  If the tasks cannot be scheduled on the DataNodes
+         containing the needed HDFS blocks, the tasks will be scheduled on the same rack to reduce network transfers where possible.
+      </p>
+      <p>The NameNode and the JobTracker obtain the rack ids of the cluster slaves by invoking either 
+         an external script or a Java class, as specified in the configuration files.  Whether a Java 
+         class or an external script is used for topology, the output must adhere to the Java 
+         <a href="ext:api/org/apache/hadoop/net/dnstoswitchmapping/resolve">DNSToSwitchMapping</a> 
+         interface.  The interface expects a one-to-one correspondence to be maintained, with the 
+         topology information in the format '/myrack/myhost', where '/' is the topology 
+         delimiter, 'myrack' is the rack identifier, and 'myhost' is the individual host.  Assuming 
+         a single /24 subnet per rack, one could use the format '/192.168.100.0/192.168.100.5' as a 
+         unique rack-host topology mapping.
+      </p>
+      <p>
+         To use a Java class for topology mapping, the class name is specified by the 
+         <code>topology.node.switch.mapping.impl</code> parameter in the configuration file. 
+         An example, NetworkTopology.java, is included with the Hadoop distribution and can be customized 
+         by the Hadoop administrator.  If not included with your distribution, NetworkTopology.java can also be found in the Hadoop 
+         <a href="http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java?view=markup">
+         Subversion tree</a>.  Using a Java class instead of an external script has a slight performance benefit, in 
+         that no external process needs to be forked when a new slave node registers itself with the JobTracker or NameNode.  
+         As this class is only used during slave node registration, the performance benefit is limited.
+      </p>
+      <p>
+         An external script is specified with the
+         <code>topology.script.file.name</code> parameter in the configuration files.  Unlike the Java 
+         class, the external topology script is not included with the Hadoop distribution and must be provided by the 
+         administrator.  Hadoop will send multiple IP addresses to ARGV when forking the topology script.  The 
+         number of IP addresses sent to the topology script is controlled with <code>net.topology.script.number.args</code>
+         and defaults to 100. If <code>net.topology.script.number.args</code> were changed to 1, a topology script would 
+         be forked for each IP submitted by DataNodes and/or TaskTrackers.  Below are example topology scripts.
+      </p>
+      <section>
+      <title>Python example</title>
+      <source>
+      <code>
+      #!/usr/bin/python
+
+      # this script makes assumptions about the physical environment.
+      #  1) each rack is its own layer 3 network with a /24 subnet, which could be typical where each rack has its own
+      #     switch with uplinks to a central core router.
+      #     
+      #             +-----------+
+      #             |core router|
+      #             +-----------+
+      #            /             \
+      #   +-----------+        +-----------+
+      #   |rack switch|        |rack switch|
+      #   +-----------+        +-----------+
+      #   | data node |        | data node |
+      #   +-----------+        +-----------+
+      #   | data node |        | data node |
+      #   +-----------+        +-----------+
+      #
+      # 2) topology script gets list of IP's as input, calculates network address, and prints '/network_address/ip'.
+
+      import netaddr
+      import sys             
+      sys.argv.pop(0)                                                  # discard name of topology script from argv list as we just want IP addresses
+
+      netmask = '255.255.255.0'                                        # set netmask to what's being used in your environment.  The example uses a /24
+
+      for ip in sys.argv:                                              # loop over list of datanode IP's
+          address = '{0}/{1}'.format(ip, netmask)                      # format address string so it looks like 'ip/netmask' to make netaddr work
+          try:
+              network_address = netaddr.IPNetwork(address).network     # calculate and print network address
+              print "/{0}".format(network_address)                     
+          except:
+              print "/rack-unknown"                                    # print catch-all value if unable to calculate network address
+
+      </code>
+      </source>
+      </section>
+          
+      <section>
+      <title>Bash example</title>
+      <source>
+      <code>
+      #!/bin/bash
+      # Here's a bash example to show just how simple these scripts can be
+      
+      # Assuming we have a flat network with everything on a single switch, we can fake a rack topology. 
+      # This could occur in a lab environment where we have limited nodes, like 2-8 physical machines on an unmanaged switch. 
+      # This may also apply to multiple virtual machines running on the same physical hardware.  
+      # The number of machines isn't important; what matters is that we are trying to fake a network topology when there isn't one. 
+      #
+      #       +----------+    +--------+
+      #       |jobtracker|    |datanode| 
+      #       +----------+    +--------+
+      #              \        /
+      #  +--------+  +--------+  +--------+
+      #  |datanode|--| switch |--|datanode|
+      #  +--------+  +--------+  +--------+
+      #              /        \
+      #       +--------+    +--------+
+      #       |datanode|    |namenode| 
+      #       +--------+    +--------+
+      #
+      # With this network topology, we are treating each host as a rack.  This is done by taking the last octet 
+      # of the datanode's IP and prepending it with the string '/rack-'.  The advantage of doing this is that HDFS
+      # can still create its 'off-rack' block copy.
+      
+      # 1) 'echo $@' will echo all ARGV values to xargs.  
+      # 2) 'xargs' will enforce that we print a single argv value per line.
+      # 3) 'awk' will split fields on dots and append the last field to the string '/rack-'. If awk 
+      #    fails to split the input into four fields, it will still print '/rack-' followed by the last field value.
+
+      echo $@ | xargs -n 1 | awk -F '.' '{print "/rack-"$NF}'
+
+
+      </code>
+      </source>
+      </section>
+
+
+      <p>
+         If neither <code>topology.script.file.name</code> nor <code>topology.node.switch.mapping.impl</code> is 
+         set, the rack id '/default-rack' is returned for any passed IP address.  
+         While this behavior may appear desirable, it can cause issues with HDFS block replication: 
+         the default policy is to write one replicated block off rack, which cannot be done when there is 
+         only a single rack named '/default-rack'.
+      </p>
+      <p>
+         An additional configuration setting is <code>mapred.cache.task.levels</code>, which determines 
+         the number of levels (in the network topology) of caches. For example, at the 
+         default value of 2, two levels of caches will be constructed - one for hosts 
+         (host -> task mapping) and another for racks (rack -> task mapping), matching the two-level 
+         '/myrack/myhost' topology described above.
       </p>
     </section>
     

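For reference, here is a minimal sketch (not part of this commit) of a custom Java topology mapper that mirrors the bash example above by treating each host as its own rack. The class and package names are hypothetical; DNSToSwitchMapping and its resolve method are the extension point the documentation references, and the class would be wired in via the topology.node.switch.mapping.impl parameter.

    package org.example.topology; // hypothetical package

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.net.DNSToSwitchMapping;

    public class LastOctetRackMapping implements DNSToSwitchMapping {
      @Override
      public List<String> resolve(List<String> names) {
        // Return exactly one rack id per input, in order, preserving the
        // one-to-one correspondence the documentation requires.
        List<String> racks = new ArrayList<String>(names.size());
        for (String name : names) {
          int lastDot = name.lastIndexOf('.');
          racks.add(lastDot >= 0
              ? "/rack-" + name.substring(lastDot + 1)   // e.g. /rack-5
              : "/default-rack");                        // non-dotted input
        }
        return racks;
      }
    }
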
Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1407223-1415786

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java Fri Nov 30 19:58:09 2012
@@ -184,5 +184,11 @@ public class CommonConfigurationKeys ext
    */
   public static final String KERBEROS_TICKET_CACHE_PATH =
       "hadoop.security.kerberos.ticket.cache.path";
-}
 
+  public static final String HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_KEY =
+    "hadoop.security.uid.cache.secs";
+
+  public static final long HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_DEFAULT =
+    4*60*60; // 4 hours
+
+}
\ No newline at end of file

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java Fri Nov 30 19:58:09 2012
@@ -24,6 +24,8 @@ import java.util.concurrent.DelayQueue;
 import java.util.concurrent.Delayed;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -33,8 +35,11 @@ import org.apache.hadoop.util.Time;
  * A daemon thread that waits for the next file system to renew.
  */
 @InterfaceAudience.Private
-public class DelegationTokenRenewer<T extends FileSystem & DelegationTokenRenewer.Renewable>
+public class DelegationTokenRenewer
     extends Thread {
+  private static final Log LOG = LogFactory
+      .getLog(DelegationTokenRenewer.class);
+
   /** The renewable interface used by the renewer. */
   public interface Renewable {
     /** @return the renew token. */
@@ -93,7 +98,7 @@ public class DelegationTokenRenewer<T ex
      * @param newTime the new time
      */
     private void updateRenewalTime() {
-      renewalTime = RENEW_CYCLE + Time.now();
+      renewalTime = renewCycle + Time.now();
     }
 
     /**
@@ -134,34 +139,82 @@ public class DelegationTokenRenewer<T ex
   }
 
   /** Wait for 95% of a day between renewals */
-  private static final int RENEW_CYCLE = 24 * 60 * 60 * 950;
+  private static final int RENEW_CYCLE = 24 * 60 * 60 * 950; 
 
-  private DelayQueue<RenewAction<T>> queue = new DelayQueue<RenewAction<T>>();
+  @InterfaceAudience.Private
+  protected static int renewCycle = RENEW_CYCLE;
 
-  public DelegationTokenRenewer(final Class<T> clazz) {
+  /** Queue to maintain the RenewActions to be processed by the {@link #run()} */
+  private volatile DelayQueue<RenewAction<?>> queue = new DelayQueue<RenewAction<?>>();
+  
+  /**
+   * Create the singleton instance. However, the thread can be started lazily in
+   * {@link #addRenewAction(FileSystem)}
+   */
+  private static DelegationTokenRenewer INSTANCE = null;
+
+  private DelegationTokenRenewer(final Class<? extends FileSystem> clazz) {
     super(clazz.getSimpleName() + "-" + DelegationTokenRenewer.class.getSimpleName());
     setDaemon(true);
   }
 
+  public static synchronized DelegationTokenRenewer getInstance() {
+    if (INSTANCE == null) {
+      INSTANCE = new DelegationTokenRenewer(FileSystem.class);
+    }
+    return INSTANCE;
+  }
+
   /** Add a renew action to the queue. */
-  public void addRenewAction(final T fs) {
+  public synchronized <T extends FileSystem & Renewable> void addRenewAction(final T fs) {
     queue.add(new RenewAction<T>(fs));
+    if (!isAlive()) {
+      start();
+    }
+  }
+
+  /**
+   * Remove the associated renew action from the queue
+   * 
+   * @throws IOException
+   */
+  public synchronized <T extends FileSystem & Renewable> void removeRenewAction(
+      final T fs) throws IOException {
+    for (RenewAction<?> action : queue) {
+      if (action.weakFs.get() == fs) {
+        try {
+          fs.getRenewToken().cancel(fs.getConf());
+        } catch (InterruptedException ie) {
+          LOG.error("Interrupted while canceling token for " + fs.getUri()
+              + " filesystem");
+          if (LOG.isDebugEnabled()) {
+            LOG.debug(ie.getStackTrace());
+          }
+        }
+        queue.remove(action);
+        return;
+      }
+    }
   }
 
+  @SuppressWarnings("static-access")
   @Override
   public void run() {
     for(;;) {
-      RenewAction<T> action = null;
+      RenewAction<?> action = null;
       try {
-        action = queue.take();
-        if (action.renew()) {
-          action.updateRenewalTime();
-          queue.add(action);
+        synchronized (this) {
+          action = queue.take();
+          if (action.renew()) {
+            action.updateRenewalTime();
+            queue.add(action);
+          }
         }
       } catch (InterruptedException ie) {
         return;
       } catch (Exception ie) {
-        T.LOG.warn("Failed to renew token, action=" + action, ie);
+        action.weakFs.get().LOG.warn("Failed to renew token, action=" + action,
+            ie);
       }
     }
   }

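For context, a minimal usage sketch of the singleton API introduced above (not part of the patch). Any FileSystem that also implements DelegationTokenRenewer.Renewable satisfies the bound:

    import java.io.IOException;

    import org.apache.hadoop.fs.DelegationTokenRenewer;
    import org.apache.hadoop.fs.FileSystem;

    class RenewerClient {
      // T mirrors the bounds on addRenewAction/removeRenewAction above.
      static <T extends FileSystem & DelegationTokenRenewer.Renewable>
          void useAndRelease(T fs) throws IOException {
        DelegationTokenRenewer renewer = DelegationTokenRenewer.getInstance();
        renewer.addRenewAction(fs);       // lazily starts the daemon thread
        try {
          // ... perform file system operations while the token stays fresh ...
        } finally {
          renewer.removeRenewAction(fs);  // cancels the token and dequeues it
        }
      }
    }
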
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java Fri Nov 30 19:58:09 2012
@@ -1119,7 +1119,7 @@ public final class FileContext {
    * @param target The symlink's absolute target
    * @return Fully qualified version of the target.
    */
-  private Path qualifySymlinkTarget(final AbstractFileSystem pathFS,
+  private static Path qualifySymlinkTarget(final AbstractFileSystem pathFS,
     Path pathWithLink, Path target) {
     // NB: makeQualified uses the target's scheme and authority, if
     // specified, and the scheme and authority of pathFS, if not.
@@ -2321,7 +2321,7 @@ public final class FileContext {
    * Class used to perform an operation on and resolve symlinks in a
    * path. The operation may potentially span multiple file systems.  
    */
-  protected abstract class FSLinkResolver<T> {
+  protected static abstract class FSLinkResolver<T> {
     // The maximum number of symbolic link components in a path
     private static final int MAX_PATH_LINKS = 32;
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java Fri Nov 30 19:58:09 2012
@@ -584,13 +584,6 @@ public class HarFileSystem extends Filte
     public String getName() {
       return name;
     }
-    
-    public List<String> getChildren() {
-      return children;
-    }
-    public String getFileName() {
-      return name;
-    }
     public String getPartName() {
       return partName;
     }
@@ -662,15 +655,6 @@ public class HarFileSystem extends Filte
         hstatus.getStartIndex(), hstatus.getLength(), bufferSize);
   }
  
-  /*
-   * create throws an exception in Har filesystem.
-   * The archive once created cannot be changed.
-   */
-  public FSDataOutputStream create(Path f, int bufferSize) 
-                                    throws IOException {
-    throw new IOException("Har: Create not allowed");
-  }
-  
   @Override
   public FSDataOutputStream create(Path f,
       FsPermission permission,
@@ -1106,4 +1090,11 @@ public class HarFileSystem extends Filte
       }
     }
   }
+  
+  /*
+   * testing purposes only:
+   */
+  HarMetaData getMetadata() {
+    return metadata;
+  }
 }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java Fri Nov 30 19:58:09 2012
@@ -481,12 +481,15 @@ public class LocalDirAllocator {
 
       @Override
       public Path next() {
-        Path result = next;
+        final Path result = next;
         try {
           advance();
         } catch (IOException ie) {
           throw new RuntimeException("Can't check existance of " + next, ie);
         }
+        if (result == null) {
+          throw new NoSuchElementException();
+        }
         return result;
       }
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java Fri Nov 30 19:58:09 2012
@@ -184,7 +184,18 @@ public class FsPermission implements Wri
     return str;
   }
 
-  /** Apply a umask to this permission and return a new one */
+  /**
+   * Apply a umask to this permission and return a new one.
+   *
+   * The umask is used by create, mkdir, and other Hadoop filesystem operations.
+   * The mode argument for these operations is modified by removing the bits
+   * which are set in the umask.  Thus, the umask limits the permissions which
+   * newly created files and directories get.
+   *
+   * @param umask              The umask to use
+   * 
+   * @return                   The effective permission
+   */
   public FsPermission applyUMask(FsPermission umask) {
     return new FsPermission(useraction.and(umask.useraction.not()),
         groupaction.and(umask.groupaction.not()),

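A small worked example of the umask semantics the new javadoc describes (illustrative only; FsPermission and applyUMask are as shown above):

    import org.apache.hadoop.fs.permission.FsPermission;

    class UMaskExample {
      public static void main(String[] args) {
        FsPermission requested = new FsPermission((short) 0777); // rwxrwxrwx
        FsPermission umask = new FsPermission((short) 0022);
        // Bits set in the umask are cleared from the mode: 0777 & ~0022 == 0755
        System.out.println(requested.applyUMask(umask)); // prints rwxr-xr-x
      }
    }
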
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java Fri Nov 30 19:58:09 2012
@@ -34,7 +34,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.shell.PathExceptions.PathNotFoundException;
+import org.apache.hadoop.fs.PathNotFoundException;
 import org.apache.hadoop.util.StringUtils;
 
 /**

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java Fri Nov 30 19:58:09 2012
@@ -29,12 +29,12 @@ import org.apache.hadoop.fs.FSDataOutput
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FilterFileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.shell.PathExceptions.PathExistsException;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIOException;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIsDirectoryException;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIsNotDirectoryException;
-import org.apache.hadoop.fs.shell.PathExceptions.PathNotFoundException;
-import org.apache.hadoop.fs.shell.PathExceptions.PathOperationException;
+import org.apache.hadoop.fs.PathExistsException;
+import org.apache.hadoop.fs.PathIOException;
+import org.apache.hadoop.fs.PathIsDirectoryException;
+import org.apache.hadoop.fs.PathIsNotDirectoryException;
+import org.apache.hadoop.fs.PathNotFoundException;
+import org.apache.hadoop.fs.PathOperationException;
 import org.apache.hadoop.io.IOUtils;
 
 /**

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java Fri Nov 30 19:58:09 2012
@@ -29,7 +29,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIsDirectoryException;
+import org.apache.hadoop.fs.PathIsDirectoryException;
 import org.apache.hadoop.io.IOUtils;
 
 /** Various commands for copy files */

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java Fri Nov 30 19:58:09 2012
@@ -24,11 +24,11 @@ import java.util.LinkedList;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.PathIOException;
+import org.apache.hadoop.fs.PathIsDirectoryException;
+import org.apache.hadoop.fs.PathIsNotDirectoryException;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
 import org.apache.hadoop.fs.Trash;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIOException;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIsDirectoryException;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIsNotDirectoryException;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIsNotEmptyDirectoryException;
 
 /**
  * Classes that delete paths

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java Fri Nov 30 19:58:09 2012
@@ -38,7 +38,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIsDirectoryException;
+import org.apache.hadoop.fs.PathIsDirectoryException;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.IOUtils;

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Mkdir.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Mkdir.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Mkdir.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Mkdir.java Fri Nov 30 19:58:09 2012
@@ -24,10 +24,10 @@ import java.util.LinkedList;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.shell.PathExceptions.PathExistsException;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIOException;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIsNotDirectoryException;
-import org.apache.hadoop.fs.shell.PathExceptions.PathNotFoundException;
+import org.apache.hadoop.fs.PathExistsException;
+import org.apache.hadoop.fs.PathIOException;
+import org.apache.hadoop.fs.PathIsNotDirectoryException;
+import org.apache.hadoop.fs.PathNotFoundException;
 
 /**
  * Create the given dir

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/MoveCommands.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/MoveCommands.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/MoveCommands.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/MoveCommands.java Fri Nov 30 19:58:09 2012
@@ -23,8 +23,8 @@ import java.util.LinkedList;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.PathIOException;
 import org.apache.hadoop.fs.shell.CopyCommands.CopyFromLocal;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIOException;
 
 /** Various commands for moving files */
 @InterfaceAudience.Private

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java Fri Nov 30 19:58:09 2012
@@ -32,10 +32,10 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIOException;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIsDirectoryException;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIsNotDirectoryException;
-import org.apache.hadoop.fs.shell.PathExceptions.PathNotFoundException;
+import org.apache.hadoop.fs.PathIOException;
+import org.apache.hadoop.fs.PathIsDirectoryException;
+import org.apache.hadoop.fs.PathIsNotDirectoryException;
+import org.apache.hadoop.fs.PathNotFoundException;
 
 /**
  * Encapsulates a Path (path), its FileStatus (stat), and its FileSystem (fs).

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/SetReplication.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/SetReplication.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/SetReplication.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/SetReplication.java Fri Nov 30 19:58:09 2012
@@ -25,7 +25,7 @@ import java.util.List;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.BlockLocation;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIOException;
+import org.apache.hadoop.fs.PathIOException;
 
 /**
  * Modifies the replication factor

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Tail.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Tail.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Tail.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Tail.java Fri Nov 30 19:58:09 2012
@@ -25,7 +25,7 @@ import java.util.List;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIsDirectoryException;
+import org.apache.hadoop.fs.PathIsDirectoryException;
 import org.apache.hadoop.io.IOUtils;
 
 /**

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java Fri Nov 30 19:58:09 2012
@@ -23,7 +23,6 @@ import java.util.LinkedList;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.fs.shell.PathExceptions.PathNotFoundException;
 
 /**
  * Perform shell-like file tests 
@@ -91,8 +90,6 @@ class Test extends FsCommand {  
 
   @Override
   protected void processNonexistentPath(PathData item) throws IOException {
-    // NOTE: errors for FNF is not how the shell works!
-    if (flag != 'e') displayError(new PathNotFoundException(item.toString()));
     exitCode = 1;
   }
 }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Touchz.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Touchz.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Touchz.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Touchz.java Fri Nov 30 19:58:09 2012
@@ -23,9 +23,9 @@ import java.util.LinkedList;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIOException;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIsDirectoryException;
-import org.apache.hadoop.fs.shell.PathExceptions.PathNotFoundException;
+import org.apache.hadoop.fs.PathIOException;
+import org.apache.hadoop.fs.PathIsDirectoryException;
+import org.apache.hadoop.fs.PathNotFoundException;
 
 /**
  * Unix touch like commands 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java Fri Nov 30 19:58:09 2012
@@ -26,6 +26,7 @@ import java.net.InetSocketAddress;
 import java.net.URL;
 import java.security.GeneralSecurityException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.List;
@@ -104,6 +105,7 @@ public class HttpServer implements Filte
   public static final String CONF_CONTEXT_ATTRIBUTE = "hadoop.conf";
   public static final String ADMINS_ACL = "admins.acl";
   public static final String SPNEGO_FILTER = "SpnegoFilter";
+  public static final String NO_CACHE_FILTER = "NoCacheFilter";
 
   public static final String BIND_ADDRESS = "bind.address";
 
@@ -256,6 +258,7 @@ public class HttpServer implements Filte
     webAppContext.setWar(appDir + "/" + name);
     webAppContext.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
     webAppContext.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
+    addNoCacheFilter(webAppContext);
     webServer.addHandler(webAppContext);
 
     addDefaultApps(contexts, appDir, conf);
@@ -280,6 +283,12 @@ public class HttpServer implements Filte
     }
   }
 
+  @SuppressWarnings("unchecked")
+  private void addNoCacheFilter(WebAppContext ctxt) {
+    defineFilter(ctxt, NO_CACHE_FILTER,
+      NoCacheFilter.class.getName(), Collections.EMPTY_MAP, new String[] { "/*"});
+  }
+
   /**
    * Create a required listener for the Jetty instance listening on the port
    * provided. This wrapper and all subclasses must create at least one
@@ -346,6 +355,7 @@ public class HttpServer implements Filte
       }
       logContext.setDisplayName("logs");
       setContextAttributes(logContext, conf);
+      addNoCacheFilter(webAppContext);
       defaultContexts.put(logContext, true);
     }
     // set up the context for "/static/*"
@@ -377,6 +387,7 @@ public class HttpServer implements Filte
   public void addContext(Context ctxt, boolean isFiltered)
       throws IOException {
     webServer.addHandler(ctxt);
+    addNoCacheFilter(webAppContext);
     defaultContexts.put(ctxt, isFiltered);
   }
 
@@ -470,7 +481,7 @@ public class HttpServer implements Filte
       holder.setName(name);
     }
     webAppContext.addServlet(holder, pathSpec);
-    
+
     if(requireAuth && UserGroupInformation.isSecurityEnabled()) {
        LOG.info("Adding Kerberos (SPNEGO) filter to " + name);
        ServletHandler handler = webAppContext.getServletHandler();
@@ -962,7 +973,7 @@ public class HttpServer implements Filte
       @Override
       public Enumeration<String> getParameterNames() {
         return new Enumeration<String>() {
-          private Enumeration<String> rawIterator = 
+          private Enumeration<String> rawIterator =
             rawRequest.getParameterNames();
           @Override
           public boolean hasMoreElements() {

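Three call sites above now wire in the new NO_CACHE_FILTER. One detail worth flagging: the hunks in the logs-context and addContext paths pass webAppContext to addNoCacheFilter rather than the freshly created logContext/ctxt, so the filter appears to be re-applied to the main web app context instead of the new one; whether that is intentional is not clear from this diff. For orientation, a minimal sketch of what a no-cache servlet filter typically looks like (the actual NoCacheFilter added in this merge may differ in detail):

    import java.io.IOException;
    import javax.servlet.Filter;
    import javax.servlet.FilterChain;
    import javax.servlet.FilterConfig;
    import javax.servlet.ServletException;
    import javax.servlet.ServletRequest;
    import javax.servlet.ServletResponse;
    import javax.servlet.http.HttpServletResponse;

    public class NoCacheFilterSketch implements Filter {
      @Override
      public void init(FilterConfig config) {}

      @Override
      public void doFilter(ServletRequest req, ServletResponse res,
                           FilterChain chain) throws IOException, ServletException {
        // Stamp anti-caching headers on every response before it is written.
        HttpServletResponse httpRes = (HttpServletResponse) res;
        httpRes.setHeader("Cache-Control", "no-cache");
        httpRes.setHeader("Pragma", "no-cache");
        chain.doFilter(req, res);
      }

      @Override
      public void destroy() {}
    }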
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java Fri Nov 30 19:58:09 2012
@@ -120,7 +120,7 @@ public class SecureIOUtils {
     FileInputStream fis = new FileInputStream(f);
     boolean success = false;
     try {
-      Stat stat = NativeIO.fstat(fis.getFD());
+      Stat stat = NativeIO.getFstat(fis.getFD());
       checkStat(f, stat.getOwner(), stat.getGroup(), expectedOwner,
           expectedGroup);
       success = true;

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java Fri Nov 30 19:58:09 2012
@@ -19,8 +19,13 @@ package org.apache.hadoop.io.nativeio;
 
 import java.io.FileDescriptor;
 import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.util.NativeCodeLoader;
 
 import org.apache.commons.logging.Log;
@@ -30,6 +35,8 @@ import org.apache.commons.logging.LogFac
  * These functions should generally be used alongside a fallback to another
  * more portable mechanism.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
 public class NativeIO {
   // Flags for open() call from bits/fcntl.h
   public static final int O_RDONLY   =    00;
@@ -86,6 +93,8 @@ public class NativeIO {
     "hadoop.workaround.non.threadsafe.getpwuid";
   static final boolean WORKAROUND_NON_THREADSAFE_CALLS_DEFAULT = false;
 
+  private static long cacheTimeout = -1;
+
   static {
     if (NativeCodeLoader.isNativeCodeLoaded()) {
       try {
@@ -96,6 +105,14 @@ public class NativeIO {
 
         initNative();
         nativeLoaded = true;
+
+        cacheTimeout = conf.getLong(
+          CommonConfigurationKeys.HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_KEY,
+          CommonConfigurationKeys.HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_DEFAULT) *
+          1000;
+        LOG.debug("Initialized cache for IDs to User/Group mapping with a" +
+          " cache timeout of " + cacheTimeout/1000 + " seconds.");
+
       } catch (Throwable t) {
         // This can happen if the user has an older version of libhadoop.so
         // installed - in this case we can continue without native IO
@@ -115,7 +132,7 @@ public class NativeIO {
   /** Wrapper around open(2) */
   public static native FileDescriptor open(String path, int flags, int mode) throws IOException;
   /** Wrapper around fstat(2) */
-  public static native Stat fstat(FileDescriptor fd) throws IOException;
+  private static native Stat fstat(FileDescriptor fd) throws IOException;
   /** Wrapper around chmod(2) */
   public static native void chmod(String path, int mode) throws IOException;
 
@@ -176,6 +193,7 @@ public class NativeIO {
    * Result type of the fstat call
    */
   public static class Stat {
+    private int ownerId, groupId;
     private String owner, group;
     private int mode;
 
@@ -196,9 +214,9 @@ public class NativeIO {
     public static final int S_IWUSR = 0000200;  /* write permission, owner */
     public static final int S_IXUSR = 0000100;  /* execute/search permission, owner */
 
-    Stat(String owner, String group, int mode) {
-      this.owner = owner;
-      this.group = group;
+    Stat(int ownerId, int groupId, int mode) {
+      this.ownerId = ownerId;
+      this.groupId = groupId;
       this.mode = mode;
     }
 
@@ -218,4 +236,61 @@ public class NativeIO {
       return mode;
     }
   }
+
+  static native String getUserName(int uid) throws IOException;
+
+  static native String getGroupName(int uid) throws IOException;
+
+  private static class CachedName {
+    final long timestamp;
+    final String name;
+
+    public CachedName(String name, long timestamp) {
+      this.name = name;
+      this.timestamp = timestamp;
+    }
+  }
+
+  private static final Map<Integer, CachedName> USER_ID_NAME_CACHE =
+    new ConcurrentHashMap<Integer, CachedName>();
+
+  private static final Map<Integer, CachedName> GROUP_ID_NAME_CACHE =
+    new ConcurrentHashMap<Integer, CachedName>();
+
+  private enum IdCache { USER, GROUP }
+
+  private static String getName(IdCache domain, int id) throws IOException {
+    Map<Integer, CachedName> idNameCache = (domain == IdCache.USER)
+      ? USER_ID_NAME_CACHE : GROUP_ID_NAME_CACHE;
+    String name;
+    CachedName cachedName = idNameCache.get(id);
+    long now = System.currentTimeMillis();
+    if (cachedName != null && (cachedName.timestamp + cacheTimeout) > now) {
+      name = cachedName.name;
+    } else {
+      name = (domain == IdCache.USER) ? getUserName(id) : getGroupName(id);
+      if (LOG.isDebugEnabled()) {
+        String type = (domain == IdCache.USER) ? "UserName" : "GroupName";
+        LOG.debug("Got " + type + " " + name + " for ID " + id +
+          " from the native implementation");
+      }
+      cachedName = new CachedName(name, now);
+      idNameCache.put(id, cachedName);
+    }
+    return name;
+  }
+
+  /**
+   * Returns the file stat for a file descriptor.
+   *
+   * @param fd file descriptor.
+   * @return the file descriptor file stat.
+   * @throws IOException thrown if there was an IO error while obtaining the file stat.
+   */
+  public static Stat getFstat(FileDescriptor fd) throws IOException {
+    Stat stat = fstat(fd);
+    stat.owner = getName(IdCache.USER, stat.ownerId);
+    stat.group = getName(IdCache.GROUP, stat.groupId);
+    return stat;
+  }
 }

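The shape of the NativeIO change: the native fstat wrapper goes private and now carries only numeric ownerId/groupId; the public entry point becomes getFstat, which resolves those IDs to names through per-ID ConcurrentHashMap caches whose lifetime comes from HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_KEY (seconds in the config, milliseconds internally). That keeps repeated stats from doing a native name lookup per file; the SecureIOUtils hunk above is the matching caller update. A caller's-eye sketch (assumes libhadoop is loaded):

    import java.io.FileInputStream;
    import java.io.IOException;
    import org.apache.hadoop.io.nativeio.NativeIO;

    class FstatSketch {
      static String ownerOf(String path) throws IOException {
        FileInputStream in = new FileInputStream(path);
        try {
          // First call per uid/gid goes through the native getUserName /
          // getGroupName; later calls inside the cache window hit the map.
          NativeIO.Stat stat = NativeIO.getFstat(in.getFD());
          return stat.getOwner() + ":" + stat.getGroup();
        } finally {
          in.close();
        }
      }
    }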
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Fri Nov 30 19:58:09 2012
@@ -294,14 +294,15 @@ public class Client {
         }
       }
       
+      AuthenticationMethod authentication;
       if (token != null) {
-        authMethod = AuthenticationMethod.TOKEN.getAuthMethod();
-      } else if (UserGroupInformation.isSecurityEnabled()) {
-        // eventually just use the ticket's authMethod
-        authMethod = AuthMethod.KERBEROS;
-      } else {
-        authMethod = AuthMethod.SIMPLE;
+        authentication = AuthenticationMethod.TOKEN;
+      } else if (ticket != null) {
+        authentication = ticket.getRealAuthenticationMethod();
+      } else { // this only happens in lazy tests
+        authentication = AuthenticationMethod.SIMPLE;
       }
+      authMethod = authentication.getAuthMethod();
       
       if (LOG.isDebugEnabled())
         LOG.debug("Use " + authMethod + " authentication for protocol "

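Instead of keying the wire auth method off a global UserGroupInformation.isSecurityEnabled() check, the client now asks the connection's UGI for its real authentication method, which behaves correctly when token and Kerberos identities coexist. The selection order, distilled (a sketch, not the Client internals):

    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
    import org.apache.hadoop.security.token.Token;

    final class AuthSelectionSketch {
      static AuthenticationMethod select(Token<?> token,
                                         UserGroupInformation ticket) {
        if (token != null) {
          return AuthenticationMethod.TOKEN;           // a token always wins
        } else if (ticket != null) {
          return ticket.getRealAuthenticationMethod(); // honor the UGI's method
        }
        return AuthenticationMethod.SIMPLE;            // only in lazy tests
      }
    }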
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Fri Nov 30 19:58:09 2012
@@ -45,6 +45,7 @@ import java.security.PrivilegedException
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -87,7 +88,9 @@ import org.apache.hadoop.security.SaslRp
 import org.apache.hadoop.security.SaslRpcServer.SaslDigestCallbackHandler;
 import org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
 import org.apache.hadoop.security.SaslRpcServer.SaslStatus;
+import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.PolicyProvider;
@@ -113,7 +116,7 @@ import com.google.common.annotations.Vis
 @InterfaceStability.Evolving
 public abstract class Server {
   private final boolean authorize;
-  private boolean isSecurityEnabled;
+  private EnumSet<AuthMethod> enabledAuthMethods;
   private ExceptionsHandler exceptionsHandler = new ExceptionsHandler();
   
   public void addTerseExceptions(Class<?>... exceptionClass) {
@@ -1217,6 +1220,10 @@ public abstract class Server {
           AUDITLOG.warn(AUTH_FAILED_FOR + clientIP + ":" + attemptingUser);
           throw e;
         }
+        if (replyToken == null && authMethod == AuthMethod.PLAIN) {
+          // client needs at least response to know if it should use SIMPLE
+          replyToken = new byte[0];
+        }
         if (replyToken != null) {
           if (LOG.isDebugEnabled())
             LOG.debug("Will send token of size " + replyToken.length
@@ -1334,34 +1341,9 @@ public abstract class Server {
           if (authMethod == null) {
             throw new IOException("Unable to read authentication method");
           }
-          boolean useSaslServer = isSecurityEnabled;
-          final boolean clientUsingSasl;
-          switch (authMethod) {
-            case SIMPLE: { // no sasl for simple
-              clientUsingSasl = false;
-              break;
-            }
-            case DIGEST: { // always allow tokens if there's a secret manager
-              useSaslServer |= (secretManager != null);
-              clientUsingSasl = true;
-              break;
-            }
-            default: {
-              clientUsingSasl = true;
-              break;
-            }
-          }
-          if (useSaslServer) {
-            saslServer = createSaslServer(authMethod);
-          } else if (clientUsingSasl) { // security is off
-            doSaslReply(SaslStatus.SUCCESS, new IntWritable(
-                SaslRpcServer.SWITCH_TO_SIMPLE_AUTH), null, null);
-            authMethod = AuthMethod.SIMPLE;
-            // client has already sent the initial Sasl message and we
-            // should ignore it. Both client and server should fall back
-            // to simple auth from now on.
-            skipInitialSaslHandshake = true;
-          }
+  
+          // this may create a SASL server, or switch us into SIMPLE
+          authMethod = initializeAuthContext(authMethod);
           
           connectionHeaderBuf = null;
           connectionHeaderRead = true;
@@ -1409,10 +1391,24 @@ public abstract class Server {
       }
     }
 
-    private SaslServer createSaslServer(AuthMethod authMethod)
+    private AuthMethod initializeAuthContext(AuthMethod authMethod)
         throws IOException {
       try {
-        return createSaslServerInternal(authMethod);
+        if (enabledAuthMethods.contains(authMethod)) {
+          saslServer = createSaslServer(authMethod);
+        } else if (enabledAuthMethods.contains(AuthMethod.SIMPLE)) {
+          doSaslReply(SaslStatus.SUCCESS, new IntWritable(
+              SaslRpcServer.SWITCH_TO_SIMPLE_AUTH), null, null);
+          authMethod = AuthMethod.SIMPLE;
+          // client has already sent the initial Sasl message and we
+          // should ignore it. Both client and server should fall back
+          // to simple auth from now on.
+          skipInitialSaslHandshake = true;
+        } else {
+          throw new AccessControlException(
+              authMethod + " authentication is not enabled."
+                  + "  Available:" + enabledAuthMethods);
+        }
       } catch (IOException ioe) {
         final String ioeClass = ioe.getClass().getName();
         final String ioeMessage  = ioe.getLocalizedMessage();
@@ -1425,9 +1421,10 @@ public abstract class Server {
         }
         throw ioe;
       }
+      return authMethod;
     }
 
-    private SaslServer createSaslServerInternal(AuthMethod authMethod)
+    private SaslServer createSaslServer(AuthMethod authMethod)
         throws IOException {
       SaslServer saslServer = null;
       String hostname = null;
@@ -1436,18 +1433,9 @@ public abstract class Server {
       
       switch (authMethod) {
         case SIMPLE: {
-          throw new AccessControlException("Authorization ("
-              + CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION
-              + ") is enabled but authentication ("
-              + CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION
-              + ") is configured as simple. Please configure another method "
-              + "like kerberos or digest.");
+          return null; // no sasl for simple
         }
         case DIGEST: {
-          if (secretManager == null) {
-            throw new AccessControlException(
-                "Server is not configured to do DIGEST authentication.");
-          }
           secretManager.checkAvailableForRead();
           hostname = SaslRpcServer.SASL_DEFAULT_REALM;
           saslCallback = new SaslDigestCallbackHandler(secretManager, this);
@@ -1469,6 +1457,7 @@ public abstract class Server {
           break;
         }
         default:
+          // we should never be able to get here
           throw new AccessControlException(
               "Server does not support SASL " + authMethod);
       }
@@ -1908,7 +1897,9 @@ public abstract class Server {
     this.authorize = 
       conf.getBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, 
                       false);
-    this.isSecurityEnabled = UserGroupInformation.isSecurityEnabled();
+
+    // configure supported authentications
+    this.enabledAuthMethods = getAuthMethods(secretManager, conf);
     
     // Start the listener here and let it bind to the port
     listener = new Listener();
@@ -1929,6 +1920,31 @@ public abstract class Server {
     this.exceptionsHandler.addTerseExceptions(StandbyException.class);
   }
 
+  // get the security type from the conf. implicitly include token support
+  // if a secret manager is provided, or fail if token is the conf value but
+  // there is no secret manager
+  private EnumSet<AuthMethod> getAuthMethods(SecretManager<?> secretManager,
+                                             Configuration conf) {
+    AuthenticationMethod confAuthenticationMethod =
+        SecurityUtil.getAuthenticationMethod(conf);        
+    EnumSet<AuthMethod> authMethods =
+        EnumSet.of(confAuthenticationMethod.getAuthMethod()); 
+        
+    if (confAuthenticationMethod == AuthenticationMethod.TOKEN) {
+      if (secretManager == null) {
+        throw new IllegalArgumentException(AuthenticationMethod.TOKEN +
+            " authentication requires a secret manager");
+      } 
+    } else if (secretManager != null) {
+      LOG.debug(AuthenticationMethod.TOKEN +
+          " authentication enabled for secret manager");
+      authMethods.add(AuthenticationMethod.TOKEN.getAuthMethod());
+    }
+    
+    LOG.debug("Server accepts auth methods:" + authMethods);
+    return authMethods;
+  }
+  
   private void closeConnection(Connection connection) {
     synchronized (connectionList) {
       if (connectionList.remove(connection))
@@ -2045,16 +2061,6 @@ public abstract class Server {
     return conf;
   }
   
-  /** for unit testing only, should be called before server is started */ 
-  void disableSecurity() {
-    this.isSecurityEnabled = false;
-  }
-  
-  /** for unit testing only, should be called before server is started */ 
-  void enableSecurity() {
-    this.isSecurityEnabled = true;
-  }
-  
   /** Sets the socket buffer size used for responding to RPCs */
   public void setSocketSendBufSize(int size) { this.socketSendBufferSize = size; }
 

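The server now fixes its accepted auth methods once, at construction, instead of consulting isSecurityEnabled() per connection (the test-only enableSecurity/disableSecurity hooks go away with it). getAuthMethods starts from the configured method and implicitly adds token support whenever a secret manager is present, while configuring TOKEN without a secret manager fails fast. A sketch of the resulting sets, assuming TOKEN maps to the DIGEST wire method as the DIGEST case in createSaslServer suggests:

    import java.util.EnumSet;
    import org.apache.hadoop.security.SaslRpcServer.AuthMethod;

    final class EnabledAuthSketch {
      // configured=SIMPLE,   secret manager present -> {SIMPLE, DIGEST}
      // configured=KERBEROS, secret manager present -> {KERBEROS, DIGEST}
      // configured=KERBEROS, no secret manager      -> {KERBEROS}
      static EnumSet<AuthMethod> effective(AuthMethod configured,
                                           boolean hasSecretManager) {
        EnumSet<AuthMethod> methods = EnumSet.of(configured);
        if (hasSecretManager) {
          methods.add(AuthMethod.DIGEST); // token auth rides on DIGEST-MD5
        }
        return methods;
      }
    }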
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java Fri Nov 30 19:58:09 2012
@@ -28,6 +28,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.util.ReflectionUtils;
 
 /** The class represents a cluster of computer with a tree hierarchical
  * network topology.
@@ -52,6 +55,19 @@ public class NetworkTopology {
       super(msg);
     }
   }
+  
+  /**
+   * Get an instance of NetworkTopology based on the value of the configuration
+   * parameter net.topology.impl.
+   * 
+   * @param conf the configuration to be used
+   * @return an instance of NetworkTopology
+   */
+  public static NetworkTopology getInstance(Configuration conf){
+    return ReflectionUtils.newInstance(
+        conf.getClass(CommonConfigurationKeysPublic.NET_TOPOLOGY_IMPL_KEY,
+        NetworkTopology.class, NetworkTopology.class), conf);
+  }
 
   /** InnerNode represents a switch/router of a data center or rack.
    * Different from a leaf node, it has non-null children.

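The new factory lets deployments substitute a custom topology class without touching callers. Usage sketch, with the stock class standing in for a hypothetical custom implementation:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
    import org.apache.hadoop.net.NetworkTopology;

    class TopologySketch {
      static NetworkTopology load() {
        Configuration conf = new Configuration();
        // Any subclass of NetworkTopology may be set under net.topology.impl;
        // when the key is absent, getInstance returns a plain NetworkTopology.
        conf.setClass(CommonConfigurationKeysPublic.NET_TOPOLOGY_IMPL_KEY,
            NetworkTopology.class, NetworkTopology.class);
        return NetworkTopology.getInstance(conf);
      }
    }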
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java Fri Nov 30 19:58:09 2012
@@ -145,15 +145,13 @@ public class SaslRpcClient {
       byte[] saslToken = new byte[0];
       if (saslClient.hasInitialResponse())
         saslToken = saslClient.evaluateChallenge(saslToken);
-      if (saslToken != null) {
+      while (saslToken != null) {
         outStream.writeInt(saslToken.length);
         outStream.write(saslToken, 0, saslToken.length);
         outStream.flush();
         if (LOG.isDebugEnabled())
           LOG.debug("Have sent token of size " + saslToken.length
               + " from initSASLContext.");
-      }
-      if (!saslClient.isComplete()) {
         readStatus(inStream);
         int len = inStream.readInt();
         if (len == SaslRpcServer.SWITCH_TO_SIMPLE_AUTH) {
@@ -161,32 +159,18 @@ public class SaslRpcClient {
             LOG.debug("Server asks us to fall back to simple auth.");
           saslClient.dispose();
           return false;
+        } else if ((len == 0) && saslClient.isComplete()) {
+          break;
         }
         saslToken = new byte[len];
         if (LOG.isDebugEnabled())
           LOG.debug("Will read input token of size " + saslToken.length
               + " for processing by initSASLContext");
         inStream.readFully(saslToken);
-      }
-
-      while (!saslClient.isComplete()) {
         saslToken = saslClient.evaluateChallenge(saslToken);
-        if (saslToken != null) {
-          if (LOG.isDebugEnabled())
-            LOG.debug("Will send token of size " + saslToken.length
-                + " from initSASLContext.");
-          outStream.writeInt(saslToken.length);
-          outStream.write(saslToken, 0, saslToken.length);
-          outStream.flush();
-        }
-        if (!saslClient.isComplete()) {
-          readStatus(inStream);
-          saslToken = new byte[inStream.readInt()];
-          if (LOG.isDebugEnabled())
-            LOG.debug("Will read input token of size " + saslToken.length
-                + " for processing by initSASLContext");
-          inStream.readFully(saslToken);
-        }
+      }
+      if (!saslClient.isComplete()) { // shouldn't happen
+        throw new SaslException("Internal negotiation error");
       }
       if (LOG.isDebugEnabled()) {
         LOG.debug("SASL client context established. Negotiated QoP: "

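This rewrite folds three near-duplicate blocks (initial send, first read, follow-up loop) into one canonical SASL negotiation loop: a null token from evaluateChallenge, or an empty server reply once the client is complete, ends the exchange. Its distilled shape, as a sketch (the real method also checks a status word before each read, logs, and disposes the client; the switch-to-simple sentinel is whatever SaslRpcServer defines):

    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import javax.security.sasl.SaslClient;
    import javax.security.sasl.SaslException;

    final class SaslLoopSketch {
      static boolean negotiate(SaslClient sasl, DataInputStream in,
          DataOutputStream out, int switchToSimple) throws IOException {
        byte[] token = new byte[0];
        if (sasl.hasInitialResponse()) {
          token = sasl.evaluateChallenge(token);
        }
        while (token != null) {
          out.writeInt(token.length);            // length-prefixed send
          out.write(token, 0, token.length);
          out.flush();
          int len = in.readInt();
          if (len == switchToSimple) {
            return false;                        // server: fall back to simple auth
          }
          if (len == 0 && sasl.isComplete()) {
            break;                               // empty reply ends the exchange
          }
          token = new byte[len];
          in.readFully(token);
          token = sasl.evaluateChallenge(token); // feed the challenge back in
        }
        if (!sasl.isComplete()) {
          throw new SaslException("Internal negotiation error");
        }
        return true;
      }
    }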
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java Fri Nov 30 19:58:09 2012
@@ -23,6 +23,7 @@ import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.security.Security;
 import java.util.Map;
 import java.util.TreeMap;
 
@@ -89,6 +90,7 @@ public class SaslRpcServer {
     
     SASL_PROPS.put(Sasl.QOP, saslQOP.getSaslQop());
     SASL_PROPS.put(Sasl.SERVER_AUTH, "true");
+    Security.addProvider(new SaslPlainServer.SecurityProvider());
   }
   
   static String encodeIdentifier(byte[] identifier) {
@@ -138,7 +140,8 @@ public class SaslRpcServer {
   public static enum AuthMethod {
     SIMPLE((byte) 80, ""),
     KERBEROS((byte) 81, "GSSAPI"),
-    DIGEST((byte) 82, "DIGEST-MD5");
+    DIGEST((byte) 82, "DIGEST-MD5"),
+    PLAIN((byte) 83, "PLAIN");
 
     /** The code for this method. */
     public final byte code;

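Two related additions here: AuthMethod grows a PLAIN entry (byte 83, SASL mechanism "PLAIN"), and the static initializer registers SaslPlainServer.SecurityProvider with the JCA. The registration matters because the stock JDK ships a PLAIN SaslClient but no PLAIN SaslServer, so Sasl.createSaslServer("PLAIN", ...) would otherwise find no factory. The provider pattern, sketched with hypothetical names (SaslPlainServer's real internals are not shown in this diff):

    import java.security.Provider;
    import java.security.Security;

    final class PlainProviderSketch extends Provider {
      PlainProviderSketch() {
        super("SaslPlainSketch", 1.0, "SASL PLAIN server factory (sketch)");
        // JCA key format "SaslServerFactory.<mechanism>" -> factory class name.
        put("SaslServerFactory.PLAIN", "org.example.PlainServerFactory"); // hypothetical
      }

      static void register() {
        Security.addProvider(new PlainProviderSketch());
      }
    }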

