hadoop-common-commits mailing list archives

From: sur...@apache.org
Subject: svn commit: r1420375 [1/2] - in /hadoop/common/branches/branch-trunk-win/hadoop-common-project: hadoop-auth/ hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/ hadoop-auth/src/site/apt/ hadoop-auth/src/test/java/org/apache/hado...
Date: Tue, 11 Dec 2012 20:09:11 GMT
Author: suresh
Date: Tue Dec 11 20:08:00 2012
New Revision: 1420375

URL: http://svn.apache.org/viewvc?rev=1420375&view=rev
Log:
Merging trunk to branch-trunk-win

Added:
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java
      - copied unchanged from r1420366, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAltKerberosAuthenticationHandler.java
      - copied unchanged from r1420366, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAltKerberosAuthenticationHandler.java
Modified:
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/pom.xml
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/src/site/apt/index.apt.vm
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextCreateMkdirBaseTest.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextSymlinkBaseTest.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextUtilBase.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextSymlink.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFsFCStatistics.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegation.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemLocalFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemWithAuthorityLocalFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElector.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestUTF8.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/pom.xml?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/pom.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/pom.xml Tue Dec 11 20:08:00 2012
@@ -110,6 +110,7 @@
             <exclude>**/${test.exclude}.java</exclude>
             <exclude>${test.exclude.pattern}</exclude>
             <exclude>**/TestKerberosAuth*.java</exclude>
+            <exclude>**/TestAltKerberosAuth*.java</exclude>
             <exclude>**/Test*$*.java</exclude>
           </excludes>
         </configuration>

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm Tue Dec 11 20:08:00 2012
@@ -178,4 +178,71 @@ Configuration
 </web-app>
 +---+
 
+** AltKerberos Configuration
+
+  <<IMPORTANT>>: A KDC must be configured and running.
+
+  The AltKerberos authentication mechanism is a partially implemented
+  derivative of the Kerberos SPNEGO authentication mechanism that allows a
+  "mixed" form of authentication, where Kerberos SPNEGO is used by
+  non-browsers while an alternate form of authentication (to be implemented
+  by the user) is used for browsers.  To use AltKerberos as the
+  authentication mechanism (besides providing an implementation), the
+  authentication filter must be configured with the following init
+  parameters, in addition to the previously mentioned Kerberos SPNEGO ones:
+
+    * <<<[PREFIX.]type>>>: the full class name of the implementation of
+      AltKerberosAuthenticationHandler to use.
+
+    * <<<[PREFIX.]alt-kerberos.non-browser.user-agents>>>: a comma-separated
+      list of which user-agents should be considered non-browsers.
+
+  <<Example>>:
+
++---+
+<web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee">
+    ...
+
+    <filter>
+        <filter-name>kerberosFilter</filter-name>
+        <filter-class>org.apache.hadoop.security.authentication.server.AuthenticationFilter</filter-class>
+        <init-param>
+            <param-name>type</param-name>
+            <param-value>org.my.subclass.of.AltKerberosAuthenticationHandler</param-value>
+        </init-param>
+        <init-param>
+            <param-name>alt-kerberos.non-browser.user-agents</param-name>
+            <param-value>java,curl,wget,perl</param-value>
+        </init-param>
+        <init-param>
+            <param-name>token.validity</param-name>
+            <param-value>30</param-value>
+        </init-param>
+        <init-param>
+            <param-name>cookie.domain</param-name>
+            <param-value>.foo.com</param-value>
+        </init-param>
+        <init-param>
+            <param-name>cookie.path</param-name>
+            <param-value>/</param-value>
+        </init-param>
+        <init-param>
+            <param-name>kerberos.principal</param-name>
+            <param-value>HTTP/localhost@LOCALHOST</param-value>
+        </init-param>
+        <init-param>
+            <param-name>kerberos.keytab</param-name>
+            <param-value>/tmp/auth.keytab</param-value>
+        </init-param>
+    </filter>
+
+    <filter-mapping>
+        <filter-name>kerberosFilter</filter-name>
+        <url-pattern>/kerberos/*</url-pattern>
+    </filter-mapping>
+
+    ...
+</web-app>
++---+
+
   \[ {{{./index.html}Go Back}} \]

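For context on the example above, the alternate browser authentication comes from subclassing AltKerberosAuthenticationHandler. Below is a minimal sketch, assuming the handler exposes an alternateAuthenticate() hook for the browser path; the class name MyAltKerberosHandler and the SSO header are hypothetical:

    import java.io.IOException;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;
    import org.apache.hadoop.security.authentication.client.AuthenticationException;
    import org.apache.hadoop.security.authentication.server.AltKerberosAuthenticationHandler;
    import org.apache.hadoop.security.authentication.server.AuthenticationToken;

    // Hypothetical subclass: browsers authenticate via a custom SSO header,
    // while the listed non-browser user-agents still use Kerberos SPNEGO.
    public class MyAltKerberosHandler extends AltKerberosAuthenticationHandler {
      @Override
      public AuthenticationToken alternateAuthenticate(HttpServletRequest request,
          HttpServletResponse response) throws IOException, AuthenticationException {
        String user = request.getHeader("X-My-SSO-User"); // assumed SSO header
        if (user == null) {
          // null tells the filter that authentication is still in progress
          response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
          return null;
        }
        return new AuthenticationToken(user, user, getType());
      }
    }

Such a class is what the "type" init parameter in the example above would name.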
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/src/site/apt/index.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/src/site/apt/index.apt.vm?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/src/site/apt/index.apt.vm (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/src/site/apt/index.apt.vm Tue Dec 11 20:08:00 2012
@@ -24,6 +24,11 @@ Hadoop Auth, Java HTTP SPNEGO ${project.
   Hadoop Auth also supports additional authentication mechanisms on the client
   and the server side via 2 simple interfaces.
 
+  Additionally, it provides a partially implemented derivative of Kerberos
+  SPNEGO authentication that allows a "mixed" form of authentication, where
+  Kerberos SPNEGO is used by non-browsers while an alternate form of
+  authentication (to be implemented by the user) is used for browsers.
+
 * License
 
   Hadoop Auth is distributed under {{{http://www.apache.org/licenses/}Apache

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java Tue Dec 11 20:08:00 2012
@@ -28,23 +28,37 @@ import org.ietf.jgss.Oid;
 
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-import java.lang.reflect.Field;
 import java.util.Properties;
 import java.util.concurrent.Callable;
 
 public class TestKerberosAuthenticationHandler extends TestCase {
 
-  private KerberosAuthenticationHandler handler;
+  protected KerberosAuthenticationHandler handler;
+
+  protected KerberosAuthenticationHandler getNewAuthenticationHandler() {
+    return new KerberosAuthenticationHandler();
+  }
+
+  protected String getExpectedType() {
+    return KerberosAuthenticationHandler.TYPE;
+  }
+
+  protected Properties getDefaultProperties() {
+    Properties props = new Properties();
+    props.setProperty(KerberosAuthenticationHandler.PRINCIPAL,
+            KerberosTestUtils.getServerPrincipal());
+    props.setProperty(KerberosAuthenticationHandler.KEYTAB,
+            KerberosTestUtils.getKeytabFile());
+    props.setProperty(KerberosAuthenticationHandler.NAME_RULES,
+            "RULE:[1:$1@$0](.*@" + KerberosTestUtils.getRealm()+")s/@.*//\n");
+    return props;
+  }
 
   @Override
   protected void setUp() throws Exception {
     super.setUp();
-    handler = new KerberosAuthenticationHandler();
-    Properties props = new Properties();
-    props.setProperty(KerberosAuthenticationHandler.PRINCIPAL, KerberosTestUtils.getServerPrincipal());
-    props.setProperty(KerberosAuthenticationHandler.KEYTAB, KerberosTestUtils.getKeytabFile());
-    props.setProperty(KerberosAuthenticationHandler.NAME_RULES,
-                      "RULE:[1:$1@$0](.*@" + KerberosTestUtils.getRealm()+")s/@.*//\n");
+    handler = getNewAuthenticationHandler();
+    Properties props = getDefaultProperties();
     try {
       handler.init(props);
     } catch (Exception ex) {
@@ -71,10 +85,8 @@ public class TestKerberosAuthenticationH
 
     KerberosName.setRules("RULE:[1:$1@$0](.*@FOO)s/@.*//\nDEFAULT");
     
-    handler = new KerberosAuthenticationHandler();
-    Properties props = new Properties();
-    props.setProperty(KerberosAuthenticationHandler.PRINCIPAL, KerberosTestUtils.getServerPrincipal());
-    props.setProperty(KerberosAuthenticationHandler.KEYTAB, KerberosTestUtils.getKeytabFile());
+    handler = getNewAuthenticationHandler();
+    Properties props = getDefaultProperties();
     props.setProperty(KerberosAuthenticationHandler.NAME_RULES, "RULE:[1:$1@$0](.*@BAR)s/@.*//\nDEFAULT");
     try {
       handler.init(props);
@@ -97,8 +109,7 @@ public class TestKerberosAuthenticationH
   }
 
   public void testType() throws Exception {
-    KerberosAuthenticationHandler handler = new KerberosAuthenticationHandler();
-    assertEquals(KerberosAuthenticationHandler.TYPE, handler.getType());
+    assertEquals(getExpectedType(), handler.getType());
   }
 
   public void testRequestWithoutAuthorization() throws Exception {
@@ -182,7 +193,7 @@ public class TestKerberosAuthenticationH
 
       assertEquals(KerberosTestUtils.getClientPrincipal(), authToken.getName());
       assertTrue(KerberosTestUtils.getClientPrincipal().startsWith(authToken.getUserName()));
-      assertEquals(KerberosAuthenticationHandler.TYPE, authToken.getType());
+      assertEquals(getExpectedType(), authToken.getType());
     } else {
       Mockito.verify(response).setHeader(Mockito.eq(KerberosAuthenticator.WWW_AUTHENTICATE),
                                          Mockito.matches(KerberosAuthenticator.NEGOTIATE + " .*"));

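The point of the refactoring above is that handler construction, expected type, and default properties are now protected factory methods, so the new TestAltKerberosAuthenticationHandler can inherit every Kerberos test case unchanged. A sketch of such a subclass (the anonymous handler body is illustrative, not the committed test):

    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;
    import org.apache.hadoop.security.authentication.server.AltKerberosAuthenticationHandler;
    import org.apache.hadoop.security.authentication.server.AuthenticationToken;
    import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;

    public class TestAltKerberosAuthenticationHandler
        extends TestKerberosAuthenticationHandler {

      @Override
      protected KerberosAuthenticationHandler getNewAuthenticationHandler() {
        // An AltKerberos handler whose browser path is stubbed out, so the
        // inherited tests exercise the Kerberos SPNEGO (non-browser) path.
        return new AltKerberosAuthenticationHandler() {
          @Override
          public AuthenticationToken alternateAuthenticate(
              HttpServletRequest request, HttpServletResponse response) {
            return null; // browser path is not under test here
          }
        };
      }

      @Override
      protected String getExpectedType() {
        return AltKerberosAuthenticationHandler.TYPE;
      }
    }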
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt Tue Dec 11 20:08:00 2012
@@ -143,6 +143,9 @@ Trunk (Unreleased)
 
   BUG FIXES
 
+    HADOOP-8418. Update UGI Principal classes name for running with
+    IBM JDK on 64 bits Windows.  (Yu Gao via eyang)
+
     HADOOP-8177. MBeans shouldn't try to register when it fails to create MBeanName.
     (Devaraj K via umamahesh)
 
@@ -289,6 +292,12 @@ Trunk (Unreleased)
     HADOOP-9037. Bug in test-patch.sh and precommit build process (Kihwal Lee
     via jlowe)
 
+    HADOOP-9121. InodeTree.java has redundant check for vName while 
+    throwing exception. (Arup Malakar via suresh)
+
+    HADOOP-9131. Turn off TestLocalFileSystem#testListStatusWithColons on
+    Windows. (Chris Nauroth via suresh)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)
@@ -306,6 +315,12 @@ Release 2.0.3-alpha - Unreleased 
 
     HADOOP-9020. Add a SASL PLAIN server (daryn via bobby)
 
+    HADOOP-9090. Support on-demand publish of metrics. (Mostafa Elhemali via
+    suresh)
+
+    HADOOP-9054. Add AuthenticationHandler that uses Kerberos but allows for 
+    an alternate form of authentication for browsers. (rkanter via tucu)
+
   IMPROVEMENTS
 
     HADOOP-8789. Tests setLevel(Level.OFF) should be Level.ERROR.
@@ -456,6 +471,17 @@ Release 2.0.3-alpha - Unreleased 
     HADOOP-8958. ViewFs:Non absolute mount name failures when running 
     multiple tests on Windows. (Chris Nauroth via suresh)
 
+    HADOOP-9103. UTF8 class does not properly decode Unicode characters
+    outside the basic multilingual plane. (todd)
+
+    HADOOP-9070. Kerberos SASL server cannot find kerberos key. (daryn via atm)
+
+    HADOOP-6762. Exception while doing RPC I/O closes channel
+    (Sam Rash and todd via todd)
+
+    HADOOP-9126. FormatZK and ZKFC startup can fail due to zkclient connection
+    establishment delay. (Rakesh R and todd via todd)
+
 Release 2.0.2-alpha - 2012-09-07 
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1415787-1420366

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1415787-1420366

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1415787-1420366

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java Tue Dec 11 20:08:00 2012
@@ -118,8 +118,7 @@ abstract class InodeTree<T> {
       return result;
     }
     
-    INode<T> resolveInternal(final String pathComponent)
-        throws FileNotFoundException {
+    INode<T> resolveInternal(final String pathComponent) {
       return children.get(pathComponent);
     }
     
@@ -336,8 +335,8 @@ abstract class InodeTree<T> {
     }
     if (!gotMountTableEntry) {
       throw new IOException(
-          "ViewFs: Cannot initialize: Empty Mount table in config for " + 
-             vName == null ? "viewfs:///" : ("viewfs://" + vName + "/"));
+          "ViewFs: Cannot initialize: Empty Mount table in config for " +
+             "viewfs://" + vName + "/");
     }
   }
 

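The removed check was subtler than redundant: because + binds tighter than ==, the old expression compared the already-concatenated message to null, so the ternary could never yield "viewfs:///" and the exception text collapsed to just the URI. A self-contained illustration of the precedence pitfall:

    public class PrecedencePitfall {
      public static void main(String[] args) {
        String vName = null;
        // Parses as ("ViewFs: ..." + vName) == null, which is always false,
        // so the "viewfs:///" branch is unreachable and the explanatory
        // prefix is swallowed by the comparison.
        String msg = "ViewFs: Cannot initialize: Empty Mount table in config for " +
            vName == null ? "viewfs:///" : ("viewfs://" + vName + "/");
        System.out.println(msg); // prints viewfs://null/
      }
    }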
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java Tue Dec 11 20:08:00 2012
@@ -21,6 +21,8 @@ package org.apache.hadoop.ha;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
@@ -45,6 +47,7 @@ import org.apache.zookeeper.KeeperExcept
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
 
 /**
  * 
@@ -205,7 +208,7 @@ public class ActiveStandbyElector implem
       int zookeeperSessionTimeout, String parentZnodeName, List<ACL> acl,
       List<ZKAuthInfo> authInfo,
       ActiveStandbyElectorCallback app) throws IOException,
-      HadoopIllegalArgumentException {
+      HadoopIllegalArgumentException, KeeperException {
     if (app == null || acl == null || parentZnodeName == null
         || zookeeperHostPorts == null || zookeeperSessionTimeout <= 0) {
       throw new HadoopIllegalArgumentException("Invalid argument");
@@ -602,10 +605,24 @@ public class ActiveStandbyElector implem
    * 
    * @return new zookeeper client instance
    * @throws IOException
+   * @throws KeeperException if the ZooKeeper connection is lost
    */
-  protected synchronized ZooKeeper getNewZooKeeper() throws IOException {
-    ZooKeeper zk = new ZooKeeper(zkHostPort, zkSessionTimeout, null);
-    zk.register(new WatcherWithClientRef(zk));
+  protected synchronized ZooKeeper getNewZooKeeper() throws IOException,
+      KeeperException {
+    
+    // Unfortunately, the ZooKeeper constructor connects to ZooKeeper and
+    // may trigger the Connected event immediately. So, if we register the
+    // watcher after constructing ZooKeeper, we may miss that event. Instead,
+    // we construct the watcher first, and have it queue any events it receives
+    // before we can set its ZooKeeper reference.
+    WatcherWithClientRef watcher = new WatcherWithClientRef();
+    ZooKeeper zk = new ZooKeeper(zkHostPort, zkSessionTimeout, watcher);
+    watcher.setZooKeeperRef(zk);
+
+    // Wait for the asynchronous success/failure. This may throw an exception
+    // if we don't connect within the session timeout.
+    watcher.waitForZKConnectionEvent(zkSessionTimeout);
+    
     for (ZKAuthInfo auth : zkAuthInfo) {
       zk.addAuthInfo(auth.getScheme(), auth.getAuth());
     }
@@ -710,13 +727,16 @@ public class ActiveStandbyElector implem
       } catch(IOException e) {
         LOG.warn(e);
         sleepFor(5000);
+      } catch(KeeperException e) {
+        LOG.warn(e);
+        sleepFor(5000);
       }
       ++connectionRetryCount;
     }
     return success;
   }
 
-  private void createConnection() throws IOException {
+  private void createConnection() throws IOException, KeeperException {
     if (zkClient != null) {
       try {
         zkClient.close();
@@ -973,14 +993,76 @@ public class ActiveStandbyElector implem
    * events.
    */
   private final class WatcherWithClientRef implements Watcher {
-    private final ZooKeeper zk;
+    private ZooKeeper zk;
+    
+    /**
+     * Latch fired whenever any event arrives. This is used in order
+     * to wait for the Connected event when the client is first created.
+     */
+    private CountDownLatch hasReceivedEvent = new CountDownLatch(1);
+
+    /**
+     * If any events arrive before the reference to ZooKeeper is set,
+     * they get queued up and later forwarded when the reference is
+     * available.
+     */
+    private final List<WatchedEvent> queuedEvents = Lists.newLinkedList();
+    
+    private WatcherWithClientRef() {
+    }
 
     private WatcherWithClientRef(ZooKeeper zk) {
       this.zk = zk;
     }
+    
+    /**
+     * Waits for the next event from ZooKeeper to arrive.
+     * 
+     * @param connectionTimeoutMs zookeeper connection timeout in milliseconds
+     * @throws KeeperException if the connection attempt times out. This will
+     * be a ZooKeeper ConnectionLoss exception code.
+     * @throws IOException if interrupted while connecting to ZooKeeper
+     */
+    private void waitForZKConnectionEvent(int connectionTimeoutMs)
+        throws KeeperException, IOException {
+      try {
+        if (!hasReceivedEvent.await(connectionTimeoutMs, TimeUnit.MILLISECONDS)) {
+          LOG.error("Connection timed out: couldn't connect to ZooKeeper in "
+              + connectionTimeoutMs + " milliseconds");
+          synchronized (this) {
+            zk.close();
+          }
+          throw KeeperException.create(Code.CONNECTIONLOSS);
+        }
+      } catch (InterruptedException e) {
+        Thread.currentThread().interrupt();
+        throw new IOException(
+            "Interrupted when connecting to zookeeper server", e);
+      }
+    }
+
+    private synchronized void setZooKeeperRef(ZooKeeper zk) {
+      Preconditions.checkState(this.zk == null,
+          "zk already set -- must be set exactly once");
+      this.zk = zk;
+      
+      for (WatchedEvent e : queuedEvents) {
+        forwardEvent(e);
+      }
+      queuedEvents.clear();
+    }
 
     @Override
-    public void process(WatchedEvent event) {
+    public synchronized void process(WatchedEvent event) {
+      if (zk != null) {
+        forwardEvent(event);
+      } else {
+        queuedEvents.add(event);
+      }
+    }
+    
+    private void forwardEvent(WatchedEvent event) {
+      hasReceivedEvent.countDown();
       try {
         ActiveStandbyElector.this.processWatchEvent(
             zk, event);
@@ -1024,5 +1106,4 @@ public class ActiveStandbyElector implem
       ((appData == null) ? "null" : StringUtils.byteToHexString(appData)) + 
       " cb=" + appClient;
   }
-
 }

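The race fixed here is inherent to the ZooKeeper client API: the constructor may deliver the Connected event before the caller has stored the client reference. A condensed, standalone sketch of the construct-then-attach pattern used above, with the forwarding logic simplified:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.CountDownLatch;
    import org.apache.zookeeper.WatchedEvent;
    import org.apache.zookeeper.Watcher;
    import org.apache.zookeeper.ZooKeeper;

    class QueueingWatcher implements Watcher {
      // awaited by the connect path, cf. waitForZKConnectionEvent() above
      private final CountDownLatch connected = new CountDownLatch(1);
      private final List<WatchedEvent> queued = new ArrayList<WatchedEvent>();
      private ZooKeeper zk;

      @Override
      public synchronized void process(WatchedEvent event) {
        if (zk == null) {
          queued.add(event); // reference not set yet: buffer the event
          return;
        }
        forward(event);
      }

      synchronized void setZooKeeperRef(ZooKeeper zk) {
        this.zk = zk;
        for (WatchedEvent e : queued) { // replay anything that raced us
          forward(e);
        }
        queued.clear();
      }

      private void forward(WatchedEvent event) {
        connected.countDown();
        // application-specific handling of (zk, event) goes here
      }
    }

The caller mirrors getNewZooKeeper(): construct the watcher, pass it to the ZooKeeper constructor, then call setZooKeeperRef(zk); any events buffered in between are replayed at that point.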
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java Tue Dec 11 20:08:00 2012
@@ -180,7 +180,15 @@ public abstract class ZKFailoverControll
 
   private int doRun(String[] args)
       throws HadoopIllegalArgumentException, IOException, InterruptedException {
-    initZK();
+    try {
+      initZK();
+    } catch (KeeperException ke) {
+      LOG.fatal("Unable to start failover controller. Unable to connect "
+          + "to ZooKeeper quorum at " + zkQuorum + ". Please check the "
+          + "configured value for " + ZK_QUORUM_KEY + " and ensure that "
+          + "ZooKeeper is running.");
+      return ERR_CODE_NO_ZK;
+    }
     if (args.length > 0) {
       if ("-formatZK".equals(args[0])) {
         boolean force = false;
@@ -199,24 +207,12 @@ public abstract class ZKFailoverControll
         badArg(args[0]);
       }
     }
-    
-    try {
-      if (!elector.parentZNodeExists()) {
-        LOG.fatal("Unable to start failover controller. " +
-            "Parent znode does not exist.\n" +
-            "Run with -formatZK flag to initialize ZooKeeper.");
-        return ERR_CODE_NO_PARENT_ZNODE;
-      }
-    } catch (IOException ioe) {
-      if (ioe.getCause() instanceof KeeperException.ConnectionLossException) {
-        LOG.fatal("Unable to start failover controller. Unable to connect " +
-            "to ZooKeeper quorum at " + zkQuorum + ". Please check the " +
-            "configured value for " + ZK_QUORUM_KEY + " and ensure that " +
-            "ZooKeeper is running.");
-        return ERR_CODE_NO_ZK;
-      } else {
-        throw ioe;
-      }
+
+    if (!elector.parentZNodeExists()) {
+      LOG.fatal("Unable to start failover controller. "
+          + "Parent znode does not exist.\n"
+          + "Run with -formatZK flag to initialize ZooKeeper.");
+      return ERR_CODE_NO_PARENT_ZNODE;
     }
 
     try {
@@ -310,7 +306,8 @@ public abstract class ZKFailoverControll
   }
 
 
-  private void initZK() throws HadoopIllegalArgumentException, IOException {
+  private void initZK() throws HadoopIllegalArgumentException, IOException,
+      KeeperException {
     zkQuorum = conf.get(ZK_QUORUM_KEY);
     int zkTimeout = conf.getInt(ZK_SESSION_TIMEOUT_KEY,
         ZK_SESSION_TIMEOUT_DEFAULT);

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java Tue Dec 11 20:08:00 2012
@@ -1858,10 +1858,10 @@ public class SequenceFile {
         UTF8 className = new UTF8();
 
         className.readFields(in);
-        keyClassName = className.toString(); // key class name
+        keyClassName = className.toStringChecked(); // key class name
 
         className.readFields(in);
-        valClassName = className.toString(); // val class name
+        valClassName = className.toStringChecked(); // val class name
       } else {
         keyClassName = Text.readString(in);
         valClassName = Text.readString(in);

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java Tue Dec 11 20:08:00 2012
@@ -21,7 +21,9 @@ package org.apache.hadoop.io;
 import java.io.IOException;
 import java.io.DataInput;
 import java.io.DataOutput;
+import java.io.UTFDataFormatException;
 
+import org.apache.hadoop.util.StringUtils;
 
 import org.apache.commons.logging.*;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -31,6 +33,9 @@ import org.apache.hadoop.classification.
  * 
  * <p>Also includes utilities for efficiently reading and writing UTF-8.
  *
+ * Note that this decodes UTF-8 but actually encodes CESU-8, a variant of
+ * UTF-8: see http://en.wikipedia.org/wiki/CESU-8
+ *
  * @deprecated replaced by Text
  */
 @Deprecated
@@ -151,6 +156,21 @@ public class UTF8 implements WritableCom
     }
     return buffer.toString();
   }
+  
+  /**
+   * Convert to a string, checking for valid UTF8.
+   * @return the converted string
+   * @throws UTFDataFormatException if the underlying bytes contain invalid
+   * UTF8 data.
+   */
+  public String toStringChecked() throws IOException {
+    StringBuilder buffer = new StringBuilder(length);
+    synchronized (IBUF) {
+      IBUF.reset(bytes, length);
+      readChars(IBUF, buffer, length);
+    }
+    return buffer.toString();
+  }
 
   /** Returns true iff <code>o</code> is a UTF8 with the same contents.  */
   @Override
@@ -209,6 +229,19 @@ public class UTF8 implements WritableCom
     return result;
   }
 
+  /**
+   * Convert a UTF-8 encoded byte array back into a string.
+   *
+   * @throws IOException if the byte array is invalid UTF8
+   */
+  public static String fromBytes(byte[] bytes) throws IOException {
+    DataInputBuffer dbuf = new DataInputBuffer();
+    dbuf.reset(bytes, 0, bytes.length);
+    StringBuilder buf = new StringBuilder(bytes.length);
+    readChars(dbuf, buf, bytes.length);
+    return buf.toString();
+  }
+
   /** Read a UTF-8 encoded string.
    *
    * @see DataInput#readUTF()
@@ -221,7 +254,7 @@ public class UTF8 implements WritableCom
   }
 
   private static void readChars(DataInput in, StringBuilder buffer, int nBytes)
-    throws IOException {
+    throws UTFDataFormatException, IOException {
     DataOutputBuffer obuf = OBUF_FACTORY.get();
     obuf.reset();
     obuf.write(in, nBytes);
@@ -230,18 +263,60 @@ public class UTF8 implements WritableCom
     while (i < nBytes) {
       byte b = bytes[i++];
       if ((b & 0x80) == 0) {
+        // 0b0xxxxxxx: 1-byte sequence
         buffer.append((char)(b & 0x7F));
-      } else if ((b & 0xE0) != 0xE0) {
+      } else if ((b & 0xE0) == 0xC0) {
+        if (i >= nBytes) {
+          throw new UTFDataFormatException("Truncated UTF8 at " +
+              StringUtils.byteToHexString(bytes, i - 1, 1));
+        }
+        // 0b110xxxxx: 2-byte sequence
         buffer.append((char)(((b & 0x1F) << 6)
             | (bytes[i++] & 0x3F)));
-      } else {
+      } else if ((b & 0xF0) == 0xE0) {
+        // 0b1110xxxx: 3-byte sequence
+        if (i + 1 >= nBytes) {
+          throw new UTFDataFormatException("Truncated UTF8 at " +
+              StringUtils.byteToHexString(bytes, i - 1, 2));
+        }
         buffer.append((char)(((b & 0x0F) << 12)
             | ((bytes[i++] & 0x3F) << 6)
             |  (bytes[i++] & 0x3F)));
+      } else if ((b & 0xF8) == 0xF0) {
+        if (i + 2 >= nBytes) {
+          throw new UTFDataFormatException("Truncated UTF8 at " +
+              StringUtils.byteToHexString(bytes, i - 1, 3));
+        }
+        // 0b11110xxx: 4-byte sequence
+        int codepoint =
+            ((b & 0x07) << 18)
+          | ((bytes[i++] & 0x3F) <<  12)
+          | ((bytes[i++] & 0x3F) <<  6)
+          | ((bytes[i++] & 0x3F));
+        buffer.append(highSurrogate(codepoint))
+              .append(lowSurrogate(codepoint));
+      } else {
+        // The UTF8 standard describes 5-byte and 6-byte sequences, but
+        // these are no longer allowed as of 2003 (see RFC 3629)
+
+        // Only show the next 6 bytes max in the error code - in case the
+        // buffer is large, this will prevent an exceedingly large message.
+        int endForError = Math.min(i + 5, nBytes);
+        throw new UTFDataFormatException("Invalid UTF8 at " +
+            StringUtils.byteToHexString(bytes, i - 1, endForError));
       }
     }
   }
 
+  private static char highSurrogate(int codePoint) {
+    return (char) ((codePoint >>> 10)
+        + (Character.MIN_HIGH_SURROGATE - (Character.MIN_SUPPLEMENTARY_CODE_POINT >>> 10)));
+  }
+
+  private static char lowSurrogate(int codePoint) {
+    return (char) ((codePoint & 0x3ff) + Character.MIN_LOW_SURROGATE);
+  }
+
   /** Write a UTF-8 encoded string.
    *
    * @see DataOutput#writeUTF(String)

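The two surrogate helpers implement the standard UTF-16 split of a supplementary code point. A worked check, using U+1F600 as an assumed sample input; the arithmetic should agree with Character.toChars:

    public class SurrogateCheck {
      public static void main(String[] args) {
        int cp = 0x1F600; // a code point outside the basic multilingual plane

        // Same arithmetic as the highSurrogate/lowSurrogate helpers:
        char hi = (char) ((cp >>> 10)
            + (Character.MIN_HIGH_SURROGATE
               - (Character.MIN_SUPPLEMENTARY_CODE_POINT >>> 10)));
        char lo = (char) ((cp & 0x3ff) + Character.MIN_LOW_SURROGATE);

        System.out.printf("%04x %04x%n", (int) hi, (int) lo); // d83d de00
        char[] expected = Character.toChars(cp);
        assert expected[0] == hi && expected[1] == lo;
      }
    }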
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Tue Dec 11 20:08:00 2012
@@ -38,6 +38,11 @@ import java.util.Iterator;
 import java.util.Map.Entry;
 import java.util.Random;
 import java.util.Set;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.RejectedExecutionException;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
@@ -78,6 +83,8 @@ import org.apache.hadoop.util.ProtoUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Time;
 
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+
 /** A client for an IPC service.  IPC calls take a single {@link Writable} as a
  * parameter, and return a {@link Writable} as their value.  A service runs on
  * a port and is defined by a parameter class and a value class.
@@ -104,6 +111,19 @@ public class Client {
   final static int PING_CALL_ID = -1;
   
   /**
+   * Executor on which IPC calls' parameters are sent. Deferring
+   * the sending of parameters to a separate thread isolates them
+   * from thread interruptions in the calling code.
+   */
+  private static final ExecutorService SEND_PARAMS_EXECUTOR = 
+    Executors.newCachedThreadPool(
+        new ThreadFactoryBuilder()
+        .setDaemon(true)
+        .setNameFormat("IPC Parameter Sending Thread #%d")
+        .build());
+
+  
+  /**
    * set the ping interval value in configuration
    * 
    * @param conf Configuration
@@ -245,6 +265,8 @@ public class Client {
     private AtomicLong lastActivity = new AtomicLong();// last I/O activity time
     private AtomicBoolean shouldCloseConnection = new AtomicBoolean();  // indicate if the connection is closed
     private IOException closeException; // close reason
+    
+    private final Object sendParamsLock = new Object();
 
     public Connection(ConnectionId remoteId) throws IOException {
       this.remoteId = remoteId;
@@ -831,43 +853,76 @@ public class Client {
      * Note: this is not called from the Connection thread, but by other
      * threads.
      */
-    public void sendParam(Call call) {
+    public void sendParam(final Call call)
+        throws InterruptedException, IOException {
       if (shouldCloseConnection.get()) {
         return;
       }
 
-      DataOutputBuffer d=null;
-      try {
-        synchronized (this.out) {
-          if (LOG.isDebugEnabled())
-            LOG.debug(getName() + " sending #" + call.id);
+      // Serialize the call to be sent. This is done from the actual
+      // caller thread, rather than the SEND_PARAMS_EXECUTOR thread,
+      // so that if the serialization throws an error, it is reported
+      // properly. This also parallelizes the serialization.
+      //
+      // Format of a call on the wire:
+      // 0) Length of rest below (1 + 2)
+      // 1) PayloadHeader  - is serialized Delimited hence contains length
+      // 2) the Payload - the RpcRequest
+      //
+      // Items '1' and '2' are prepared here. 
+      final DataOutputBuffer d = new DataOutputBuffer();
+      RpcPayloadHeaderProto header = ProtoUtil.makeRpcPayloadHeader(
+         call.rpcKind, RpcPayloadOperationProto.RPC_FINAL_PAYLOAD, call.id);
+      header.writeDelimitedTo(d);
+      call.rpcRequest.write(d);
+
+      synchronized (sendParamsLock) {
+        Future<?> senderFuture = SEND_PARAMS_EXECUTOR.submit(new Runnable() {
+          @Override
+          public void run() {
+            try {
+              synchronized (Connection.this.out) {
+                if (shouldCloseConnection.get()) {
+                  return;
+                }
+                
+                if (LOG.isDebugEnabled())
+                  LOG.debug(getName() + " sending #" + call.id);
+         
+                byte[] data = d.getData();
+                int totalLength = d.getLength();
+                out.writeInt(totalLength); // Total Length
+                out.write(data, 0, totalLength);//PayloadHeader + RpcRequest
+                out.flush();
+              }
+            } catch (IOException e) {
+              // exception at this point would leave the connection in an
+              // unrecoverable state (eg half a call left on the wire).
+              // So, close the connection, killing any outstanding calls
+              markClosed(e);
+            } finally {
+              //the buffer is just an in-memory buffer, but it is still polite to
+              // close early
+              IOUtils.closeStream(d);
+            }
+          }
+        });
+      
+        try {
+          senderFuture.get();
+        } catch (ExecutionException e) {
+          Throwable cause = e.getCause();
           
-          // Serializing the data to be written.
-          // Format:
-          // 0) Length of rest below (1 + 2)
-          // 1) PayloadHeader  - is serialized Delimited hence contains length
-          // 2) the Payload - the RpcRequest
-          //
-          d = new DataOutputBuffer();
-          RpcPayloadHeaderProto header = ProtoUtil.makeRpcPayloadHeader(
-             call.rpcKind, RpcPayloadOperationProto.RPC_FINAL_PAYLOAD, call.id);
-          header.writeDelimitedTo(d);
-          call.rpcRequest.write(d);
-          byte[] data = d.getData();
-   
-          int totalLength = d.getLength();
-          out.writeInt(totalLength); // Total Length
-          out.write(data, 0, totalLength);//PayloadHeader + RpcRequest
-          out.flush();
+          // cause should only be a RuntimeException as the Runnable above
+          // catches IOException
+          if (cause instanceof RuntimeException) {
+            throw (RuntimeException) cause;
+          } else {
+            throw new RuntimeException("unexpected checked exception", cause);
+          }
         }
-      } catch(IOException e) {
-        markClosed(e);
-      } finally {
-        //the buffer is just an in-memory buffer, but it is still polite to
-        // close early
-        IOUtils.closeStream(d);
       }
-    }  
+    }
 
     /* Receive a response.
      * Because only one receiver, so no synchronization on in.
@@ -1138,7 +1193,16 @@ public class Client {
       ConnectionId remoteId) throws InterruptedException, IOException {
     Call call = new Call(rpcKind, rpcRequest);
     Connection connection = getConnection(remoteId, call);
-    connection.sendParam(call);                 // send the parameter
+    try {
+      connection.sendParam(call);                 // send the parameter
+    } catch (RejectedExecutionException e) {
+      throw new IOException("connection has been closed", e);
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
+      LOG.warn("interrupted waiting to send params to server", e);
+      throw new IOException(e);
+    }
+
     boolean interrupted = false;
     synchronized (call) {
       while (!call.done) {

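The executor indirection exists so that interrupting a caller blocked in sendParam cannot sever a write mid-frame: the caller waits on a Future while a dedicated sender thread owns the socket. A stripped-down sketch of the pattern, with a plain byte[] frame standing in for the serialized header plus payload:

    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;

    class FramedSender {
      private static final ExecutorService SENDER =
          Executors.newCachedThreadPool(); // daemon thread factory omitted

      // The caller serializes up front; only the socket write is handed off,
      // so an interrupt of the caller never leaves half a frame on the wire.
      void send(final DataOutputStream out, final byte[] frame)
          throws IOException, InterruptedException {
        Future<?> f = SENDER.submit(new Runnable() {
          @Override
          public void run() {
            try {
              synchronized (out) {
                out.writeInt(frame.length); // length prefix, as in sendParam
                out.write(frame);
                out.flush();
              }
            } catch (IOException e) {
              throw new RuntimeException(e); // surfaces via ExecutionException
            }
          }
        });
        try {
          f.get(); // an interrupt here cancels the wait, not the write
        } catch (ExecutionException e) {
          throw new IOException("write failed", e.getCause());
        }
      }
    }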
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Tue Dec 11 20:08:00 2012
@@ -199,7 +199,8 @@ public abstract class Server {
   //     in ObjectWritable to efficiently transmit arrays of primitives
   // 6 : Made RPC payload header explicit
   // 7 : Changed Ipc Connection Header to use Protocol buffers
-  public static final byte CURRENT_VERSION = 7;
+  // 8 : SASL server always sends a final response
+  public static final byte CURRENT_VERSION = 8;
 
   /**
    * Initial and max size of response buffer
@@ -1220,8 +1221,8 @@ public abstract class Server {
           AUDITLOG.warn(AUTH_FAILED_FOR + clientIP + ":" + attemptingUser);
           throw e;
         }
-        if (replyToken == null && authMethod == AuthMethod.PLAIN) {
-          // client needs at least response to know if it should use SIMPLE
+        if (saslServer.isComplete() && replyToken == null) {
+          // send final response for success
           replyToken = new byte[0];
         }
         if (replyToken != null) {
@@ -1392,7 +1393,7 @@ public abstract class Server {
     }
 
     private AuthMethod initializeAuthContext(AuthMethod authMethod)
-        throws IOException {
+        throws IOException, InterruptedException {
       try {
         if (enabledAuthMethods.contains(authMethod)) {
           saslServer = createSaslServer(authMethod);
@@ -1425,8 +1426,7 @@ public abstract class Server {
     }
 
     private SaslServer createSaslServer(AuthMethod authMethod)
-        throws IOException {
-      SaslServer saslServer = null;
+        throws IOException, InterruptedException {
       String hostname = null;
       String saslProtocol = null;
       CallbackHandler saslCallback = null;
@@ -1462,10 +1462,23 @@ public abstract class Server {
               "Server does not support SASL " + authMethod);
       }
       
-      String mechanism = authMethod.getMechanismName();
-      saslServer = Sasl.createSaslServer(
-          mechanism, saslProtocol, hostname,
-          SaslRpcServer.SASL_PROPS, saslCallback);
+      return createSaslServer(authMethod.getMechanismName(), saslProtocol,
+                              hostname, saslCallback);                                    
+    }
+
+    private SaslServer createSaslServer(final String mechanism,
+                                        final String protocol,
+                                        final String hostname,
+                                        final CallbackHandler callback
+        ) throws IOException, InterruptedException {
+      SaslServer saslServer = UserGroupInformation.getCurrentUser().doAs(
+          new PrivilegedExceptionAction<SaslServer>() {
+            @Override
+            public SaslServer run() throws SaslException  {
+              return Sasl.createSaslServer(mechanism, protocol, hostname,
+                                           SaslRpcServer.SASL_PROPS, callback);
+            }
+          });
       if (saslServer == null) {
         throw new AccessControlException(
             "Unable to find SASL server implementation for " + mechanism);

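Wrapping Sasl.createSaslServer in a doAs matters because the GSSAPI mechanism resolves the server's Kerberos key from the Subject attached to the current access-control context; that is the essence of the HADOOP-9070 fix. A minimal sketch of the same pattern against a raw JAAS Subject (the keytab-populated subject, protocol, and hostname are assumptions, and the callback handler is elided):

    import java.security.PrivilegedExceptionAction;
    import javax.security.auth.Subject;
    import javax.security.sasl.Sasl;
    import javax.security.sasl.SaslServer;

    class SaslUnderSubject {
      // 'serverSubject' is assumed to already hold the service's Kerberos
      // credentials, e.g. populated by a JAAS keytab login.
      static SaslServer create(Subject serverSubject) throws Exception {
        return Subject.doAs(serverSubject,
            new PrivilegedExceptionAction<SaslServer>() {
              @Override
              public SaslServer run() throws Exception {
                // GSSAPI looks up the Kerberos key from the Subject active
                // on this thread, which doAs establishes.
                return Sasl.createSaslServer("GSSAPI", "host",
                    "server.example.com", null, null);
              }
            });
      }
    }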
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java Tue Dec 11 20:08:00 2012
@@ -91,6 +91,17 @@ public abstract class MetricsSystem impl
   public abstract void register(Callback callback);
 
   /**
+   * Requests an immediate publish of all metrics from sources to sinks.
+   * 
+   * This is a "soft" request: the expectation is that a best effort will be
+   * done to synchronously snapshot the metrics from all the sources and put
+   * them in all the sinks (including flushing the sinks) before returning to
+   * the caller. If this can't be accomplished in reasonable time it's OK to
+   * return to the caller before everything is done. 
+   */
+  public abstract void publishMetricsNow();
+
+  /**
    * Shutdown the metrics system completely (usually during server shutdown.)
    * The MetricsSystemMXBean will be unregistered.
    * @return true if shutdown completed

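A natural use of the new on-demand publish is flushing the last datapoints before process exit, where waiting out the next timer period would lose them. A hypothetical caller:

    import org.apache.hadoop.metrics2.MetricsSystem;
    import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;

    public class FlushOnExit {
      public static void main(String[] args) {
        MetricsSystem ms = DefaultMetricsSystem.initialize("example");
        // ... run the job; sources accumulate metrics ...

        ms.publishMetricsNow(); // best-effort synchronous snapshot and flush
        ms.shutdown();
      }
    }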
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java Tue Dec 11 20:08:00 2012
@@ -19,6 +19,7 @@
 package org.apache.hadoop.metrics2.impl;
 
 import java.util.Random;
+import java.util.concurrent.*;
 
 import static com.google.common.base.Preconditions.*;
 
@@ -48,6 +49,7 @@ class MetricsSinkAdapter implements Sink
   private volatile boolean stopping = false;
   private volatile boolean inError = false;
   private final int period, firstRetryDelay, retryCount;
+  private final long oobPutTimeout;
   private final float retryBackoff;
   private final MetricsRegistry registry = new MetricsRegistry("sinkadapter");
   private final MutableStat latency;
@@ -69,6 +71,8 @@ class MetricsSinkAdapter implements Sink
     this.period = checkArg(period, period > 0, "period");
     firstRetryDelay = checkArg(retryDelay, retryDelay > 0, "retry delay");
     this.retryBackoff = checkArg(retryBackoff, retryBackoff>1, "retry backoff");
+    oobPutTimeout = (long)
+        (firstRetryDelay * Math.pow(retryBackoff, retryCount) * 1000);
     this.retryCount = retryCount;
     this.queue = new SinkQueue<MetricsBuffer>(checkArg(queueCapacity,
         queueCapacity > 0, "queue capacity"));
@@ -95,6 +99,23 @@ class MetricsSinkAdapter implements Sink
     }
     return true; // OK
   }
+  
+  public boolean putMetricsImmediate(MetricsBuffer buffer) {
+    WaitableMetricsBuffer waitableBuffer =
+        new WaitableMetricsBuffer(buffer);
+    if (!queue.enqueue(waitableBuffer)) {
+      LOG.warn(name + " has a full queue and can't consume the given metrics.");
+      dropped.incr();
+      return false;
+    }
+    if (!waitableBuffer.waitTillNotified(oobPutTimeout)) {
+      LOG.warn(name +
+          " couldn't fulfill an immediate putMetrics request in time." +
+          " Abandoning.");
+      return false;
+    }
+    return true;
+  }
 
   void publishMetricsFromQueue() {
     int retryDelay = firstRetryDelay;
@@ -158,6 +179,9 @@ class MetricsSinkAdapter implements Sink
       sink.flush();
       latency.add(Time.now() - ts);
     }
+    if (buffer instanceof WaitableMetricsBuffer) {
+      ((WaitableMetricsBuffer)buffer).notifyAnyWaiters();
+    }
     LOG.debug("Done");
   }
 
@@ -191,4 +215,26 @@ class MetricsSinkAdapter implements Sink
   MetricsSink sink() {
     return sink;
   }
+
+  static class WaitableMetricsBuffer extends MetricsBuffer {
+    private final Semaphore notificationSemaphore =
+        new Semaphore(0);
+
+    public WaitableMetricsBuffer(MetricsBuffer metricsBuffer) {
+      super(metricsBuffer);
+    }
+
+    public boolean waitTillNotified(long millisecondsToWait) {
+      try {
+        return notificationSemaphore.tryAcquire(millisecondsToWait,
+            TimeUnit.MILLISECONDS);
+      } catch (InterruptedException e) {
+        return false;
+      }
+    }
+
+    public void notifyAnyWaiters() {
+      notificationSemaphore.release();
+    }
+  }
 }

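The immediate-put timeout is sized to outlast the sink's whole retry schedule: firstRetryDelay * retryBackoff^retryCount seconds, converted to milliseconds. With assumed values of a 10-second first delay, a backoff of 2, and one retry, a waiter gives up after 10 * 2 * 1000 = 20,000 ms. The semaphore handshake between putMetricsImmediate and consume reduces to this standalone sketch:

    import java.util.concurrent.Semaphore;
    import java.util.concurrent.TimeUnit;

    class HandoffDemo {
      public static void main(String[] args) throws InterruptedException {
        final Semaphore done = new Semaphore(0);

        // Stands in for the sink thread draining the queue.
        new Thread(new Runnable() {
          @Override
          public void run() {
            // ... publish the buffer to the sink, then wake any waiter:
            done.release(); // notifyAnyWaiters()
          }
        }).start();

        long timeoutMs = (long) (10 * Math.pow(2, 1) * 1000); // 20,000 ms
        boolean ok = done.tryAcquire(timeoutMs, TimeUnit.MILLISECONDS); // waitTillNotified()
        System.out.println(ok ? "published" : "abandoned");
      }
    }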
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java Tue Dec 11 20:08:00 2012
@@ -344,9 +344,19 @@ public class MetricsSystemImpl extends M
   synchronized void onTimerEvent() {
     logicalTime += period;
     if (sinks.size() > 0) {
-      publishMetrics(sampleMetrics());
+      publishMetrics(sampleMetrics(), false);
     }
   }
+  
+  /**
+   * Requests an immediate publish of all metrics from sources to sinks.
+   */
+  @Override
+  public void publishMetricsNow() {
+    if (sinks.size() > 0) {
+      publishMetrics(sampleMetrics(), true);
+    }    
+  }
 
   /**
    * Sample all the sources for a snapshot of metrics/tags
@@ -380,12 +390,20 @@ public class MetricsSystemImpl extends M
   /**
    * Publish a metrics snapshot to all the sinks
    * @param buffer  the metrics snapshot to publish
+   * @param immediate  indicates that the call should block until the sinks
+   *                   have consumed the snapshot, rather than returning as
+   *                   soon as it is queued for the sink thread.
    */
-  synchronized void publishMetrics(MetricsBuffer buffer) {
+  synchronized void publishMetrics(MetricsBuffer buffer, boolean immediate) {
     int dropped = 0;
     for (MetricsSinkAdapter sa : sinks.values()) {
       long startTime = Time.now();
-      dropped += sa.putMetrics(buffer, logicalTime) ? 0 : 1;
+      boolean result;
+      if (immediate) {
+        result = sa.putMetricsImmediate(buffer); 
+      } else {
+        result = sa.putMetrics(buffer, logicalTime);
+      }
+      dropped += result ? 0 : 1;
       publishStat.add(Time.now() - startTime);
     }
     droppedPubAll.incr(dropped);
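
MetricsSystemImpl now exposes publishMetricsNow(), so callers can force a
synchronous flush instead of waiting up to a full period for the next
timer event; the call returns once each sink has consumed the snapshot,
timed out, or dropped it because its queue was full. A hedged usage
sketch; the prefix string and the shutdown-hook placement are made up,
and only publishMetricsNow() comes from this patch:

    import org.apache.hadoop.metrics2.MetricsSystem;
    import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;

    public class FlushOnShutdown {
      public static void main(String[] args) {
        // Assumes sources and sinks are configured elsewhere; "demo" is
        // an arbitrary prefix for this sketch.
        final MetricsSystem ms = DefaultMetricsSystem.initialize("demo");

        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
          @Override
          public void run() {
            // Push a final snapshot to the sinks before the JVM exits,
            // instead of losing metrics gathered since the last period.
            ms.publishMetricsNow();
            ms.stop();
          }
        }));
      }
    }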

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Tue Dec 11 20:08:00 2012
@@ -299,13 +299,17 @@ public class UserGroupInformation {
   
   private static String OS_LOGIN_MODULE_NAME;
   private static Class<? extends Principal> OS_PRINCIPAL_CLASS;
-  private static final boolean windows = 
-                           System.getProperty("os.name").startsWith("Windows");
+  private static final boolean windows =
+      System.getProperty("os.name").startsWith("Windows");
+  private static final boolean is64Bit =
+      System.getProperty("os.arch").contains("64");
   /* Return the OS login module class name */
   private static String getOSLoginModuleName() {
     if (System.getProperty("java.vendor").contains("IBM")) {
-      return windows ? "com.ibm.security.auth.module.NTLoginModule"
-       : "com.ibm.security.auth.module.LinuxLoginModule";
+      return windows ? (is64Bit
+          ? "com.ibm.security.auth.module.Win64LoginModule"
+          : "com.ibm.security.auth.module.NTLoginModule")
+        : "com.ibm.security.auth.module.LinuxLoginModule";
     } else {
       return windows ? "com.sun.security.auth.module.NTLoginModule"
         : "com.sun.security.auth.module.UnixLoginModule";
@@ -319,13 +323,13 @@ public class UserGroupInformation {
     try {
       if (System.getProperty("java.vendor").contains("IBM")) {
         if (windows) {
-          return (Class<? extends Principal>)
-            cl.loadClass("com.ibm.security.auth.UsernamePrincipal");
+          return (Class<? extends Principal>) (is64Bit
+            ? cl.loadClass("com.ibm.security.auth.UsernamePrincipal")
+            : cl.loadClass("com.ibm.security.auth.NTUserPrincipal"));
         } else {
-          return (Class<? extends Principal>)
-            (System.getProperty("os.arch").contains("64")
-             ? cl.loadClass("com.ibm.security.auth.UsernamePrincipal")
-             : cl.loadClass("com.ibm.security.auth.LinuxPrincipal"));
+          return (Class<? extends Principal>) (is64Bit
+            ? cl.loadClass("com.ibm.security.auth.UsernamePrincipal")
+            : cl.loadClass("com.ibm.security.auth.LinuxPrincipal"));
         }
       } else {
         return (Class<? extends Principal>) (windows
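
For the IBM JDK the login module choice now depends on the architecture
as well as the OS: 64-bit Windows gets Win64LoginModule (with
UsernamePrincipal), 32-bit Windows keeps NTLoginModule (with
NTUserPrincipal), and non-Windows stays on LinuxLoginModule, while the
Sun/Oracle branch is unchanged. A standalone probe showing the same
branching (the probe class is hypothetical; the module names are copied
from the diff):

    public class LoginModuleProbe {
      public static void main(String[] args) {
        boolean ibm = System.getProperty("java.vendor").contains("IBM");
        boolean windows = System.getProperty("os.name").startsWith("Windows");
        boolean is64Bit = System.getProperty("os.arch").contains("64");

        String module;
        if (ibm) {
          module = windows
              ? (is64Bit ? "com.ibm.security.auth.module.Win64LoginModule"
                         : "com.ibm.security.auth.module.NTLoginModule")
              : "com.ibm.security.auth.module.LinuxLoginModule";
        } else {
          module = windows ? "com.sun.security.auth.module.NTLoginModule"
                           : "com.sun.security.auth.module.UnixLoginModule";
        }
        System.out.println("OS login module: " + module);
      }
    }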

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1415787-1420366

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java Tue Dec 11 20:08:00 2012
@@ -41,6 +41,9 @@ public abstract class FCStatisticsBaseTe
   
   //fc should be set appropriately by the deriving test.
   protected static FileContext fc = null;
+
+  private final FileContextTestHelper fileContextTestHelper =
+    new FileContextTestHelper();
   
   @Test
   public void testStatistics() throws IOException, URISyntaxException {
@@ -97,4 +100,8 @@ public abstract class FCStatisticsBaseTe
     }
     return URI.create(SchemeAuthString);
   }
+
+  protected Path getTestRootPath(FileContext fc, String pathString){
+    return fileContextTestHelper.getTestRootPath(fc, pathString);
+  }
 }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java Tue Dec 11 20:08:00 2012
@@ -65,6 +65,8 @@ public abstract class FSMainOperationsBa
   
   
   protected static FileSystem fSys;
+
+  private final FileSystemTestHelper fileSystemTestHelper;
   
   final private static PathFilter DEFAULT_FILTER = new PathFilter() {
     @Override
@@ -73,6 +75,14 @@ public abstract class FSMainOperationsBa
     }
   };
 
+  public FSMainOperationsBaseTest() {
+    this(new FileSystemTestHelper());
+  }
+
+  public FSMainOperationsBaseTest(FileSystemTestHelper fileSystemTestHelper) {
+    this.fileSystemTestHelper = fileSystemTestHelper;
+  }
+
  //A test filter that returns any path containing an "x"
   final private static PathFilter TEST_X_FILTER = new PathFilter() {
     @Override
@@ -1134,4 +1144,12 @@ public abstract class FSMainOperationsBa
       }
     return false;
  }
+  
+  protected Path getAbsoluteTestRootPath(FileSystem fSys) throws IOException {
+    return fileSystemTestHelper.getAbsoluteTestRootPath(fSys);
+  }
+
+  protected Path getTestRootPath(FileSystem fSys, String pathString) {
+    return fileSystemTestHelper.getTestRootPath(fSys, pathString);
+  }
 }
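
The base test now receives its FileSystemTestHelper through a
constructor, so a platform-specific suite can swap in a differently
configured helper without touching the shared tests. A sketch of a
derived suite, modeled on TestFSMainOperationsLocalFileSystem further
down in this diff (the class name here is hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSMainOperationsBaseTest;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FileSystemTestHelper;
    import org.junit.Before;

    public class TestFSMainOperationsSomeFs extends FSMainOperationsBaseTest {
      public TestFSMainOperationsSomeFs() {
        // Ask the helper to strip a leading drive spec ("C:") from the
        // test root; see the FileSystemTestHelper changes below.
        super(new FileSystemTestHelper(true));
      }

      @Before
      public void setUp() throws Exception {
        fSys = FileSystem.getLocal(new Configuration());
        super.setUp();
      }
    }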

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextCreateMkdirBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextCreateMkdirBaseTest.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextCreateMkdirBaseTest.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextCreateMkdirBaseTest.java Tue Dec 11 20:08:00 2012
@@ -52,6 +52,8 @@ import org.apache.commons.logging.impl.L
 public abstract class FileContextCreateMkdirBaseTest {
    
   protected static FileContext fc;
+
+  private final FileContextTestHelper fileContextTestHelper;
       
   {
     try {
@@ -63,6 +65,15 @@ public abstract class FileContextCreateM
     }
   }
   
+  public FileContextCreateMkdirBaseTest() {
+    this(new FileContextTestHelper());
+  }
+
+  public FileContextCreateMkdirBaseTest(
+      FileContextTestHelper fileContextTestHelper) {
+
+    this.fileContextTestHelper = fileContextTestHelper;
+  }
 
   @Before
   public void setUp() throws Exception {
@@ -127,7 +138,8 @@ public abstract class FileContextCreateM
   @Test
   public void testCreateNonRecursiveWithNonExistingDir() {
     try {
-      createFileNonRecursive(fc, getTestRootPath(fc, "NonExisting/foo"));
+      fileContextTestHelper.createFileNonRecursive(fc,
+        getTestRootPath(fc, "NonExisting/foo"));
       Assert.fail("Create with non existing parent dir should have failed");
     } catch (IOException e) {
       // As expected
@@ -149,4 +161,12 @@ public abstract class FileContextCreateM
     createFile(fc, f);
     Assert.assertTrue(isFile(fc, f));
   }
+
+  private Path getTestRootPath(FileContext fc) {
+    return fileContextTestHelper.getTestRootPath(fc);
+  }
+
+  private Path getTestRootPath(FileContext fc, String pathString) {
+    return fileContextTestHelper.getTestRootPath(fc, pathString);
+  }
 }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java Tue Dec 11 20:08:00 2012
@@ -73,6 +73,8 @@ public abstract class FileContextMainOpe
   }
   
   protected static FileContext fc;
+
+  private final FileContextTestHelper fileContextTestHelper;
   
   final private static PathFilter DEFAULT_FILTER = new PathFilter() {
     @Override
@@ -95,6 +97,16 @@ public abstract class FileContextMainOpe
   private static byte[] data = getFileData(numBlocks,
       getDefaultBlockSize());
   
+  public FileContextMainOperationsBaseTest() {
+    this(new FileContextTestHelper());
+  }
+  
+  public FileContextMainOperationsBaseTest(
+      FileContextTestHelper fileContextTestHelper) {
+
+    this.fileContextTestHelper = fileContextTestHelper;
+  }
+
   @Before
   public void setUp() throws Exception {
     fc.mkdir(getTestRootPath(fc, "test"), FileContext.DEFAULT_PERM, true);
@@ -1190,4 +1202,12 @@ public abstract class FileContextMainOpe
       }
     return false;
  }
+
+  protected Path getAbsoluteTestRootPath(FileContext fc) throws IOException {
+    return fileContextTestHelper.getAbsoluteTestRootPath(fc);
+  }
+
+  protected Path getTestRootPath(FileContext fc, String pathString) {
+    return fileContextTestHelper.getTestRootPath(fc, pathString);
+  }
 }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java Tue Dec 11 20:08:00 2012
@@ -70,6 +70,16 @@ public abstract class FileContextPermiss
   
   protected static FileContext fc;
 
+  private final FileContextTestHelper fileContextTestHelper;
+
+  public FileContextPermissionBase() {
+    this(new FileContextTestHelper());
+  }
+
+  public FileContextPermissionBase(FileContextTestHelper fileContextTestHelper) {
+    this.fileContextTestHelper = fileContextTestHelper;
+  }
+
   @Before
   public void setUp() throws Exception {
     fc.mkdir(getTestRootPath(fc), FileContext.DEFAULT_PERM, true);
@@ -94,7 +104,7 @@ public abstract class FileContextPermiss
     }
     String filename = "foo";
     Path f = getTestRootPath(fc, filename);
-    createFile(fc, filename);
+    fileContextTestHelper.createFile(fc, filename);
     doFilePermissionCheck(FileContext.DEFAULT_PERM.applyUMask(fc.getUMask()),
                         fc.getFileStatus(f).getPermission());
   }
@@ -109,7 +119,7 @@ public abstract class FileContextPermiss
 
     String filename = "foo";
     Path f = getTestRootPath(fc, filename);
-    createFile(fc, f);
+    fileContextTestHelper.createFile(fc, f);
 
     try {
       // create files and manipulate them.
@@ -211,4 +221,12 @@ public abstract class FileContextPermiss
   FsPermission getFileMask() {
     return FILE_MASK_ZERO;
   }
+
+  private Path getTestRootPath(FileContext fc) {
+    return fileContextTestHelper.getTestRootPath(fc);
+  }
+
+  private Path getTestRootPath(FileContext fc, String pathString) {
+    return fileContextTestHelper.getTestRootPath(fc, pathString);
+  }
 }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextSymlinkBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextSymlinkBaseTest.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextSymlinkBaseTest.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextSymlinkBaseTest.java Tue Dec 11 20:08:00 2012
@@ -45,6 +45,8 @@ public abstract class FileContextSymlink
  
   protected static FileContext fc;
 
+  private final FileContextTestHelper fileContextTestHelper;
+
   abstract protected String getScheme();
   abstract protected String testBaseDir1() throws IOException;
   abstract protected String testBaseDir2() throws IOException;
@@ -79,6 +81,16 @@ public abstract class FileContextSymlink
         CreateOpts.blockSize(blockSize));
   }
 
+  public FileContextSymlinkBaseTest() {
+    this(new FileContextTestHelper());
+  }
+
+  public FileContextSymlinkBaseTest(
+      FileContextTestHelper fileContextTestHelper) {
+
+    this.fileContextTestHelper = fileContextTestHelper;
+  }
+
   @Before
   public void setUp() throws Exception {
     fc.mkdir(new Path(testBaseDir1()), FileContext.DEFAULT_PERM, true);
@@ -1353,4 +1365,8 @@ public abstract class FileContextSymlink
       assertEquals(2, fc.getFileStatus(file).getModificationTime());
     }
   }
+
+  protected String getAbsoluteTestRootDir(FileContext fc) throws IOException {
+    return fileContextTestHelper.getAbsoluteTestRootDir(fc);
+  }
 }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java Tue Dec 11 20:08:00 2012
@@ -25,6 +25,7 @@ import java.util.EnumSet;
 import org.apache.hadoop.fs.Options.CreateOpts;
 import org.apache.hadoop.fs.Options.CreateOpts.BlockSize;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.Shell;
 import org.junit.Assert;
 
 /**
@@ -32,15 +33,22 @@ import org.junit.Assert;
  */
 public final class FileContextTestHelper {
   // The test root is relative to the <wd>/build/test/data by default
-  public static final String TEST_ROOT_DIR = 
+  public static String TEST_ROOT_DIR = 
     System.getProperty("test.build.data", "build/test/data") + "/test";
   private static final int DEFAULT_BLOCK_SIZE = 1024;
   private static final int DEFAULT_NUM_BLOCKS = 2;
   private static String absTestRootDir = null;
 
-  /** Hidden constructor */
-  private FileContextTestHelper() {}
+  private final boolean stripDriveSpec;
+
+  public FileContextTestHelper() {
+    this(false);
+  }
   
+  public FileContextTestHelper(boolean stripDriveSpec) {
+    this.stripDriveSpec = stripDriveSpec;
+  }
+
   public static int getDefaultBlockSize() {
     return DEFAULT_BLOCK_SIZE;
   }
@@ -52,38 +60,40 @@ public final class FileContextTestHelper
     }
     return data;
   }
-  
-  public static Path getTestRootPath(FileContext fc) {
-    return fc.makeQualified(new Path(TEST_ROOT_DIR));
+
+  public Path getTestRootPath(FileContext fc) {
+    return fc.makeQualified(new Path(stripDriveSpec(TEST_ROOT_DIR,
+      stripDriveSpec)));
   }
 
-  public static Path getTestRootPath(FileContext fc, String pathString) {
-    return fc.makeQualified(new Path(TEST_ROOT_DIR, pathString));
+  public Path getTestRootPath(FileContext fc, String pathString) {
+    return fc.makeQualified(new Path(
+      stripDriveSpec(TEST_ROOT_DIR, stripDriveSpec), pathString));
   }
   
   
  // the getAbsoluteXxx methods are needed because the root test dir
  // can be messed up by changing the working dir.
 
-  public static String getAbsoluteTestRootDir(FileContext fc)
+  public String getAbsoluteTestRootDir(FileContext fc)
       throws IOException {
     if (absTestRootDir == null) {
-      if (new Path(TEST_ROOT_DIR).isAbsolute()) {
-        absTestRootDir = TEST_ROOT_DIR;
+      String testRootDir = stripDriveSpec(TEST_ROOT_DIR, stripDriveSpec);
+      if (new Path(testRootDir).isAbsolute()) {
+        absTestRootDir = testRootDir;
       } else {
         absTestRootDir = fc.getWorkingDirectory().toString() + "/"
-            + TEST_ROOT_DIR;
+            + testRootDir;
       }
     }
     return absTestRootDir;
   }
   
-  public static Path getAbsoluteTestRootPath(FileContext fc) throws IOException {
+  public Path getAbsoluteTestRootPath(FileContext fc) throws IOException {
     return fc.makeQualified(new Path(getAbsoluteTestRootDir(fc)));
   }
 
-  public static Path getDefaultWorkingDirectory(FileContext fc)
-      throws IOException {
+  public Path getDefaultWorkingDirectory(FileContext fc) throws IOException {
     return getTestRootPath(fc, "/user/" + System.getProperty("user.name"))
         .makeQualified(fc.getDefaultFileSystem().getUri(),
             fc.getWorkingDirectory());
@@ -116,12 +126,12 @@ public final class FileContextTestHelper
     return createFile(fc, path, DEFAULT_NUM_BLOCKS, CreateOpts.createParent());
   }
 
-  public static long createFile(FileContext fc, String name) throws IOException {
+  public long createFile(FileContext fc, String name) throws IOException {
     Path path = getTestRootPath(fc, name);
     return createFile(fc, path);
   }
   
-  public static long createFileNonRecursive(FileContext fc, String name)
+  public long createFileNonRecursive(FileContext fc, String name)
   throws IOException {
     Path path = getTestRootPath(fc, name);
     return createFileNonRecursive(fc, path);
@@ -190,14 +200,12 @@ public final class FileContextTestHelper
     return buffer;
   }
 
-  public static FileStatus containsPath(FileContext fc, Path path,
-      FileStatus[] dirList)
+  public FileStatus containsPath(FileContext fc, Path path, FileStatus[] dirList)
     throws IOException {
     return containsPath(getTestRootPath(fc, path.toString()), dirList);
   }
   
-  public static FileStatus containsPath(Path path,
-      FileStatus[] dirList)
+  public FileStatus containsPath(Path path, FileStatus[] dirList)
     throws IOException {
     for(int i = 0; i < dirList.length; i ++) { 
       if (path.equals(dirList[i].getPath()))
@@ -206,7 +214,7 @@ public final class FileContextTestHelper
     return null;
   }
   
-  public static FileStatus containsPath(FileContext fc, String path,
+  public FileStatus containsPath(FileContext fc, String path,
       FileStatus[] dirList)
      throws IOException {
     return containsPath(fc, new Path(path), dirList);
@@ -241,4 +249,15 @@ public final class FileContextTestHelper
     }
     Assert.assertEquals(aFc.makeQualified(new Path(path)), s.getPath());
   }
+
+  private static String stripDriveSpec(String pathString, boolean strip) {
+    if (strip && Shell.WINDOWS && pathString.length() >= 2 &&
+        Character.toUpperCase(pathString.charAt(0)) >= 'A' &&
+        Character.toUpperCase(pathString.charAt(0)) <= 'Z' &&
+        pathString.charAt(1) == ':') {
+
+      return pathString.substring(2);
+    }
+    return pathString;
+  }
 }
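
stripDriveSpec only fires when the helper was constructed with
stripDriveSpec == true and the JVM is running on Windows; it then drops
a leading drive specifier such as "C:" so the test root becomes
drive-relative. A self-contained rendering of the same check, with
Shell.WINDOWS replaced by a direct os.name probe (the demo class and
sample paths are made up):

    public class DriveSpecDemo {
      // Same predicate as the private helper in the patch, minus the
      // dependency on org.apache.hadoop.util.Shell.
      static String stripDriveSpec(String path, boolean strip) {
        boolean windows =
            System.getProperty("os.name").startsWith("Windows");
        if (strip && windows && path.length() >= 2
            && Character.toUpperCase(path.charAt(0)) >= 'A'
            && Character.toUpperCase(path.charAt(0)) <= 'Z'
            && path.charAt(1) == ':') {
          return path.substring(2);
        }
        return path;
      }

      public static void main(String[] args) {
        // On Windows prints "/build/test/data/test"; elsewhere, or with
        // strip == false, the input comes back unchanged.
        System.out.println(stripDriveSpec("C:/build/test/data/test", true));
        System.out.println(stripDriveSpec("C:/build/test/data/test", false));
      }
    }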

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextUtilBase.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextUtilBase.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextUtilBase.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextUtilBase.java Tue Dec 11 20:08:00 2012
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.fs;
 
-import static org.apache.hadoop.fs.FileContextTestHelper.getTestRootPath;
 import static org.apache.hadoop.fs.FileContextTestHelper.readFile;
 import static org.apache.hadoop.fs.FileContextTestHelper.writeFile;
 import static org.junit.Assert.assertTrue;
@@ -45,6 +44,8 @@ import org.junit.Test;
  */
 public abstract class FileContextUtilBase {
   protected FileContext fc;
+  private final FileContextTestHelper fileContextTestHelper =
+    new FileContextTestHelper();
   
   {
     try {
@@ -105,4 +106,12 @@ public abstract class FileContextUtilBas
     assertTrue("Copied files does not match ",Arrays.equals(ts.getBytes(),
         readFile(fc,file2,ts.getBytes().length)));
   }
+
+  private Path getTestRootPath(FileContext fc) {
+    return fileContextTestHelper.getTestRootPath(fc);
+  }
+
+  private Path getTestRootPath(FileContext fc, String pathString) {
+    return fileContextTestHelper.getTestRootPath(fc, pathString);
+  }
 }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java Tue Dec 11 20:08:00 2012
@@ -25,6 +25,7 @@ import java.util.Random;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.util.Shell;
 import org.junit.Assert;
 import static org.junit.Assert.*;
 import static org.mockito.Mockito.mock;
@@ -34,15 +35,23 @@ import static org.mockito.Mockito.mock;
  */
 public final class FileSystemTestHelper {
  // The test root is relative to the <wd>/target/test/data by default
-  public static final String TEST_ROOT_DIR = 
+  public static String TEST_ROOT_DIR = 
     System.getProperty("test.build.data", "target/test/data") + "/test";
   private static final int DEFAULT_BLOCK_SIZE = 1024;
   private static final int DEFAULT_NUM_BLOCKS = 2;
   private static final short DEFAULT_NUM_REPL = 1;
   private static String absTestRootDir = null;
 
+  private final boolean stripDriveSpec;
+
   /** Hidden constructor */
-  private FileSystemTestHelper() {}
+  public FileSystemTestHelper() {
+    this(false);
+  }
+
+  public FileSystemTestHelper(boolean stripDriveSpec) {
+    this.stripDriveSpec = stripDriveSpec;
+  }
   
   public static void addFileSystemForTesting(URI uri, Configuration conf,
       FileSystem fs) throws IOException {
@@ -65,15 +74,17 @@ public final class FileSystemTestHelper 
   /*
    * get testRootPath qualified for fSys
    */
-  public static Path getTestRootPath(FileSystem fSys) {
-    return fSys.makeQualified(new Path(TEST_ROOT_DIR));
+  public Path getTestRootPath(FileSystem fSys) {
+    return fSys.makeQualified(new Path(stripDriveSpec(TEST_ROOT_DIR,
+      this.stripDriveSpec)));
   }
 
   /*
    * get testRootPath + pathString qualified for fSys
    */
-  public static Path getTestRootPath(FileSystem fSys, String pathString) {
-    return fSys.makeQualified(new Path(TEST_ROOT_DIR, pathString));
+  public Path getTestRootPath(FileSystem fSys, String pathString) {
+    return fSys.makeQualified(new Path(
+      stripDriveSpec(TEST_ROOT_DIR, this.stripDriveSpec), pathString));
   }
   
   
@@ -82,26 +93,25 @@ public final class FileSystemTestHelper 
   // is often relative to the working directory of process
   // running the unit tests.
 
-  static String getAbsoluteTestRootDir(FileSystem fSys)
-      throws IOException {
+  String getAbsoluteTestRootDir(FileSystem fSys) throws IOException {
     // NOTE: can't cache because of different filesystems!
     //if (absTestRootDir == null) 
-      if (new Path(TEST_ROOT_DIR).isAbsolute()) {
-        absTestRootDir = TEST_ROOT_DIR;
+      String testRootDir = stripDriveSpec(TEST_ROOT_DIR, this.stripDriveSpec);
+      if (new Path(testRootDir).isAbsolute()) {
+        absTestRootDir = testRootDir;
       } else {
         absTestRootDir = fSys.getWorkingDirectory().toString() + "/"
-            + TEST_ROOT_DIR;
+            + testRootDir;
       }
     //}
     return absTestRootDir;
   }
   
-  public static Path getAbsoluteTestRootPath(FileSystem fSys) throws IOException {
+  public Path getAbsoluteTestRootPath(FileSystem fSys) throws IOException {
     return fSys.makeQualified(new Path(getAbsoluteTestRootDir(fSys)));
   }
 
-  public static Path getDefaultWorkingDirectory(FileSystem fSys)
-      throws IOException {
+  public Path getDefaultWorkingDirectory(FileSystem fSys) throws IOException {
     return getTestRootPath(fSys, "/user/" + System.getProperty("user.name"))
         .makeQualified(fSys.getUri(),
             fSys.getWorkingDirectory());
@@ -136,7 +146,7 @@ public final class FileSystemTestHelper 
     return createFile(fSys, path, DEFAULT_NUM_BLOCKS, DEFAULT_BLOCK_SIZE, DEFAULT_NUM_REPL, true);
   }
 
-  public static long createFile(FileSystem fSys, String name) throws IOException {
+  public long createFile(FileSystem fSys, String name) throws IOException {
     Path path = getTestRootPath(fSys, name);
     return createFile(fSys, path);
   }
@@ -188,7 +198,7 @@ public final class FileSystemTestHelper 
     return s;
   }
 
-  public static FileStatus containsPath(FileSystem fSys, Path path,
+  public FileStatus containsPath(FileSystem fSys, Path path,
       FileStatus[] dirList)
     throws IOException {
     for(int i = 0; i < dirList.length; i ++) { 
@@ -199,8 +209,7 @@ public final class FileSystemTestHelper 
     return null;
   }
   
-  public static FileStatus containsPath(Path path,
-      FileStatus[] dirList)
+  public static FileStatus containsPath(Path path, FileStatus[] dirList)
     throws IOException {
     for(int i = 0; i < dirList.length; i ++) { 
       if (path.equals(dirList[i].getPath()))
@@ -210,7 +219,7 @@ public final class FileSystemTestHelper 
   }
   
   
-  public static FileStatus containsPath(FileSystem fSys, String path, FileStatus[] dirList)
+  public FileStatus containsPath(FileSystem fSys, String path, FileStatus[] dirList)
      throws IOException {
     return containsPath(fSys, new Path(path), dirList);
   }
@@ -264,4 +273,15 @@ public final class FileSystemTestHelper 
       return fs.getDelegationToken(renewer);
     }    
   }
+
+  private static String stripDriveSpec(String pathString, boolean strip) {
+    if (strip && Shell.WINDOWS && pathString.length() >= 2 &&
+        Character.toUpperCase(pathString.charAt(0)) >= 'A' &&
+        Character.toUpperCase(pathString.charAt(0)) <= 'Z' &&
+        pathString.charAt(1) == ':') {
+
+      return pathString.substring(2);
+    }
+    return pathString;
+  }
 }
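
Because the path helpers are instance methods now, call sites that used
the old statics have to hold a helper object; the change to
TestFSMainOperationsLocalFileSystem just below is the in-tree example.
A hedged before/after sketch at an arbitrary call site:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FileSystemTestHelper;
    import org.apache.hadoop.fs.Path;

    public class HelperCallSite {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.getLocal(new Configuration());

        // Before this change (no longer compiles):
        //   Path p = FileSystemTestHelper.getTestRootPath(fs, "foo");

        // After: construct a helper, optionally stripping the drive
        // spec on Windows, and go through the instance.
        FileSystemTestHelper helper = new FileSystemTestHelper();
        Path p = helper.getTestRootPath(fs, "foo");
        System.out.println(p);
      }
    }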

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java Tue Dec 11 20:08:00 2012
@@ -52,8 +52,7 @@ public class TestFSMainOperationsLocalFi
   @Test
   @Override
   public void testWDAbsolute() throws IOException {
-    Path absoluteDir = FileSystemTestHelper.getTestRootPath(fSys,
-        "test/existingDir");
+    Path absoluteDir = getTestRootPath(fSys, "test/existingDir");
     fSys.mkdirs(absoluteDir);
     fSys.setWorkingDirectory(absoluteDir);
     Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory());

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java?rev=1420375&r1=1420374&r2=1420375&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java Tue Dec 11 20:08:00 2012
@@ -36,6 +36,9 @@ public class TestFileContextDeleteOnExit
   private static int numBlocks = 2;
   
   private FileContext fc;
+
+  private final FileContextTestHelper fileContextTestHelper =
+    new FileContextTestHelper();
   
   @Before
   public void setup() throws IOException {
@@ -86,4 +89,12 @@ public class TestFileContextDeleteOnExit
     Assert.assertFalse(exists(fc, file2));
     Assert.assertFalse(exists(fc, dir));
   }
+
+  private Path getTestRootPath(FileContext fc) {
+    return this.fileContextTestHelper.getTestRootPath(fc);
+  }
+
+  private Path getTestRootPath(FileContext fc, String pathString) {
+    return this.fileContextTestHelper.getTestRootPath(fc, pathString);
+  }
 }


