hadoop-common-commits mailing list archives

From: szets...@apache.org
Subject: svn commit: r1407706 - in /hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common: ./ src/main/conf/ src/main/docs/ src/main/java/ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/fs/viewfs/ src/main/java/org/apache/had...
Date: Sat, 10 Nov 2012 00:49:22 GMT
Author: szetszwo
Date: Sat Nov 10 00:49:15 2012
New Revision: 1407706

URL: http://svn.apache.org/viewvc?rev=1407706&view=rev
Log:
Merge r1406415 through r1407703 from trunk.

Added:
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPlainServer.java
      - copied unchanged from r1407703, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPlainServer.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/site/apt/CLIMiniCluster.apt.vm
      - copied unchanged from r1407703, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/CLIMiniCluster.apt.vm
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm
      - copied unchanged from r1407703, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/site/apt/SingleCluster.apt.vm
      - copied unchanged from r1407703, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/SingleCluster.apt.vm
Modified:
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt Sat Nov 10 00:49:15 2012
@@ -272,10 +272,17 @@ Trunk (Unreleased)
     HADOOP-8918. test-patch.sh is parsing modified files wrong.
     (Raja Aluri via suresh)
 
+    HADOOP-8589 ViewFs tests fail when tests and home dirs are nested.
+    (sanjay Radia)
+
+    HADOOP-8974. TestDFVariations fails on Windows. (Chris Nauroth via suresh)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)
 
+    HADOOP-8589 ViewFs tests fail when tests and home dirs are nested (sanjay Radia)
+
 Release 2.0.3-alpha - Unreleased 
 
   INCOMPATIBLE CHANGES
@@ -285,6 +292,8 @@ Release 2.0.3-alpha - Unreleased 
     HADOOP-8597. Permit FsShell's text command to read Avro files.
     (Ivan Vladimirov Ivanov via cutting)
 
+    HADOOP-9020. Add a SASL PLAIN server (daryn via bobby)
+
   IMPROVEMENTS
 
     HADOOP-8789. Tests setLevel(Level.OFF) should be Level.ERROR.
@@ -342,6 +351,16 @@ Release 2.0.3-alpha - Unreleased 
     HADOOP-9010. Map UGI authenticationMethod to RPC authMethod (daryn via
     bobby)
 
+    HADOOP-9013. UGI should not hardcode loginUser's authenticationType (daryn
+    via bobby)
+
+    HADOOP-9014. Standardize creation of SaslRpcClients (daryn via bobby)
+
+    HADOOP-9015. Standardize creation of SaslRpcServers (daryn via bobby)
+
+    HADOOP-8860. Split MapReduce and YARN sections in documentation navigation.
+    (tomwhite via tucu)
+
   OPTIMIZATIONS
 
     HADOOP-8866. SampleQuantiles#query is O(N^2) instead of O(N). (Andrew Wang
@@ -400,6 +419,8 @@ Release 2.0.3-alpha - Unreleased 
 
     HADOOP-9012. IPC Client sends wrong connection context (daryn via bobby)
 
+    HADOOP-7115. Add a cache for getpwuid_r and getpwgid_r calls (tucu)
+
 Release 2.0.2-alpha - 2012-09-07 
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1406415-1407703

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties Sat Nov 10 00:49:15 2012
@@ -100,6 +100,13 @@ log4j.appender.TLA.layout=org.apache.log
 log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
 
 #
+# HDFS block state change log from block manager
+#
+# Uncomment the following to suppress normal block state change
+# messages from BlockManager in NameNode.
+#log4j.logger.BlockStateChange=WARN
+
+#
 #Security appender
 #
 hadoop.security.logger=INFO,NullAppender

Propchange: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1406415-1407703

Propchange: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1406415-1407703

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java Sat Nov 10 00:49:15 2012
@@ -184,5 +184,11 @@ public class CommonConfigurationKeys ext
    */
   public static final String KERBEROS_TICKET_CACHE_PATH =
       "hadoop.security.kerberos.ticket.cache.path";
-}
 
+  public static final String HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_KEY =
+    "hadoop.security.uid.cache.secs";
+
+  public static final long HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_DEFAULT =
+    4*60*60; // 4 hours
+
+}
\ No newline at end of file
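
[Editor's note] For context, a minimal sketch (not part of the commit) of how the new key is consumed; it mirrors the NativeIO static-initializer change later in this revision. The class name is hypothetical.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeys;

    public class UidCacheTimeoutExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Key is in seconds (default 4*60*60 = 14400); NativeIO converts to ms.
        long timeoutMs = conf.getLong(
            CommonConfigurationKeys.HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_KEY,
            CommonConfigurationKeys.HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_DEFAULT) * 1000;
        System.out.println("uid/gid name cache timeout: " + timeoutMs + " ms");
      }
    }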

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java Sat Nov 10 00:49:15 2012
@@ -125,6 +125,11 @@ public abstract class DelegateToFileSyst
   public FsServerDefaults getServerDefaults() throws IOException {
     return fsImpl.getServerDefaults();
   }
+  
+  @Override
+  public Path getHomeDirectory() {
+    return fsImpl.getHomeDirectory();
+  }
 
   @Override
   public int getUriDefaultPort() {
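
[Editor's note] This override, presumably part of the HADOOP-8589 fix, makes DelegateToFileSystem report the wrapped FileSystem's notion of the home directory instead of the generic AbstractFileSystem default. A hedged sketch of the observable effect on the local file system, which the updated viewfs tests below appear to rely on (user.home rather than /user/<name>):

    import org.apache.hadoop.fs.FileContext;

    public class HomeDirDelegationExample {
      public static void main(String[] args) throws Exception {
        FileContext fc = FileContext.getLocalFSFileContext();
        // After this change the local AbstractFileSystem should delegate to
        // RawLocalFileSystem, so this should print a path under user.home.
        System.out.println(fc.getHomeDirectory());
      }
    }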

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java Sat Nov 10 00:49:15 2012
@@ -153,12 +153,6 @@ class ChRootedFileSystem extends FilterF
     return makeQualified(
         new Path(chRootPathPartString + f.toUri().toString()));
   }
-  
-  @Override
-  public Path getHomeDirectory() {
-    return  new Path("/user/"+System.getProperty("user.name")).makeQualified(
-          getUri(), null);
-  }
 
   @Override
   public Path getWorkingDirectory() {

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java Sat Nov 10 00:49:15 2012
@@ -256,8 +256,9 @@ public class ViewFileSystem extends File
       if (base == null) {
         base = "/user";
       }
-      homeDir = 
-        this.makeQualified(new Path(base + "/" + ugi.getShortUserName()));
+      homeDir = (base.equals("/") ? 
+          this.makeQualified(new Path(base + ugi.getShortUserName())):
+          this.makeQualified(new Path(base + "/" + ugi.getShortUserName())));
     }
     return homeDir;
   }
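
[Editor's note] The new ternary avoids producing a double slash when the configured home-directory base is "/". A tiny illustrative sketch of the string arithmetic (user name hypothetical):

    public class HomeDirBaseExample {
      public static void main(String[] args) {
        String user = "bob";                       // hypothetical ugi.getShortUserName()
        System.out.println("/user" + "/" + user);  // /user/bob  -- normal base
        System.out.println("/" + "/" + user);      // //bob      -- the bug being avoided
        String base = "/";
        System.out.println(base.equals("/") ? base + user : base + "/" + user); // /bob
      }
    }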

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java Sat Nov 10 00:49:15 2012
@@ -248,8 +248,9 @@ public class ViewFs extends AbstractFile
       if (base == null) {
         base = "/user";
       }
-      homeDir = 
-        this.makeQualified(new Path(base + "/" + ugi.getShortUserName()));
+      homeDir = (base.equals("/") ? 
+        this.makeQualified(new Path(base + ugi.getShortUserName())):
+        this.makeQualified(new Path(base + "/" + ugi.getShortUserName())));
     }
     return homeDir;
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java Sat Nov 10 00:49:15 2012
@@ -120,7 +120,7 @@ public class SecureIOUtils {
     FileInputStream fis = new FileInputStream(f);
     boolean success = false;
     try {
-      Stat stat = NativeIO.fstat(fis.getFD());
+      Stat stat = NativeIO.getFstat(fis.getFD());
       checkStat(f, stat.getOwner(), stat.getGroup(), expectedOwner,
           expectedGroup);
       success = true;

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java Sat Nov 10 00:49:15 2012
@@ -19,8 +19,13 @@ package org.apache.hadoop.io.nativeio;
 
 import java.io.FileDescriptor;
 import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.util.NativeCodeLoader;
 
 import org.apache.commons.logging.Log;
@@ -30,6 +35,8 @@ import org.apache.commons.logging.LogFac
  * These functions should generally be used alongside a fallback to another
  * more portable mechanism.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
 public class NativeIO {
   // Flags for open() call from bits/fcntl.h
   public static final int O_RDONLY   =    00;
@@ -86,6 +93,8 @@ public class NativeIO {
     "hadoop.workaround.non.threadsafe.getpwuid";
   static final boolean WORKAROUND_NON_THREADSAFE_CALLS_DEFAULT = false;
 
+  private static long cacheTimeout = -1;
+
   static {
     if (NativeCodeLoader.isNativeCodeLoaded()) {
       try {
@@ -96,6 +105,14 @@ public class NativeIO {
 
         initNative();
         nativeLoaded = true;
+
+        cacheTimeout = conf.getLong(
+          CommonConfigurationKeys.HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_KEY,
+          CommonConfigurationKeys.HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_DEFAULT) *
+          1000;
+        LOG.debug("Initialized cache for IDs to User/Group mapping with a" +
+          " cache timeout of " + cacheTimeout/1000 + " seconds.");
+
       } catch (Throwable t) {
         // This can happen if the user has an older version of libhadoop.so
         // installed - in this case we can continue without native IO
@@ -115,7 +132,7 @@ public class NativeIO {
   /** Wrapper around open(2) */
   public static native FileDescriptor open(String path, int flags, int mode) throws IOException;
   /** Wrapper around fstat(2) */
-  public static native Stat fstat(FileDescriptor fd) throws IOException;
+  private static native Stat fstat(FileDescriptor fd) throws IOException;
   /** Wrapper around chmod(2) */
   public static native void chmod(String path, int mode) throws IOException;
 
@@ -176,6 +193,7 @@ public class NativeIO {
    * Result type of the fstat call
    */
   public static class Stat {
+    private int ownerId, groupId;
     private String owner, group;
     private int mode;
 
@@ -196,9 +214,9 @@ public class NativeIO {
     public static final int S_IWUSR = 0000200;  /* write permission, owner */
     public static final int S_IXUSR = 0000100;  /* execute/search permission, owner */
 
-    Stat(String owner, String group, int mode) {
-      this.owner = owner;
-      this.group = group;
+    Stat(int ownerId, int groupId, int mode) {
+      this.ownerId = ownerId;
+      this.groupId = groupId;
       this.mode = mode;
     }
 
@@ -218,4 +236,61 @@ public class NativeIO {
       return mode;
     }
   }
+
+  static native String getUserName(int uid) throws IOException;
+
+  static native String getGroupName(int uid) throws IOException;
+
+  private static class CachedName {
+    final long timestamp;
+    final String name;
+
+    public CachedName(String name, long timestamp) {
+      this.name = name;
+      this.timestamp = timestamp;
+    }
+  }
+
+  private static final Map<Integer, CachedName> USER_ID_NAME_CACHE =
+    new ConcurrentHashMap<Integer, CachedName>();
+
+  private static final Map<Integer, CachedName> GROUP_ID_NAME_CACHE =
+    new ConcurrentHashMap<Integer, CachedName>();
+
+  private enum IdCache { USER, GROUP }
+
+  private static String getName(IdCache domain, int id) throws IOException {
+    Map<Integer, CachedName> idNameCache = (domain == IdCache.USER)
+      ? USER_ID_NAME_CACHE : GROUP_ID_NAME_CACHE;
+    String name;
+    CachedName cachedName = idNameCache.get(id);
+    long now = System.currentTimeMillis();
+    if (cachedName != null && (cachedName.timestamp + cacheTimeout) > now) {
+      name = cachedName.name;
+    } else {
+      name = (domain == IdCache.USER) ? getUserName(id) : getGroupName(id);
+      if (LOG.isDebugEnabled()) {
+        String type = (domain == IdCache.USER) ? "UserName" : "GroupName";
+        LOG.debug("Got " + type + " " + name + " for ID " + id +
+          " from the native implementation");
+      }
+      cachedName = new CachedName(name, now);
+      idNameCache.put(id, cachedName);
+    }
+    return name;
+  }
+
+  /**
+   * Returns the file stat for a file descriptor.
+   *
+   * @param fd file descriptor.
+   * @return the file descriptor file stat.
+   * @throws IOException thrown if there was an IO error while obtaining the file stat.
+   */
+  public static Stat getFstat(FileDescriptor fd) throws IOException {
+    Stat stat = fstat(fd);
+    stat.owner = getName(IdCache.USER, stat.ownerId);
+    stat.group = getName(IdCache.GROUP, stat.groupId);
+    return stat;
+  }
 }
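
[Editor's note] getFstat() is the new public entry point (fstat() is now private) and resolves the raw uid/gid returned by the native stat into names through the caches above. A hedged usage sketch; the file path is hypothetical and libhadoop must be loaded for NativeIO to work:

    import java.io.FileInputStream;
    import org.apache.hadoop.io.nativeio.NativeIO;

    public class GetFstatExample {
      public static void main(String[] args) throws Exception {
        FileInputStream fis = new FileInputStream("/tmp/example"); // hypothetical path
        try {
          NativeIO.Stat stat = NativeIO.getFstat(fis.getFD());
          // Names come from USER_ID_NAME_CACHE/GROUP_ID_NAME_CACHE; a second
          // call within hadoop.security.uid.cache.secs skips the native
          // getpwuid_r/getgrgid_r lookups entirely.
          System.out.println(stat.getOwner() + ":" + stat.getGroup());
        } finally {
          fis.close();
        }
      }
    }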

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Sat Nov 10 00:49:15 2012
@@ -57,6 +57,7 @@ import java.util.concurrent.BlockingQueu
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.LinkedBlockingQueue;
 
+import javax.security.auth.callback.CallbackHandler;
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
@@ -87,6 +88,7 @@ import org.apache.hadoop.security.SaslRp
 import org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
 import org.apache.hadoop.security.SaslRpcServer.SaslStatus;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.security.authorize.ProxyUsers;
@@ -1078,7 +1080,6 @@ public abstract class Server {
     
     IpcConnectionContextProto connectionContext;
     String protocolName;
-    boolean useSasl;
     SaslServer saslServer;
     private AuthMethod authMethod;
     private boolean saslContextEstablished;
@@ -1194,49 +1195,6 @@ public abstract class Server {
       if (!saslContextEstablished) {
         byte[] replyToken = null;
         try {
-          if (saslServer == null) {
-            switch (authMethod) {
-            case DIGEST:
-              if (secretManager == null) {
-                throw new AccessControlException(
-                    "Server is not configured to do DIGEST authentication.");
-              }
-              secretManager.checkAvailableForRead();
-              saslServer = Sasl.createSaslServer(AuthMethod.DIGEST
-                  .getMechanismName(), null, SaslRpcServer.SASL_DEFAULT_REALM,
-                  SaslRpcServer.SASL_PROPS, new SaslDigestCallbackHandler(
-                      secretManager, this));
-              break;
-            default:
-              UserGroupInformation current = UserGroupInformation
-                  .getCurrentUser();
-              String fullName = current.getUserName();
-              if (LOG.isDebugEnabled())
-                LOG.debug("Kerberos principal name is " + fullName);
-              final String names[] = SaslRpcServer.splitKerberosName(fullName);
-              if (names.length != 3) {
-                throw new AccessControlException(
-                    "Kerberos principal name does NOT have the expected "
-                        + "hostname part: " + fullName);
-              }
-              current.doAs(new PrivilegedExceptionAction<Object>() {
-                @Override
-                public Object run() throws SaslException {
-                  saslServer = Sasl.createSaslServer(AuthMethod.KERBEROS
-                      .getMechanismName(), names[0], names[1],
-                      SaslRpcServer.SASL_PROPS, new SaslGssCallbackHandler());
-                  return null;
-                }
-              });
-            }
-            if (saslServer == null)
-              throw new AccessControlException(
-                  "Unable to find SASL server implementation for "
-                      + authMethod.getMechanismName());
-            if (LOG.isDebugEnabled())
-              LOG.debug("Created SASL server with mechanism = "
-                  + authMethod.getMechanismName());
-          }
           if (LOG.isDebugEnabled())
             LOG.debug("Have read input token of size " + saslToken.length
                 + " for processing by saslServer.evaluateResponse()");
@@ -1375,38 +1333,27 @@ public abstract class Server {
           dataLengthBuffer.clear();
           if (authMethod == null) {
             throw new IOException("Unable to read authentication method");
-          }          
+          }
+          boolean useSaslServer = isSecurityEnabled;
           final boolean clientUsingSasl;
           switch (authMethod) {
             case SIMPLE: { // no sasl for simple
-              if (isSecurityEnabled) {
-                AccessControlException ae = new AccessControlException("Authorization ("
-                    + CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION
-                    + ") is enabled but authentication ("
-                    + CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION
-                    + ") is configured as simple. Please configure another method "
-                    + "like kerberos or digest.");
-                setupResponse(authFailedResponse, authFailedCall, RpcStatusProto.FATAL,
-                    null, ae.getClass().getName(), ae.getMessage());
-                responder.doRespond(authFailedCall);
-                throw ae;
-              }
               clientUsingSasl = false;
-              useSasl = false; 
               break;
             }
-            case DIGEST: {
+            case DIGEST: { // always allow tokens if there's a secret manager
+              useSaslServer |= (secretManager != null);
               clientUsingSasl = true;
-              useSasl = (secretManager != null);
               break;
             }
             default: {
               clientUsingSasl = true;
-              useSasl = isSecurityEnabled; 
               break;
             }
-          }          
-          if (clientUsingSasl && !useSasl) {
+          }
+          if (useSaslServer) {
+            saslServer = createSaslServer(authMethod);
+          } else if (clientUsingSasl) { // security is off
             doSaslReply(SaslStatus.SUCCESS, new IntWritable(
                 SaslRpcServer.SWITCH_TO_SIMPLE_AUTH), null, null);
             authMethod = AuthMethod.SIMPLE;
@@ -1448,7 +1395,7 @@ public abstract class Server {
             continue;
           }
           boolean isHeaderRead = connectionContextRead;
-          if (useSasl) {
+          if (saslServer != null) {
             saslReadAndProcess(data.array());
           } else {
             processOneRpc(data.array());
@@ -1462,6 +1409,84 @@ public abstract class Server {
       }
     }
 
+    private SaslServer createSaslServer(AuthMethod authMethod)
+        throws IOException {
+      try {
+        return createSaslServerInternal(authMethod);
+      } catch (IOException ioe) {
+        final String ioeClass = ioe.getClass().getName();
+        final String ioeMessage  = ioe.getLocalizedMessage();
+        if (authMethod == AuthMethod.SIMPLE) {
+          setupResponse(authFailedResponse, authFailedCall,
+              RpcStatusProto.FATAL, null, ioeClass, ioeMessage);
+          responder.doRespond(authFailedCall);
+        } else {
+          doSaslReply(SaslStatus.ERROR, null, ioeClass, ioeMessage);
+        }
+        throw ioe;
+      }
+    }
+
+    private SaslServer createSaslServerInternal(AuthMethod authMethod)
+        throws IOException {
+      SaslServer saslServer = null;
+      String hostname = null;
+      String saslProtocol = null;
+      CallbackHandler saslCallback = null;
+      
+      switch (authMethod) {
+        case SIMPLE: {
+          throw new AccessControlException("Authorization ("
+              + CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION
+              + ") is enabled but authentication ("
+              + CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION
+              + ") is configured as simple. Please configure another method "
+              + "like kerberos or digest.");
+        }
+        case DIGEST: {
+          if (secretManager == null) {
+            throw new AccessControlException(
+                "Server is not configured to do DIGEST authentication.");
+          }
+          secretManager.checkAvailableForRead();
+          hostname = SaslRpcServer.SASL_DEFAULT_REALM;
+          saslCallback = new SaslDigestCallbackHandler(secretManager, this);
+          break;
+        }
+        case KERBEROS: {
+          String fullName = UserGroupInformation.getCurrentUser().getUserName();
+          if (LOG.isDebugEnabled())
+            LOG.debug("Kerberos principal name is " + fullName);
+          KerberosName krbName = new KerberosName(fullName);
+          hostname = krbName.getHostName();
+          if (hostname == null) {
+            throw new AccessControlException(
+                "Kerberos principal name does NOT have the expected "
+                    + "hostname part: " + fullName);
+          }
+          saslProtocol = krbName.getServiceName();
+          saslCallback = new SaslGssCallbackHandler();
+          break;
+        }
+        default:
+          throw new AccessControlException(
+              "Server does not support SASL " + authMethod);
+      }
+      
+      String mechanism = authMethod.getMechanismName();
+      saslServer = Sasl.createSaslServer(
+          mechanism, saslProtocol, hostname,
+          SaslRpcServer.SASL_PROPS, saslCallback);
+      if (saslServer == null) {
+        throw new AccessControlException(
+            "Unable to find SASL server implementation for " + mechanism);
+      }
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Created SASL server with mechanism = " + mechanism);
+      }
+      return saslServer;
+    }
+    
     /**
      * Try to set up the response to indicate that the client version
      * is incompatible with the server. This can contain special-case
@@ -1523,7 +1548,7 @@ public abstract class Server {
           .getProtocol() : null;
 
       UserGroupInformation protocolUser = ProtoUtil.getUgi(connectionContext);
-      if (!useSasl) {
+      if (saslServer == null) {
         user = protocolUser;
         if (user != null) {
           user.setAuthenticationMethod(AuthMethod.SIMPLE);
@@ -1999,7 +2024,7 @@ public abstract class Server {
   
   private void wrapWithSasl(ByteArrayOutputStream response, Call call)
       throws IOException {
-    if (call.connection.useSasl) {
+    if (call.connection.saslServer != null) {
       byte[] token = response.toByteArray();
       // synchronization may be needed since there can be multiple Handler
       // threads using saslServer to wrap responses.
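
[Editor's note] The refactor above (HADOOP-9015) collapses the per-method inline construction into a single Sasl.createSaslServer call whose protocol, server name, and callback handler vary per AuthMethod. A generic sketch of that pattern; the helper name is illustrative, not Hadoop API:

    import java.util.Map;
    import javax.security.auth.callback.CallbackHandler;
    import javax.security.sasl.Sasl;
    import javax.security.sasl.SaslException;
    import javax.security.sasl.SaslServer;

    public class SaslServerCreationSketch {
      static SaslServer create(String mechanism, String protocol, String serverName,
          Map<String, ?> props, CallbackHandler callback) throws SaslException {
        SaslServer server =
            Sasl.createSaslServer(mechanism, protocol, serverName, props, callback);
        if (server == null) {
          // No registered provider supports the mechanism -- the same condition
          // createSaslServerInternal() turns into an AccessControlException.
          throw new SaslException("No SASL server implementation for " + mechanism);
        }
        return server;
      }
    }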

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java Sat Nov 10 00:49:15 2012
@@ -25,6 +25,7 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.util.Map;
 
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.CallbackHandler;
@@ -45,6 +46,7 @@ import org.apache.hadoop.io.WritableUtil
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
 import org.apache.hadoop.security.SaslRpcServer.SaslStatus;
+import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 
@@ -69,40 +71,48 @@ public class SaslRpcClient {
   public SaslRpcClient(AuthMethod method,
       Token<? extends TokenIdentifier> token, String serverPrincipal)
       throws IOException {
+    String saslUser = null;
+    String saslProtocol = null;
+    String saslServerName = null;
+    Map<String, String> saslProperties = SaslRpcServer.SASL_PROPS;
+    CallbackHandler saslCallback = null;
+    
     switch (method) {
-    case DIGEST:
-      if (LOG.isDebugEnabled())
-        LOG.debug("Creating SASL " + AuthMethod.DIGEST.getMechanismName()
-            + " client to authenticate to service at " + token.getService());
-      saslClient = Sasl.createSaslClient(new String[] { AuthMethod.DIGEST
-          .getMechanismName() }, null, null, SaslRpcServer.SASL_DEFAULT_REALM,
-          SaslRpcServer.SASL_PROPS, new SaslClientCallbackHandler(token));
-      break;
-    case KERBEROS:
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("Creating SASL " + AuthMethod.KERBEROS.getMechanismName()
-            + " client. Server's Kerberos principal name is "
-            + serverPrincipal);
-      }
-      if (serverPrincipal == null || serverPrincipal.length() == 0) {
-        throw new IOException(
-            "Failed to specify server's Kerberos principal name");
-      }
-      String names[] = SaslRpcServer.splitKerberosName(serverPrincipal);
-      if (names.length != 3) {
-        throw new IOException(
-          "Kerberos principal name does NOT have the expected hostname part: "
-                + serverPrincipal);
-      }
-      saslClient = Sasl.createSaslClient(new String[] { AuthMethod.KERBEROS
-          .getMechanismName() }, null, names[0], names[1],
-          SaslRpcServer.SASL_PROPS, null);
-      break;
-    default:
-      throw new IOException("Unknown authentication method " + method);
+      case DIGEST: {
+        saslServerName = SaslRpcServer.SASL_DEFAULT_REALM;
+        saslCallback = new SaslClientCallbackHandler(token);
+        break;
+      }
+      case KERBEROS: {
+        if (serverPrincipal == null || serverPrincipal.isEmpty()) {
+          throw new IOException(
+              "Failed to specify server's Kerberos principal name");
+        }
+        KerberosName name = new KerberosName(serverPrincipal);
+        saslProtocol = name.getServiceName();
+        saslServerName = name.getHostName();
+        if (saslServerName == null) {
+          throw new IOException(
+              "Kerberos principal name does NOT have the expected hostname part: "
+                  + serverPrincipal);
+        }
+        break;
+      }
+      default:
+        throw new IOException("Unknown authentication method " + method);
+    }
+    
+    String mechanism = method.getMechanismName();
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Creating SASL " + mechanism
+          + " client to authenticate to service at " + saslServerName);
     }
-    if (saslClient == null)
+    saslClient = Sasl.createSaslClient(
+        new String[] { mechanism }, saslUser, saslProtocol, saslServerName,
+        saslProperties, saslCallback);
+    if (saslClient == null) {
       throw new IOException("Unable to find SASL client implementation");
+    }
   }
 
   private static void readStatus(DataInputStream inStream) throws IOException {
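
[Editor's note] KerberosName replaces the manual splitKerberosName() parsing on both the client and server sides (HADOOP-9014). A small sketch of what it yields for a typical service principal (the principal value is hypothetical):

    import org.apache.hadoop.security.authentication.util.KerberosName;

    public class KerberosNameExample {
      public static void main(String[] args) {
        KerberosName name = new KerberosName("nn/nn1.example.com@EXAMPLE.COM");
        System.out.println(name.getServiceName()); // nn -> used as the SASL protocol
        System.out.println(name.getHostName());    // nn1.example.com -> SASL server name
        // For a principal without a hostname part, getHostName() returns null,
        // which both client and server now reject explicitly.
      }
    }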

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java Sat Nov 10 00:49:15 2012
@@ -23,6 +23,7 @@ import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.security.Security;
 import java.util.Map;
 import java.util.TreeMap;
 
@@ -89,6 +90,7 @@ public class SaslRpcServer {
     
     SASL_PROPS.put(Sasl.QOP, saslQOP.getSaslQop());
     SASL_PROPS.put(Sasl.SERVER_AUTH, "true");
+    Security.addProvider(new SaslPlainServer.SecurityProvider());
   }
   
   static String encodeIdentifier(byte[] identifier) {
@@ -138,7 +140,8 @@ public class SaslRpcServer {
   public static enum AuthMethod {
     SIMPLE((byte) 80, ""),
     KERBEROS((byte) 81, "GSSAPI"),
-    DIGEST((byte) 82, "DIGEST-MD5");
+    DIGEST((byte) 82, "DIGEST-MD5"),
+    PLAIN((byte) 83, "PLAIN");
 
     /** The code for this method. */
     public final byte code;
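
[Editor's note] The JDK ships a PLAIN SaslClient but no PLAIN SaslServer; the provider registered in the static block above supplies one via the new SaslPlainServer (HADOOP-9020). A sketch that lists server mechanisms after registration; it assumes SaslPlainServer.SecurityProvider is publicly constructible, as the static block in this hunk suggests:

    import java.security.Security;
    import java.util.Enumeration;
    import javax.security.sasl.Sasl;
    import javax.security.sasl.SaslServerFactory;
    import org.apache.hadoop.security.SaslPlainServer;

    public class PlainMechanismCheck {
      public static void main(String[] args) {
        Security.addProvider(new SaslPlainServer.SecurityProvider());
        Enumeration<SaslServerFactory> factories = Sasl.getSaslServerFactories();
        while (factories.hasMoreElements()) {
          for (String mech : factories.nextElement().getMechanismNames(null)) {
            System.out.println(mech); // PLAIN should now appear alongside DIGEST-MD5, GSSAPI, ...
          }
        }
      }
    }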

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Sat Nov 10 00:49:15 2012
@@ -238,14 +238,17 @@ public class UserGroupInformation {
    */
   private static synchronized void initUGI(Configuration conf) {
     AuthenticationMethod auth = SecurityUtil.getAuthenticationMethod(conf);
-    if (auth == AuthenticationMethod.SIMPLE) {
-      useKerberos = false;
-    } else if (auth == AuthenticationMethod.KERBEROS) {
-      useKerberos = true;
-    } else {
-      throw new IllegalArgumentException("Invalid attribute value for " +
-                                         HADOOP_SECURITY_AUTHENTICATION + 
-                                         " of " + auth);
+    switch (auth) {
+      case SIMPLE:
+        useKerberos = false;
+        break;
+      case KERBEROS:
+        useKerberos = true;
+        break;
+      default:
+        throw new IllegalArgumentException("Invalid attribute value for " +
+                                           HADOOP_SECURITY_AUTHENTICATION + 
+                                           " of " + auth);
     }
     try {
         kerberosMinSecondsBeforeRelogin = 1000L * conf.getLong(
@@ -637,19 +640,20 @@ public class UserGroupInformation {
       try {
         Subject subject = new Subject();
         LoginContext login;
+        AuthenticationMethod authenticationMethod;
         if (isSecurityEnabled()) {
+          authenticationMethod = AuthenticationMethod.KERBEROS;
           login = newLoginContext(HadoopConfiguration.USER_KERBEROS_CONFIG_NAME,
               subject, new HadoopConfiguration());
         } else {
+          authenticationMethod = AuthenticationMethod.SIMPLE;
           login = newLoginContext(HadoopConfiguration.SIMPLE_CONFIG_NAME, 
               subject, new HadoopConfiguration());
         }
         login.login();
         loginUser = new UserGroupInformation(subject);
         loginUser.setLogin(login);
-        loginUser.setAuthenticationMethod(isSecurityEnabled() ?
-                                          AuthenticationMethod.KERBEROS :
-                                          AuthenticationMethod.SIMPLE);
+        loginUser.setAuthenticationMethod(authenticationMethod);
         loginUser = new UserGroupInformation(login.getSubject());
         String fileLocation = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
         if (fileLocation != null) {

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Sat Nov 10 00:49:15 2012
@@ -72,16 +72,27 @@ static int workaround_non_threadsafe_cal
 static void stat_init(JNIEnv *env, jclass nativeio_class) {
   // Init Stat
   jclass clazz = (*env)->FindClass(env, "org/apache/hadoop/io/nativeio/NativeIO$Stat");
-  PASS_EXCEPTIONS(env);
+  if (!clazz) {
+    return; // exception has been raised
+  }
   stat_clazz = (*env)->NewGlobalRef(env, clazz);
+  if (!stat_clazz) {
+    return; // exception has been raised
+  }
   stat_ctor = (*env)->GetMethodID(env, stat_clazz, "<init>",
-    "(Ljava/lang/String;Ljava/lang/String;I)V");
-  
+    "(III)V");
+  if (!stat_ctor) {
+    return; // exception has been raised
+  }
   jclass obj_class = (*env)->FindClass(env, "java/lang/Object");
-  assert(obj_class != NULL);
+  if (!obj_class) {
+    return; // exception has been raised
+  }
   jmethodID  obj_ctor = (*env)->GetMethodID(env, obj_class,
     "<init>", "()V");
-  assert(obj_ctor != NULL);
+  if (!obj_ctor) {
+    return; // exception has been raised
+  }
 
   if (workaround_non_threadsafe_calls(env, nativeio_class)) {
     pw_lock_object = (*env)->NewObject(env, obj_class, obj_ctor);
@@ -158,8 +169,6 @@ Java_org_apache_hadoop_io_nativeio_Nativ
   JNIEnv *env, jclass clazz, jobject fd_object)
 {
   jobject ret = NULL;
-  char *pw_buf = NULL;
-  int pw_lock_locked = 0;
 
   int fd = fd_get(env, fd_object);
   PASS_EXCEPTIONS_GOTO(env, cleanup);
@@ -171,71 +180,14 @@ Java_org_apache_hadoop_io_nativeio_Nativ
     goto cleanup;
   }
 
-  size_t pw_buflen = get_pw_buflen();
-  if ((pw_buf = malloc(pw_buflen)) == NULL) {
-    THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
-    goto cleanup;
-  }
-
-  if (pw_lock_object != NULL) {
-    if ((*env)->MonitorEnter(env, pw_lock_object) != JNI_OK) {
-      goto cleanup;
-    }
-    pw_lock_locked = 1;
-  }
-
-  // Grab username
-  struct passwd pwd, *pwdp;
-  while ((rc = getpwuid_r(s.st_uid, &pwd, pw_buf, pw_buflen, &pwdp)) != 0) {
-    if (rc != ERANGE) {
-      throw_ioe(env, rc);
-      goto cleanup;
-    }
-    free(pw_buf);
-    pw_buflen *= 2;
-    if ((pw_buf = malloc(pw_buflen)) == NULL) {
-      THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
-      goto cleanup;
-    }
-  }
-  assert(pwdp == &pwd);
-
-  jstring jstr_username = (*env)->NewStringUTF(env, pwd.pw_name);
-  if (jstr_username == NULL) goto cleanup;
-
-  // Grab group
-  struct group grp, *grpp;
-  while ((rc = getgrgid_r(s.st_gid, &grp, pw_buf, pw_buflen, &grpp)) != 0) {
-    if (rc != ERANGE) {
-      throw_ioe(env, rc);
-      goto cleanup;
-    }
-    free(pw_buf);
-    pw_buflen *= 2;
-    if ((pw_buf = malloc(pw_buflen)) == NULL) {
-      THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
-      goto cleanup;
-    }
-  }
-  assert(grpp == &grp);
-
-  jstring jstr_groupname = (*env)->NewStringUTF(env, grp.gr_name);
-  PASS_EXCEPTIONS_GOTO(env, cleanup);
-
   // Construct result
   ret = (*env)->NewObject(env, stat_clazz, stat_ctor,
-    jstr_username, jstr_groupname, s.st_mode);
+    (jint)s.st_uid, (jint)s.st_gid, (jint)s.st_mode);
 
 cleanup:
-  if (pw_buf != NULL) free(pw_buf);
-  if (pw_lock_locked) {
-    (*env)->MonitorExit(env, pw_lock_object);
-  }
   return ret;
 }
 
-
-
 /**
  * public static native void posix_fadvise(
  *   FileDescriptor fd, long offset, long len, int flags);
@@ -385,6 +337,128 @@ Java_org_apache_hadoop_io_nativeio_Nativ
   (*env)->ReleaseStringUTFChars(env, j_path, path);
 }
 
+/*
+ * static native String getUserName(int uid);
+ */
+JNIEXPORT jstring JNICALL 
+Java_org_apache_hadoop_io_nativeio_NativeIO_getUserName(JNIEnv *env, 
+jclass clazz, jint uid)
+{
+  int pw_lock_locked = 0;
+  if (pw_lock_object != NULL) {
+    if ((*env)->MonitorEnter(env, pw_lock_object) != JNI_OK) {
+      goto cleanup;
+    }
+    pw_lock_locked = 1;
+  }
+
+  char *pw_buf = NULL;
+  int rc;
+  size_t pw_buflen = get_pw_buflen();
+  if ((pw_buf = malloc(pw_buflen)) == NULL) {
+    THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
+    goto cleanup;
+  }
+
+  // Grab username
+  struct passwd pwd, *pwdp;
+  while ((rc = getpwuid_r((uid_t)uid, &pwd, pw_buf, pw_buflen, &pwdp)) != 0) {
+    if (rc != ERANGE) {
+      throw_ioe(env, rc);
+      goto cleanup;
+    }
+    free(pw_buf);
+    pw_buflen *= 2;
+    if ((pw_buf = malloc(pw_buflen)) == NULL) {
+      THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
+      goto cleanup;
+    }
+  }
+  if (pwdp == NULL) {
+    char msg[80];
+    snprintf(msg, sizeof(msg), "uid not found: %d", uid);
+    THROW(env, "java/io/IOException", msg);
+    goto cleanup;
+  }
+  if (pwdp != &pwd) {
+    char msg[80];
+    snprintf(msg, sizeof(msg), "pwd pointer inconsistent with reference. uid: %d", uid);
+    THROW(env, "java/lang/IllegalStateException", msg);
+    goto cleanup;
+  }
+
+  jstring jstr_username = (*env)->NewStringUTF(env, pwd.pw_name);
+
+cleanup:
+  if (pw_lock_locked) {
+    (*env)->MonitorExit(env, pw_lock_object);
+  }
+  if (pw_buf != NULL) free(pw_buf);
+  return jstr_username;
+}
+
+/*
+ * static native String getGroupName(int gid);
+ */
+JNIEXPORT jstring JNICALL 
+Java_org_apache_hadoop_io_nativeio_NativeIO_getGroupName(JNIEnv *env, 
+jclass clazz, jint gid)
+{
+  int pw_lock_locked = 0;
+ 
+  if (pw_lock_object != NULL) {
+    if ((*env)->MonitorEnter(env, pw_lock_object) != JNI_OK) {
+      goto cleanup;
+    }
+    pw_lock_locked = 1;
+  }
+  
+  char *pw_buf = NULL;
+  int rc;
+  size_t pw_buflen = get_pw_buflen();
+  if ((pw_buf = malloc(pw_buflen)) == NULL) {
+    THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
+    goto cleanup;
+  }
+  
+  // Grab group
+  struct group grp, *grpp;
+  while ((rc = getgrgid_r((uid_t)gid, &grp, pw_buf, pw_buflen, &grpp)) != 0) {
+    if (rc != ERANGE) {
+      throw_ioe(env, rc);
+      goto cleanup;
+    }
+    free(pw_buf);
+    pw_buflen *= 2;
+    if ((pw_buf = malloc(pw_buflen)) == NULL) {
+      THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
+      goto cleanup;
+    }
+  }
+  if (grpp == NULL) {
+    char msg[80];
+    snprintf(msg, sizeof(msg), "gid not found: %d", gid);
+    THROW(env, "java/io/IOException", msg);
+    goto cleanup;
+  }
+  if (grpp != &grp) {
+    char msg[80];
+    snprintf(msg, sizeof(msg), "pwd pointer inconsistent with reference. gid: %d", gid);
+    THROW(env, "java/lang/IllegalStateException", msg);
+    goto cleanup;
+  }
+
+  jstring jstr_groupname = (*env)->NewStringUTF(env, grp.gr_name);
+  PASS_EXCEPTIONS_GOTO(env, cleanup);
+  
+cleanup:
+  if (pw_lock_locked) {
+    (*env)->MonitorExit(env, pw_lock_object);
+  }
+  if (pw_buf != NULL) free(pw_buf);
+  return jstr_groupname;
+}
+
 
 /*
  * Throw a java.IO.IOException, generating the message from errno.

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Sat Nov 10 00:49:15 2012
@@ -214,6 +214,17 @@
   </description>
 </property>
 
+
+<property>
+    <name>hadoop.security.uid.cache.secs</name>
+    <value>14400</value>
+    <description>
+        This is the config controlling the validity of the entries in the cache
+        containing the userId to userName and groupId to groupName used by
+        NativeIO getFstat().
+    </description>
+</property>
+
 <property>
   <name>hadoop.rpc.protection</name>
   <value>authentication</value>

Propchange: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1406415-1407703

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java Sat Nov 10 00:49:15 2012
@@ -61,19 +61,28 @@ public final class FileSystemTestHelper 
     return data;
   }
   
+  
+  /*
+   * get testRootPath qualified for fSys
+   */
   public static Path getTestRootPath(FileSystem fSys) {
     return fSys.makeQualified(new Path(TEST_ROOT_DIR));
   }
 
+  /*
+   * get testRootPath + pathString qualified for fSys
+   */
   public static Path getTestRootPath(FileSystem fSys, String pathString) {
     return fSys.makeQualified(new Path(TEST_ROOT_DIR, pathString));
   }
   
   
   // the getAbsolutexxx method is needed because the root test dir
-  // can be messed up by changing the working dir.
+  // can be messed up by changing the working dir since the TEST_ROOT_PATH
+  // is often relative to the working directory of process
+  // running the unit tests.
 
-  public static String getAbsoluteTestRootDir(FileSystem fSys)
+  static String getAbsoluteTestRootDir(FileSystem fSys)
       throws IOException {
     // NOTE: can't cache because of different filesystems!
     //if (absTestRootDir == null) 

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java Sat Nov 10 00:49:15 2012
@@ -23,6 +23,8 @@ import java.io.File;
 import java.io.IOException;
 import java.util.EnumSet;
 
+import org.apache.hadoop.util.Shell;
+
 public class TestDFVariations extends TestCase {
 
   public static class XXDF extends DF {
@@ -51,7 +53,9 @@ public class TestDFVariations extends Te
   public void testOSParsing() throws Exception {
     for (DF.OSType ost : EnumSet.allOf(DF.OSType.class)) {
       XXDF df = new XXDF(ost.getId());
-      assertEquals(ost.getId() + " mount", "/foo/bar", df.getMount());
+      assertEquals(ost.getId() + " mount",
+        Shell.WINDOWS ? df.getDirPath().substring(0, 2) : "/foo/bar",
+        df.getMount());
     }
   }
 

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java Sat Nov 10 00:49:15 2012
@@ -73,10 +73,10 @@ public class TestChRootedFileSystem {
     URI uri = fSys.getUri();
     Assert.assertEquals(chrootedTo.toUri(), uri);
     Assert.assertEquals(fSys.makeQualified(
-        new Path("/user/" + System.getProperty("user.name"))),
+        new Path(System.getProperty("user.home"))),
         fSys.getWorkingDirectory());
     Assert.assertEquals(fSys.makeQualified(
-        new Path("/user/" + System.getProperty("user.name"))),
+        new Path(System.getProperty("user.home"))),
         fSys.getHomeDirectory());
     /*
      * ChRootedFs as its uri like file:///chrootRoot.

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java Sat Nov 10 00:49:15 2012
@@ -70,10 +70,10 @@ public class TestChRootedFs {
     URI uri = fc.getDefaultFileSystem().getUri();
     Assert.assertEquals(chrootedTo.toUri(), uri);
     Assert.assertEquals(fc.makeQualified(
-        new Path("/user/" + System.getProperty("user.name"))),
+        new Path(System.getProperty("user.home"))),
         fc.getWorkingDirectory());
     Assert.assertEquals(fc.makeQualified(
-        new Path("/user/" + System.getProperty("user.name"))),
+        new Path(System.getProperty("user.home"))),
         fc.getHomeDirectory());
     /*
      * ChRootedFs as its uri like file:///chrootRoot.

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java Sat Nov 10 00:49:15 2012
@@ -39,44 +39,7 @@ public class TestFcMainOperationsLocalFs
   @Override
   @Before
   public void setUp() throws Exception {
-    /**
-     * create the test root on local_fs - the  mount table will point here
-     */
-    fclocal = FileContext.getLocalFSFileContext();
-    targetOfTests = FileContextTestHelper.getTestRootPath(fclocal);
-    // In case previous test was killed before cleanup
-    fclocal.delete(targetOfTests, true);
-    
-    fclocal.mkdir(targetOfTests, FileContext.DEFAULT_PERM, true);
-
-    
-    
-    
-    // We create mount table so that the test root on the viewFs points to 
-    // to the test root on the target.
-    // DOing this helps verify the FileStatus.path.
-    //
-    // The test root by default when running eclipse 
-    // is a test dir below the working directory. 
-    // (see FileContextTestHelper).
-    // Since viewFs has no built-in wd, its wd is /user/<username>.
-    // If this test launched via ant (build.xml) the test root is absolute path
-    
-    String srcTestRoot;
-    if (FileContextTestHelper.TEST_ROOT_DIR.startsWith("/")) {
-      srcTestRoot = FileContextTestHelper.TEST_ROOT_DIR;
-    } else {
-      srcTestRoot = "/user/"  + System.getProperty("user.name") + "/" +
-      FileContextTestHelper.TEST_ROOT_DIR;
-    }
-
-    Configuration conf = new Configuration();
-    ConfigUtil.addLink(conf, srcTestRoot,
-        targetOfTests.toUri());
-    
-    fc = FileContext.getFileContext(FsConstants.VIEWFS_URI, conf);
-    //System.out.println("SRCOfTests = "+ FileContextTestHelper.getTestRootPath(fc, "test"));
-    //System.out.println("TargetOfTests = "+ targetOfTests.toUri());
+    fc = ViewFsTestSetup.setupForViewFsLocalFs();
     super.setUp();
   }
   
@@ -84,6 +47,6 @@ public class TestFcMainOperationsLocalFs
   @After
   public void tearDown() throws Exception {
     super.tearDown();
-    fclocal.delete(targetOfTests, true);
+    ViewFsTestSetup.tearDownForViewFsLocalFs();
   }
-}
\ No newline at end of file
+}

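The inline mount-table plumbing removed above now lives in shared helpers, so any FileContext-based viewfs test can reuse the same lifecycle. Roughly, a test following the new pattern looks like this; the class name is illustrative, while the helper names come from the patch:

    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.viewfs.ViewFsTestSetup;
    import org.junit.After;
    import org.junit.Before;

    public class SomeViewFsLocalFsTest {  // hypothetical test class
      private FileContext fc;

      @Before
      public void setUp() throws Exception {
        // Builds the local test root, the mount table, and the working dir.
        fc = ViewFsTestSetup.setupForViewFsLocalFs();
      }

      @After
      public void tearDown() throws Exception {
        // Deletes the local test root created by setup.
        ViewFsTestSetup.tearDownForViewFsLocalFs();
      }
    }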
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java Sat Nov 10 00:49:15 2012
@@ -17,7 +17,10 @@
  */
 package org.apache.hadoop.fs.viewfs;
 
+import java.net.URI;
+
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystemTestHelper;
 import org.apache.hadoop.fs.FsConstants;
@@ -32,14 +35,19 @@ import org.mortbay.log.Log;
  * 
  * If tests are launched via ant (build.xml), the test root is an absolute path;
  * if tests are launched via eclipse, the test root is
- * is a test dir below the working directory. (see FileSystemTestHelper).
- * Since viewFs has no built-in wd, its wd is /user/<username> 
- *          (or /User/<username> on mac)
+ * a test dir below the working directory (see FileSystemTestHelper).
+ * 
+ * We set up a viewFileSystem with 3 mount points: 
+ * 1) /<firstComponent> of the test dir, pointing to the same path in the target fs
+ * 2) /<firstComponent> of the home dir, pointing to the same path in the target fs
+ * 3) /<firstComponent> of the working dir, pointing to the same path in the target fs
+ * (note that in many cases the links may be the same - viewFileSystem handles this)
  * 
- * We set a viewFileSystems with mount point for 
- * /<firstComponent>" pointing to the target fs's  testdir 
+ * We also set the viewFileSystem's working directory to the target fs's working directory.
  */
 public class ViewFileSystemTestSetup {
+  
+  public static String ViewFSTestDir = "/testDir";
 
   /**
    * 
@@ -56,24 +64,26 @@ public class ViewFileSystemTestSetup {
     fsTarget.delete(targetOfTests, true);
     fsTarget.mkdirs(targetOfTests);
 
-    // Setup a link from viewfs to targetfs for the first component of
-    // path of testdir.
+
+    // Set up viewfs link for test dir as described above
     String testDir = FileSystemTestHelper.getTestRootPath(fsTarget).toUri()
         .getPath();
-    int indexOf2ndSlash = testDir.indexOf('/', 1);
-    String testDirFirstComponent = testDir.substring(0, indexOf2ndSlash);
-    ConfigUtil.addLink(conf, testDirFirstComponent, fsTarget.makeQualified(
-        new Path(testDirFirstComponent)).toUri());
-
-    // viewFs://home => fsTarget://home
-    String homeDirRoot = fsTarget.getHomeDirectory()
-        .getParent().toUri().getPath();
-    ConfigUtil.addLink(conf, homeDirRoot,
-        fsTarget.makeQualified(new Path(homeDirRoot)).toUri());
-    ConfigUtil.setHomeDirConf(conf, homeDirRoot);
-    Log.info("Home dir base " + homeDirRoot);
+    linkUpFirstComponents(conf, testDir, fsTarget, "test dir");
+    
+    
+    // Set up viewfs link for home dir as described above
+    setUpHomeDir(conf, fsTarget);
+    
+    
+    // the test path may be relative to working dir - we need to make that work:
+    // Set up viewfs link for wd as described above
+    String wdDir = fsTarget.getWorkingDirectory().toUri().getPath();
+    linkUpFirstComponents(conf, wdDir, fsTarget, "working dir");
+
 
     FileSystem fsView = FileSystem.get(FsConstants.VIEWFS_URI, conf);
+    fsView.setWorkingDirectory(new Path(wdDir)); // in case the test dir is relative to the wd
+    Log.info("Working dir is: " + fsView.getWorkingDirectory());
     return fsView;
   }
 
@@ -91,4 +101,33 @@ public class ViewFileSystemTestSetup {
     conf.set("fs.viewfs.impl", ViewFileSystem.class.getName());
     return conf; 
   }
+  
+  static void setUpHomeDir(Configuration conf, FileSystem fsTarget) {
+    String homeDir = fsTarget.getHomeDirectory().toUri().getPath();
+    int indexOf2ndSlash = homeDir.indexOf('/', 1);
+    if (indexOf2ndSlash > 0) {
+      linkUpFirstComponents(conf, homeDir, fsTarget, "home dir");
+    } else { // home dir is at root; just link the home dir itself
+      URI linkTarget = fsTarget.makeQualified(new Path(homeDir)).toUri();
+      ConfigUtil.addLink(conf, homeDir, linkTarget);
+      Log.info("Added link for home dir " + homeDir + "->" + linkTarget);
+    }
+    // Now set the root of the home dir for viewfs
+    String homeDirRoot = fsTarget.getHomeDirectory().getParent().toUri().getPath();
+    ConfigUtil.setHomeDirConf(conf, homeDirRoot);
+    Log.info("Home dir base for viewfs" + homeDirRoot);  
+  }
+  
+  /*
+   * Set up link in config for first component of path to the same
+   * in the target file system.
+   */
+  static void linkUpFirstComponents(Configuration conf, String path, FileSystem fsTarget, String info) {
+    int indexOf2ndSlash = path.indexOf('/', 1);
+    String firstComponent = path.substring(0, indexOf2ndSlash);
+    URI linkTarget = fsTarget.makeQualified(new Path(firstComponent)).toUri();
+    ConfigUtil.addLink(conf, firstComponent, linkTarget);
+    Log.info("Added link for " + info + " " 
+        + firstComponent + "->" + linkTarget);    
+  }
 }

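For reference, the essential mount-table wiring performed by linkUpFirstComponents and setUpHomeDir above, condensed into a standalone sketch. The literal path stands in for the computed test/home/working-dir paths, and the config key set here mirrors what createConfig() does:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FsConstants;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.viewfs.ConfigUtil;
    import org.apache.hadoop.fs.viewfs.ViewFileSystem;

    public class MountTableSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.viewfs.impl", ViewFileSystem.class.getName()); // as createConfig() does
        FileSystem target = FileSystem.getLocal(conf);

        String path = "/home/alice/workspace/test";  // stand-in for a computed path
        int indexOf2ndSlash = path.indexOf('/', 1);
        String firstComponent = path.substring(0, indexOf2ndSlash);  // "/home"
        URI linkTarget = target.makeQualified(new Path(firstComponent)).toUri();
        ConfigUtil.addLink(conf, firstComponent, linkTarget);  // viewfs /home -> file:/home

        FileSystem fsView = FileSystem.get(FsConstants.VIEWFS_URI, conf);
        fsView.setWorkingDirectory(new Path("/home/alice/workspace"));
        System.out.println(fsView.getWorkingDirectory());
      }
    }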
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java Sat Nov 10 00:49:15 2012
@@ -17,12 +17,15 @@
  */
 package org.apache.hadoop.fs.viewfs;
 
+import java.net.URI;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.FileContextTestHelper;
 import org.apache.hadoop.fs.FsConstants;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.viewfs.ConfigUtil;
+import org.mortbay.log.Log;
 
 
 /**
@@ -31,13 +34,20 @@ import org.apache.hadoop.fs.viewfs.Confi
  * 
  * If tests are launched via ant (build.xml), the test root is an absolute path;
  * if tests are launched via eclipse, the test root is
- * is a test dir below the working directory. (see FileContextTestHelper).
- * Since viewFs has no built-in wd, its wd is /user/<username>.
+ * a test dir below the working directory (see FileContextTestHelper).
+ * 
+ * We set up a viewfs with 3 mount points: 
+ * 1) /<firstComponent> of the test dir, pointing to the same path in the target fs
+ * 2) /<firstComponent> of the home dir, pointing to the same path in the target fs
+ * 3) /<firstComponent> of the working dir, pointing to the same path in the target fs
+ * (note that in many cases the links may be the same - viewfs handles this)
  * 
- * We set up fc to be the viewFs with mount point for 
- * /<firstComponent>" pointing to the local file system's testdir 
+ * We also set the viewfs working directory to the target fs's working directory.
  */
+
 public class ViewFsTestSetup {
+  
+  public static String ViewFSTestDir = "/testDir";
 
 
    /* 
@@ -47,30 +57,31 @@ public class ViewFsTestSetup {
     /**
      * create the test root on local_fs - the  mount table will point here
      */
-    FileContext fclocal = FileContext.getLocalFSFileContext();
-    Path targetOfTests = FileContextTestHelper.getTestRootPath(fclocal);
+    FileContext fsTarget = FileContext.getLocalFSFileContext();
+    Path targetOfTests = FileContextTestHelper.getTestRootPath(fsTarget);
     // In case previous test was killed before cleanup
-    fclocal.delete(targetOfTests, true);
+    fsTarget.delete(targetOfTests, true);
     
-    fclocal.mkdir(targetOfTests, FileContext.DEFAULT_PERM, true);
-  
-    String srcTestFirstDir;
-    if (FileContextTestHelper.TEST_ROOT_DIR.startsWith("/")) {
-      int indexOf2ndSlash = FileContextTestHelper.TEST_ROOT_DIR.indexOf('/', 1);
-      srcTestFirstDir = FileContextTestHelper.TEST_ROOT_DIR.substring(0, indexOf2ndSlash);
-    } else {
-      srcTestFirstDir = "/user"; 
-  
-    }
-    //System.out.println("srcTestFirstDir=" + srcTestFirstDir);
-  
-    // Set up the defaultMT in the config with mount point links
-    // The test dir is root is below  /user/<userid>
+    fsTarget.mkdir(targetOfTests, FileContext.DEFAULT_PERM, true);
     Configuration conf = new Configuration();
-    ConfigUtil.addLink(conf, srcTestFirstDir,
-        targetOfTests.toUri());
+    
+    // Set up viewfs link for test dir as described above
+    String testDir = FileContextTestHelper.getTestRootPath(fsTarget).toUri()
+        .getPath();
+    linkUpFirstComponents(conf, testDir, fsTarget, "test dir");
+    
+    
+    // Set up viewfs link for home dir as described above
+    setUpHomeDir(conf, fsTarget);
+      
+    // the test path may be relative to working dir - we need to make that work:
+    // Set up viewfs link for wd as described above
+    String wdDir = fsTarget.getWorkingDirectory().toUri().getPath();
+    linkUpFirstComponents(conf, wdDir, fsTarget, "working dir");
     
     FileContext fc = FileContext.getFileContext(FsConstants.VIEWFS_URI, conf);
+    fc.setWorkingDirectory(new Path(wdDir)); // in case the test dir is relative to the wd
+    Log.info("Working dir is: " + fc.getWorkingDirectory());
     //System.out.println("SRCOfTests = "+ getTestRootPath(fc, "test"));
     //System.out.println("TargetOfTests = "+ targetOfTests.toUri());
     return fc;
@@ -85,5 +96,36 @@ public class ViewFsTestSetup {
     Path targetOfTests = FileContextTestHelper.getTestRootPath(fclocal);
     fclocal.delete(targetOfTests, true);
   }
+  
+  
+  static void setUpHomeDir(Configuration conf, FileContext fsTarget) {
+    String homeDir = fsTarget.getHomeDirectory().toUri().getPath();
+    int indexOf2ndSlash = homeDir.indexOf('/', 1);
+    if (indexOf2ndSlash > 0) {
+      linkUpFirstComponents(conf, homeDir, fsTarget, "home dir");
+    } else { // home dir is at root; just link the home dir itself
+      URI linkTarget = fsTarget.makeQualified(new Path(homeDir)).toUri();
+      ConfigUtil.addLink(conf, homeDir, linkTarget);
+      Log.info("Added link for home dir " + homeDir + "->" + linkTarget);
+    }
+    // Now set the root of the home dir for viewfs
+    String homeDirRoot = fsTarget.getHomeDirectory().getParent().toUri().getPath();
+    ConfigUtil.setHomeDirConf(conf, homeDirRoot);
+    Log.info("Home dir base for viewfs" + homeDirRoot);  
+  }
+  
+  /*
+   * Set up link in config for first component of path to the same
+   * in the target file system.
+   */
+  static void linkUpFirstComponents(Configuration conf, String path,
+      FileContext fsTarget, String info) {
+    int indexOf2ndSlash = path.indexOf('/', 1);
+    String firstComponent = path.substring(0, indexOf2ndSlash);
+    URI linkTarget = fsTarget.makeQualified(new Path(firstComponent)).toUri();
+    ConfigUtil.addLink(conf, firstComponent, linkTarget);
+    Log.info("Added link for " + info + " " 
+        + firstComponent + "->" + linkTarget);    
+  }
 
 }

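One subtlety worth noting: linkUpFirstComponents assumes the path has at least two components, since indexOf('/', 1) returns -1 for a root-level path like "/root" and the substring would then throw. That is exactly why setUpHomeDir branches on indexOf2ndSlash > 0 and links the home dir itself in the root-level case. A guarded variant of the component extraction, as a sketch:

    public class FirstComponentSketch {
      // Returns the first path component, falling back to the whole path when
      // it is root-level (mirrors the guard in setUpHomeDir above).
      static String firstComponent(String path) {
        int indexOf2ndSlash = path.indexOf('/', 1);
        return (indexOf2ndSlash > 0) ? path.substring(0, indexOf2ndSlash) : path;
      }

      public static void main(String[] args) {
        System.out.println(firstComponent("/home/alice"));  // /home
        System.out.println(firstComponent("/root"));        // /root (no second slash)
      }
    }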
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java Sat Nov 10 00:49:15 2012
@@ -61,7 +61,7 @@ public class TestNativeIO {
   public void testFstat() throws Exception {
     FileOutputStream fos = new FileOutputStream(
       new File(TEST_DIR, "testfstat"));
-    NativeIO.Stat stat = NativeIO.fstat(fos.getFD());
+    NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());
     fos.close();
     LOG.info("Stat: " + String.valueOf(stat));
 
@@ -93,7 +93,7 @@ public class TestNativeIO {
           long et = Time.now() + 5000;
           while (Time.now() < et) {
             try {
-              NativeIO.Stat stat = NativeIO.fstat(fos.getFD());
+              NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());
               assertEquals(System.getProperty("user.name"), stat.getOwner());
               assertNotNull(stat.getGroup());
               assertTrue(!stat.getGroup().isEmpty());
@@ -125,7 +125,7 @@ public class TestNativeIO {
       new File(TEST_DIR, "testfstat2"));
     fos.close();
     try {
-      NativeIO.Stat stat = NativeIO.fstat(fos.getFD());
+      NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());
     } catch (NativeIOException nioe) {
       LOG.info("Got expected exception", nioe);
       assertEquals(Errno.EBADF, nioe.getErrno());
@@ -283,4 +283,14 @@ public class TestNativeIO {
     assertEquals(expected, perms.toShort());
   }
 
+  @Test
+  public void testGetUserName() throws IOException {
+    assertFalse(NativeIO.getUserName(0).isEmpty());
+  }
+
+  @Test
+  public void testGetGroupName() throws IOException {
+    assertFalse(NativeIO.getGroupName(0).isEmpty());
+  }
+
 }

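The hunks above track the rename of NativeIO.fstat to NativeIO.getFstat and add coverage for the new uid/gid name lookups. A minimal usage sketch, assuming the native library is loaded (NativeIO.isAvailable()) and a writable temp dir:

    import java.io.File;
    import java.io.FileOutputStream;
    import org.apache.hadoop.io.nativeio.NativeIO;

    public class FstatSketch {
      public static void main(String[] args) throws Exception {
        FileOutputStream fos = new FileOutputStream(
            new File(System.getProperty("java.io.tmpdir"), "fstat-demo"));
        try {
          NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());  // was NativeIO.fstat(...)
          System.out.println(stat.getOwner() + ":" + stat.getGroup());
        } finally {
          fos.close();
        }
        System.out.println(NativeIO.getUserName(0));   // uid 0 -> "root" on most systems
        System.out.println(NativeIO.getGroupName(0));  // gid 0 -> "root" or "wheel"
      }
    }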
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java?rev=1407706&r1=1407705&r2=1407706&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java Sat Nov 10 00:49:15 2012
@@ -27,12 +27,13 @@ import java.io.IOException;
 import java.lang.annotation.Annotation;
 import java.net.InetSocketAddress;
 import java.security.PrivilegedExceptionAction;
+import java.security.Security;
 import java.util.Collection;
 import java.util.Set;
 import java.util.regex.Pattern;
 
-import javax.security.sasl.Sasl;
-
+import javax.security.auth.callback.*;
+import javax.security.sasl.*;
 import junit.framework.Assert;
 
 import org.apache.commons.logging.Log;
@@ -43,14 +44,8 @@ import org.apache.hadoop.fs.CommonConfig
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.Client.ConnectionId;
 import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.KerberosInfo;
-import org.apache.hadoop.security.SaslInputStream;
-import org.apache.hadoop.security.SaslRpcClient;
-import org.apache.hadoop.security.SaslRpcServer;
-import org.apache.hadoop.security.SecurityInfo;
-import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.security.TestUserGroupInformation;
-import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.*;
+import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.Token;
@@ -58,8 +53,10 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.security.token.TokenInfo;
 import org.apache.hadoop.security.token.TokenSelector;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
+
 import org.apache.log4j.Level;
 import org.junit.Before;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 /** Unit tests for using Sasl over RPC. */
@@ -74,14 +71,22 @@ public class TestSaslRPC {
   static final String SERVER_KEYTAB_KEY = "test.ipc.server.keytab";
   static final String SERVER_PRINCIPAL_1 = "p1/foo@BAR";
   static final String SERVER_PRINCIPAL_2 = "p2/foo@BAR";
-  
   private static Configuration conf;
+  static Boolean forceSecretManager = null;
+  
+  @BeforeClass
+  public static void setupKerb() {
+    System.setProperty("java.security.krb5.kdc", "");
+    System.setProperty("java.security.krb5.realm", "NONE");
+    Security.addProvider(new SaslPlainServer.SecurityProvider());
+  }    
 
   @Before
   public void setup() {
     conf = new Configuration();
     SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
     UserGroupInformation.setConfiguration(conf);
+    forceSecretManager = null;
   }
 
   static {
@@ -266,16 +271,6 @@ public class TestSaslRPC {
   }
 
   @Test
-  public void testSecureToInsecureRpc() throws Exception {
-    SecurityUtil.setAuthenticationMethod(AuthenticationMethod.SIMPLE, conf);
-    Server server = new RPC.Builder(conf).setProtocol(TestSaslProtocol.class)
-        .setInstance(new TestSaslImpl()).setBindAddress(ADDRESS).setPort(0)
-        .setNumHandlers(5).setVerbose(true).build();
-    TestTokenSecretManager sm = new TestTokenSecretManager();
-    doDigestRpc(server, sm);
-  }
-  
-  @Test
   public void testErrorMessage() throws Exception {
     BadTokenSecretManager sm = new BadTokenSecretManager();
     final Server server = new RPC.Builder(conf)
@@ -455,6 +450,120 @@ public class TestSaslRPC {
     System.out.println("Test is successful.");
   }
 
+  @Test
+  public void testSaslPlainServer() throws IOException {
+    runNegotiation(
+        new TestPlainCallbacks.Client("user", "pass"),
+        new TestPlainCallbacks.Server("user", "pass"));
+  }
+
+  @Test
+  public void testSaslPlainServerBadPassword() throws IOException {
+    SaslException e = null;
+    try {
+      runNegotiation(
+          new TestPlainCallbacks.Client("user", "pass1"),
+          new TestPlainCallbacks.Server("user", "pass2"));
+    } catch (SaslException se) {
+      e = se;
+    }
+    assertNotNull(e);
+    assertEquals("PLAIN auth failed: wrong password", e.getMessage());
+  }
+
+
+  private void runNegotiation(CallbackHandler clientCbh,
+                              CallbackHandler serverCbh)
+                                  throws SaslException {
+    String mechanism = AuthMethod.PLAIN.getMechanismName();
+
+    SaslClient saslClient = Sasl.createSaslClient(
+        new String[]{ mechanism }, null, null, null, null, clientCbh);
+    assertNotNull(saslClient);
+
+    SaslServer saslServer = Sasl.createSaslServer(
+        mechanism, null, "localhost", null, serverCbh);
+    assertNotNull("failed to find PLAIN server", saslServer);
+    
+    byte[] response = saslClient.evaluateChallenge(new byte[0]);
+    assertNotNull(response);
+    assertTrue(saslClient.isComplete());
+
+    response = saslServer.evaluateResponse(response);
+    assertNull(response);
+    assertTrue(saslServer.isComplete());
+    assertNotNull(saslServer.getAuthorizationID());
+  }
+  
+  static class TestPlainCallbacks {
+    public static class Client implements CallbackHandler {
+      String user = null;
+      String password = null;
+      
+      Client(String user, String password) {
+        this.user = user;
+        this.password = password;
+      }
+      
+      @Override
+      public void handle(Callback[] callbacks)
+          throws UnsupportedCallbackException {
+        for (Callback callback : callbacks) {
+          if (callback instanceof NameCallback) {
+            ((NameCallback) callback).setName(user);
+          } else if (callback instanceof PasswordCallback) {
+            ((PasswordCallback) callback).setPassword(password.toCharArray());
+          } else {
+            throw new UnsupportedCallbackException(callback,
+                "Unrecognized SASL PLAIN Callback");
+          }
+        }
+      }
+    }
+    
+    public static class Server implements CallbackHandler {
+      String user = null;
+      String password = null;
+      
+      Server(String user, String password) {
+        this.user = user;
+        this.password = password;
+      }
+      
+      @Override
+      public void handle(Callback[] callbacks)
+          throws UnsupportedCallbackException, SaslException {
+        NameCallback nc = null;
+        PasswordCallback pc = null;
+        AuthorizeCallback ac = null;
+        
+        for (Callback callback : callbacks) {
+          if (callback instanceof NameCallback) {
+            nc = (NameCallback)callback;
+            assertEquals(user, nc.getName());
+          } else if (callback instanceof PasswordCallback) {
+            pc = (PasswordCallback)callback;
+            if (!password.equals(new String(pc.getPassword()))) {
+              throw new IllegalArgumentException("wrong password");
+            }
+          } else if (callback instanceof AuthorizeCallback) {
+            ac = (AuthorizeCallback)callback;
+            assertEquals(user, ac.getAuthorizationID());
+            assertEquals(user, ac.getAuthenticationID());
+            ac.setAuthorized(true);
+            ac.setAuthorizedID(ac.getAuthenticationID());
+          } else {
+            throw new UnsupportedCallbackException(callback,
+                "Unsupported SASL PLAIN Callback");
+          }
+        }
+        assertNotNull(nc);
+        assertNotNull(pc);
+        assertNotNull(ac);
+      }
+    }
+  }
+  
   private static Pattern BadToken =
       Pattern.compile(".*DIGEST-MD5: digest response format violation.*");
   private static Pattern KrbFailed =
@@ -462,6 +571,8 @@ public class TestSaslRPC {
                       "Failed to specify server's Kerberos principal name.*");
   private static Pattern Denied = 
       Pattern.compile(".*Authorization .* is enabled .*");
+  private static Pattern NoDigest =
+      Pattern.compile(".*Server is not configured to do DIGEST auth.*");
   
   /*
    *  simple server
@@ -478,6 +589,9 @@ public class TestSaslRPC {
     // Tokens are ignored because client is reverted to simple
     assertAuthEquals(SIMPLE, getAuthMethod(SIMPLE,   SIMPLE, true));
     assertAuthEquals(SIMPLE, getAuthMethod(KERBEROS, SIMPLE, true));
+    forceSecretManager = true;
+    assertAuthEquals(SIMPLE, getAuthMethod(SIMPLE,   SIMPLE, true));
+    assertAuthEquals(SIMPLE, getAuthMethod(KERBEROS, SIMPLE, true));
   }
     
   @Test
@@ -485,6 +599,9 @@ public class TestSaslRPC {
     // Tokens are ignored because client is reverted to simple
     assertAuthEquals(SIMPLE, getAuthMethod(SIMPLE,   SIMPLE, false));
     assertAuthEquals(SIMPLE, getAuthMethod(KERBEROS, SIMPLE, false));
+    forceSecretManager = true;
+    assertAuthEquals(SIMPLE, getAuthMethod(SIMPLE,   SIMPLE, false));
+    assertAuthEquals(SIMPLE, getAuthMethod(KERBEROS, SIMPLE, false));
   }
   
   /*
@@ -501,12 +618,19 @@ public class TestSaslRPC {
     // can use tokens regardless of auth
     assertAuthEquals(TOKEN, getAuthMethod(SIMPLE,   KERBEROS, true));
     assertAuthEquals(TOKEN, getAuthMethod(KERBEROS, KERBEROS, true));
+    // can't fallback to simple when using kerberos w/o tokens
+    forceSecretManager = false;
+    assertAuthEquals(NoDigest, getAuthMethod(SIMPLE,   KERBEROS, true));
+    assertAuthEquals(NoDigest, getAuthMethod(KERBEROS, KERBEROS, true));
   }
 
   @Test
   public void testKerberosServerWithInvalidTokens() throws Exception {
     assertAuthEquals(BadToken, getAuthMethod(SIMPLE,   KERBEROS, false));
     assertAuthEquals(BadToken, getAuthMethod(KERBEROS, KERBEROS, false));
+    forceSecretManager = false;
+    assertAuthEquals(NoDigest, getAuthMethod(SIMPLE,   KERBEROS, true));
+    assertAuthEquals(NoDigest, getAuthMethod(KERBEROS, KERBEROS, true));
   }
 
 
@@ -539,21 +663,45 @@ public class TestSaslRPC {
       final boolean useToken,
       final boolean useValidToken) throws Exception {
     
-    Configuration serverConf = new Configuration(conf);
+    String currentUser = UserGroupInformation.getCurrentUser().getUserName();
+    
+    final Configuration serverConf = new Configuration(conf);
     SecurityUtil.setAuthenticationMethod(serverAuth, serverConf);
     UserGroupInformation.setConfiguration(serverConf);
     
-    TestTokenSecretManager sm = new TestTokenSecretManager();
-    Server server = new RPC.Builder(serverConf).setProtocol(TestSaslProtocol.class)
+    final UserGroupInformation serverUgi =
+        UserGroupInformation.createRemoteUser(currentUser + "-SERVER");
+    serverUgi.setAuthenticationMethod(serverAuth);
+
+    final TestTokenSecretManager sm = new TestTokenSecretManager();
+    boolean useSecretManager = (serverAuth != SIMPLE);
+    if (forceSecretManager != null) {
+      useSecretManager &= forceSecretManager.booleanValue();
+    }
+    final SecretManager<?> serverSm = useSecretManager ? sm : null;
+    
+    Server server = serverUgi.doAs(new PrivilegedExceptionAction<Server>() {
+      @Override
+      public Server run() throws IOException {
+        Server server = new RPC.Builder(serverConf)
+        .setProtocol(TestSaslProtocol.class)
         .setInstance(new TestSaslImpl()).setBindAddress(ADDRESS).setPort(0)
         .setNumHandlers(5).setVerbose(true)
-        .setSecretManager((serverAuth != SIMPLE) ? sm : null)
+        .setSecretManager(serverSm)
         .build();      
-    server.start();
+        server.start();
+        return server;
+      }
+    });
 
+    final Configuration clientConf = new Configuration(conf);
+    SecurityUtil.setAuthenticationMethod(clientAuth, clientConf);
+    UserGroupInformation.setConfiguration(clientConf);
+    
     final UserGroupInformation clientUgi =
-        UserGroupInformation.createRemoteUser(
-            UserGroupInformation.getCurrentUser().getUserName()+"-CLIENT");
+        UserGroupInformation.createRemoteUser(currentUser + "-CLIENT");
+    clientUgi.setAuthenticationMethod(clientAuth);    
+
     final InetSocketAddress addr = NetUtils.getConnectAddress(server);
     if (useToken) {
       TestTokenIdentifier tokenId = new TestTokenIdentifier(
@@ -568,10 +716,6 @@ public class TestSaslRPC {
       clientUgi.addToken(token);
     }
 
-    final Configuration clientConf = new Configuration(conf);
-    SecurityUtil.setAuthenticationMethod(clientAuth, clientConf);
-    UserGroupInformation.setConfiguration(clientConf);
-    
     try {
       return clientUgi.doAs(new PrivilegedExceptionAction<String>() {
         @Override
@@ -581,6 +725,12 @@ public class TestSaslRPC {
             proxy = (TestSaslProtocol) RPC.getProxy(TestSaslProtocol.class,
                 TestSaslProtocol.versionID, addr, clientConf);
             
+            proxy.ping();
+            // verify sasl completed
+            if (serverAuth != SIMPLE) {
+              assertEquals("auth", SaslRpcServer.SASL_PROPS.get(Sasl.QOP));
+            }
+            
             // make sure the other side thinks we are who we said we are!!!
             assertEquals(clientUgi.getUserName(), proxy.getAuthUser());
             return proxy.getAuthMethod().toString();


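To summarize the PLAIN additions: the mechanism completes in a single round trip, with the server side supplied by the new SaslPlainServer provider (the JDK's SunSASL provider already ships a PLAIN client). A standalone sketch of the negotiation that runNegotiation drives above; the credentials are illustrative and the server callback here skips password validation, which a real handler (like TestPlainCallbacks.Server) must perform:

    import java.security.Security;
    import javax.security.auth.callback.*;
    import javax.security.sasl.*;
    import org.apache.hadoop.security.SaslPlainServer;

    public class PlainSaslSketch {
      public static void main(String[] args) throws Exception {
        // Register the server-side PLAIN mechanism added by this commit.
        Security.addProvider(new SaslPlainServer.SecurityProvider());

        CallbackHandler clientCbh = new CallbackHandler() {
          public void handle(Callback[] cbs) throws UnsupportedCallbackException {
            for (Callback cb : cbs) {
              if (cb instanceof NameCallback) {
                ((NameCallback) cb).setName("user");
              } else if (cb instanceof PasswordCallback) {
                ((PasswordCallback) cb).setPassword("pass".toCharArray());
              } else {
                throw new UnsupportedCallbackException(cb);
              }
            }
          }
        };

        CallbackHandler serverCbh = new CallbackHandler() {
          public void handle(Callback[] cbs) throws UnsupportedCallbackException {
            for (Callback cb : cbs) {
              if (cb instanceof AuthorizeCallback) {
                AuthorizeCallback ac = (AuthorizeCallback) cb;
                ac.setAuthorized(true);  // a real server validates the password first
                ac.setAuthorizedID(ac.getAuthenticationID());
              } else if (!(cb instanceof NameCallback)
                  && !(cb instanceof PasswordCallback)) {
                throw new UnsupportedCallbackException(cb);
              }
            }
          }
        };

        SaslClient client = Sasl.createSaslClient(
            new String[]{"PLAIN"}, null, null, null, null, clientCbh);
        SaslServer server = Sasl.createSaslServer(
            "PLAIN", null, "localhost", null, serverCbh);

        // PLAIN is one round trip: the client sends its initial response and
        // the server evaluates it without issuing a further challenge.
        byte[] response = client.evaluateChallenge(new byte[0]);
        server.evaluateResponse(response);
        System.out.println("authorized as " + server.getAuthorizationID());
      }
    }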
