hadoop-common-commits mailing list archives

From: sur...@apache.org
Subject: svn commit: r1415815 [2/2] - in /hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common: ./ dev-support/ src/main/conf/ src/main/docs/ src/main/docs/src/documentation/content/xdocs/ src/main/java/ src/main/java/org/apache/hadoop/fs...
Date: Fri, 30 Nov 2012 19:58:38 GMT
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Fri Nov 30 19:58:09 2012
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.security;
 
-import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION;
 import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN;
 import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT;
 
@@ -133,7 +132,7 @@ public class UserGroupInformation {
       }
       Principal user = null;
       // if we are using kerberos, try it out
-      if (useKerberos) {
+      if (isAuthenticationMethodEnabled(AuthenticationMethod.KERBEROS)) {
         user = getCanonicalUser(KerberosPrincipal.class);
         if (LOG.isDebugEnabled()) {
           LOG.debug("using kerberos user:"+user);
@@ -191,8 +190,8 @@ public class UserGroupInformation {
   static UgiMetrics metrics = UgiMetrics.create();
   /** Are the static variables that depend on configuration initialized? */
   private static boolean isInitialized = false;
-  /** Should we use Kerberos configuration? */
-  private static boolean useKerberos;
+  /** The auth method to use */
+  private static AuthenticationMethod authenticationMethod;
   /** Server-side groups fetching service */
   private static Groups groups;
   /** Min time (in seconds) before relogin for Kerberos */
@@ -237,19 +236,7 @@ public class UserGroupInformation {
    * @param conf the configuration to use
    */
   private static synchronized void initUGI(Configuration conf) {
-    AuthenticationMethod auth = SecurityUtil.getAuthenticationMethod(conf);
-    switch (auth) {
-      case SIMPLE:
-        useKerberos = false;
-        break;
-      case KERBEROS:
-        useKerberos = true;
-        break;
-      default:
-        throw new IllegalArgumentException("Invalid attribute value for " +
-                                           HADOOP_SECURITY_AUTHENTICATION + 
-                                           " of " + auth);
-    }
+    authenticationMethod = SecurityUtil.getAuthenticationMethod(conf);
     try {
         kerberosMinSecondsBeforeRelogin = 1000L * conf.getLong(
                 HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN,
@@ -287,8 +274,14 @@ public class UserGroupInformation {
    * @return true if UGI is working in a secure environment
    */
   public static boolean isSecurityEnabled() {
+    return !isAuthenticationMethodEnabled(AuthenticationMethod.SIMPLE);
+  }
+  
+  @InterfaceAudience.Private
+  @InterfaceStability.Evolving
+  private static boolean isAuthenticationMethodEnabled(AuthenticationMethod method) {
     ensureInitialized();
-    return useKerberos;
+    return (authenticationMethod == method);
   }
   
   /**
@@ -584,7 +577,7 @@ public class UserGroupInformation {
   @InterfaceStability.Evolving
   public static UserGroupInformation getUGIFromTicketCache(
             String ticketCache, String user) throws IOException {
-    if (!isSecurityEnabled()) {
+    if (!isAuthenticationMethodEnabled(AuthenticationMethod.KERBEROS)) {
       return getBestUGI(null, user);
     }
     try {
@@ -637,19 +630,12 @@ public class UserGroupInformation {
   public synchronized 
   static UserGroupInformation getLoginUser() throws IOException {
     if (loginUser == null) {
+      ensureInitialized();
       try {
         Subject subject = new Subject();
-        LoginContext login;
-        AuthenticationMethod authenticationMethod;
-        if (isSecurityEnabled()) {
-          authenticationMethod = AuthenticationMethod.KERBEROS;
-          login = newLoginContext(HadoopConfiguration.USER_KERBEROS_CONFIG_NAME,
-              subject, new HadoopConfiguration());
-        } else {
-          authenticationMethod = AuthenticationMethod.SIMPLE;
-          login = newLoginContext(HadoopConfiguration.SIMPLE_CONFIG_NAME, 
-              subject, new HadoopConfiguration());
-        }
+        LoginContext login =
+            newLoginContext(authenticationMethod.getLoginAppName(), 
+                            subject, new HadoopConfiguration());
         login.login();
         loginUser = new UserGroupInformation(subject);
         loginUser.setLogin(login);
@@ -674,6 +660,14 @@ public class UserGroupInformation {
     return loginUser;
   }
 
+  @InterfaceAudience.Private
+  @InterfaceStability.Unstable
+  synchronized static void setLoginUser(UserGroupInformation ugi) {
+    // if this is to become stable, should probably logout the currently
+    // logged in ugi if it's different
+    loginUser = ugi;
+  }
+  
   /**
    * Is this user logged in from a keytab file?
    * @return true if the credentials are from a keytab file.
@@ -1026,22 +1020,38 @@ public class UserGroupInformation {
   public static enum AuthenticationMethod {
     // currently we support only one auth per method, but eventually a 
     // subtype is needed to differentiate, ex. if digest is token or ldap
-    SIMPLE(AuthMethod.SIMPLE),
-    KERBEROS(AuthMethod.KERBEROS),
+    SIMPLE(AuthMethod.SIMPLE,
+        HadoopConfiguration.SIMPLE_CONFIG_NAME),
+    KERBEROS(AuthMethod.KERBEROS,
+        HadoopConfiguration.USER_KERBEROS_CONFIG_NAME),
     TOKEN(AuthMethod.DIGEST),
     CERTIFICATE(null),
     KERBEROS_SSL(null),
     PROXY(null);
     
     private final AuthMethod authMethod;
+    private final String loginAppName;
+    
     private AuthenticationMethod(AuthMethod authMethod) {
+      this(authMethod, null);
+    }
+    private AuthenticationMethod(AuthMethod authMethod, String loginAppName) {
       this.authMethod = authMethod;
+      this.loginAppName = loginAppName;
     }
     
     public AuthMethod getAuthMethod() {
       return authMethod;
     }
     
+    String getLoginAppName() {
+      if (loginAppName == null) {
+        throw new UnsupportedOperationException(
+            this + " login authentication is not supported");
+      }
+      return loginAppName;
+    }
+    
     public static AuthenticationMethod valueOf(AuthMethod authMethod) {
       for (AuthenticationMethod value : values()) {
         if (value.getAuthMethod() == authMethod) {
@@ -1333,7 +1343,21 @@ public class UserGroupInformation {
   public synchronized AuthenticationMethod getAuthenticationMethod() {
     return user.getAuthenticationMethod();
   }
-  
+
+  /**
+   * Get the authentication method from the real user's subject.  If there
+   * is no real user, return the given user's authentication method.
+   * 
+   * @return AuthenticationMethod in the subject, null if not present.
+   */
+  public synchronized AuthenticationMethod getRealAuthenticationMethod() {
+    UserGroupInformation ugi = getRealUser();
+    if (ugi == null) {
+      ugi = this;
+    }
+    return ugi.getAuthenticationMethod();
+  }
+
   /**
    * Returns the authentication method of a ugi. If the authentication method is
    * PROXY, returns the authentication method of the real user.
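
The net effect of this refactoring: the boolean useKerberos gives way to storing the configured AuthenticationMethod, and each enum constant that supports direct login now carries its JAAS application name, so getLoginUser() no longer branches on isSecurityEnabled(). Methods without a login app name fail fast with UnsupportedOperationException. A condensed sketch of the pattern, with simplified names and illustrative app-name strings (the real values come from HadoopConfiguration):

    // Sketch only: the constant set and app names are simplified/illustrative.
    enum AuthMethodSketch {
      SIMPLE("hadoop-simple"),
      KERBEROS("hadoop-user-kerberos"),
      TOKEN(null),                  // no direct login support
      PROXY(null);                  // no direct login support

      private final String loginAppName;

      AuthMethodSketch(String loginAppName) {
        this.loginAppName = loginAppName;
      }

      String getLoginAppName() {
        if (loginAppName == null) {
          throw new UnsupportedOperationException(
              this + " login authentication is not supported");
        }
        return loginAppName;
      }
    }

This is exactly the failure mode the new testTokenLogin/testProxyLogin cases in TestUserGroupInformation (below) assert.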

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Fri Nov 30 19:58:09 2012
@@ -72,16 +72,27 @@ static int workaround_non_threadsafe_cal
 static void stat_init(JNIEnv *env, jclass nativeio_class) {
   // Init Stat
   jclass clazz = (*env)->FindClass(env, "org/apache/hadoop/io/nativeio/NativeIO$Stat");
-  PASS_EXCEPTIONS(env);
+  if (!clazz) {
+    return; // exception has been raised
+  }
   stat_clazz = (*env)->NewGlobalRef(env, clazz);
+  if (!stat_clazz) {
+    return; // exception has been raised
+  }
   stat_ctor = (*env)->GetMethodID(env, stat_clazz, "<init>",
-    "(Ljava/lang/String;Ljava/lang/String;I)V");
-  
+    "(III)V");
+  if (!stat_ctor) {
+    return; // exception has been raised
+  }
   jclass obj_class = (*env)->FindClass(env, "java/lang/Object");
-  assert(obj_class != NULL);
+  if (!obj_class) {
+    return; // exception has been raised
+  }
   jmethodID  obj_ctor = (*env)->GetMethodID(env, obj_class,
     "<init>", "()V");
-  assert(obj_ctor != NULL);
+  if (!obj_ctor) {
+    return; // exception has been raised
+  }
 
   if (workaround_non_threadsafe_calls(env, nativeio_class)) {
     pw_lock_object = (*env)->NewObject(env, obj_class, obj_ctor);
@@ -158,8 +169,6 @@ Java_org_apache_hadoop_io_nativeio_Nativ
   JNIEnv *env, jclass clazz, jobject fd_object)
 {
   jobject ret = NULL;
-  char *pw_buf = NULL;
-  int pw_lock_locked = 0;
 
   int fd = fd_get(env, fd_object);
   PASS_EXCEPTIONS_GOTO(env, cleanup);
@@ -171,71 +180,14 @@ Java_org_apache_hadoop_io_nativeio_Nativ
     goto cleanup;
   }
 
-  size_t pw_buflen = get_pw_buflen();
-  if ((pw_buf = malloc(pw_buflen)) == NULL) {
-    THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
-    goto cleanup;
-  }
-
-  if (pw_lock_object != NULL) {
-    if ((*env)->MonitorEnter(env, pw_lock_object) != JNI_OK) {
-      goto cleanup;
-    }
-    pw_lock_locked = 1;
-  }
-
-  // Grab username
-  struct passwd pwd, *pwdp;
-  while ((rc = getpwuid_r(s.st_uid, &pwd, pw_buf, pw_buflen, &pwdp)) != 0) {
-    if (rc != ERANGE) {
-      throw_ioe(env, rc);
-      goto cleanup;
-    }
-    free(pw_buf);
-    pw_buflen *= 2;
-    if ((pw_buf = malloc(pw_buflen)) == NULL) {
-      THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
-      goto cleanup;
-    }
-  }
-  assert(pwdp == &pwd);
-
-  jstring jstr_username = (*env)->NewStringUTF(env, pwd.pw_name);
-  if (jstr_username == NULL) goto cleanup;
-
-  // Grab group
-  struct group grp, *grpp;
-  while ((rc = getgrgid_r(s.st_gid, &grp, pw_buf, pw_buflen, &grpp)) != 0) {
-    if (rc != ERANGE) {
-      throw_ioe(env, rc);
-      goto cleanup;
-    }
-    free(pw_buf);
-    pw_buflen *= 2;
-    if ((pw_buf = malloc(pw_buflen)) == NULL) {
-      THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
-      goto cleanup;
-    }
-  }
-  assert(grpp == &grp);
-
-  jstring jstr_groupname = (*env)->NewStringUTF(env, grp.gr_name);
-  PASS_EXCEPTIONS_GOTO(env, cleanup);
-
   // Construct result
   ret = (*env)->NewObject(env, stat_clazz, stat_ctor,
-    jstr_username, jstr_groupname, s.st_mode);
+    (jint)s.st_uid, (jint)s.st_gid, (jint)s.st_mode);
 
 cleanup:
-  if (pw_buf != NULL) free(pw_buf);
-  if (pw_lock_locked) {
-    (*env)->MonitorExit(env, pw_lock_object);
-  }
   return ret;
 }
 
-
-
 /**
  * public static native void posix_fadvise(
  *   FileDescriptor fd, long offset, long len, int flags);
@@ -385,6 +337,128 @@ Java_org_apache_hadoop_io_nativeio_Nativ
   (*env)->ReleaseStringUTFChars(env, j_path, path);
 }
 
+/*
+ * static native String getUserName(int uid);
+ */
+JNIEXPORT jstring JNICALL 
+Java_org_apache_hadoop_io_nativeio_NativeIO_getUserName(JNIEnv *env, 
+jclass clazz, jint uid)
+{
+  jstring jstr_username = NULL;
+  char *pw_buf = NULL;
+  int pw_lock_locked = 0;
+  if (pw_lock_object != NULL) {
+    if ((*env)->MonitorEnter(env, pw_lock_object) != JNI_OK) {
+      goto cleanup;
+    }
+    pw_lock_locked = 1;
+  }
+
+  int rc;
+  size_t pw_buflen = get_pw_buflen();
+  if ((pw_buf = malloc(pw_buflen)) == NULL) {
+    THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
+    goto cleanup;
+  }
+
+  // Grab username
+  struct passwd pwd, *pwdp;
+  while ((rc = getpwuid_r((uid_t)uid, &pwd, pw_buf, pw_buflen, &pwdp)) != 0) {
+    if (rc != ERANGE) {
+      throw_ioe(env, rc);
+      goto cleanup;
+    }
+    free(pw_buf);
+    pw_buflen *= 2;
+    if ((pw_buf = malloc(pw_buflen)) == NULL) {
+      THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
+      goto cleanup;
+    }
+  }
+  if (pwdp == NULL) {
+    char msg[80];
+    snprintf(msg, sizeof(msg), "uid not found: %d", uid);
+    THROW(env, "java/io/IOException", msg);
+    goto cleanup;
+  }
+  if (pwdp != &pwd) {
+    char msg[80];
+    snprintf(msg, sizeof(msg), "pwd pointer inconsistent with reference. uid: %d", uid);
+    THROW(env, "java/lang/IllegalStateException", msg);
+    goto cleanup;
+  }
+
+  jstr_username = (*env)->NewStringUTF(env, pwd.pw_name);
+
+cleanup:
+  if (pw_lock_locked) {
+    (*env)->MonitorExit(env, pw_lock_object);
+  }
+  if (pw_buf != NULL) free(pw_buf);
+  return jstr_username;
+}
+
+/*
+ * static native String getGroupName(int gid);
+ */
+JNIEXPORT jstring JNICALL 
+Java_org_apache_hadoop_io_nativeio_NativeIO_getGroupName(JNIEnv *env, 
+jclass clazz, jint gid)
+{
+  jstring jstr_groupname = NULL;
+  char *pw_buf = NULL;
+  int pw_lock_locked = 0;
+
+  if (pw_lock_object != NULL) {
+    if ((*env)->MonitorEnter(env, pw_lock_object) != JNI_OK) {
+      goto cleanup;
+    }
+    pw_lock_locked = 1;
+  }
+  
+  int rc;
+  size_t pw_buflen = get_pw_buflen();
+  if ((pw_buf = malloc(pw_buflen)) == NULL) {
+    THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
+    goto cleanup;
+  }
+  
+  // Grab group
+  struct group grp, *grpp;
+  while ((rc = getgrgid_r((gid_t)gid, &grp, pw_buf, pw_buflen, &grpp)) != 0) {
+    if (rc != ERANGE) {
+      throw_ioe(env, rc);
+      goto cleanup;
+    }
+    free(pw_buf);
+    pw_buflen *= 2;
+    if ((pw_buf = malloc(pw_buflen)) == NULL) {
+      THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
+      goto cleanup;
+    }
+  }
+  if (grpp == NULL) {
+    char msg[80];
+    snprintf(msg, sizeof(msg), "gid not found: %d", gid);
+    THROW(env, "java/io/IOException", msg);
+    goto cleanup;
+  }
+  if (grpp != &grp) {
+    char msg[80];
+    snprintf(msg, sizeof(msg), "pwd pointer inconsistent with reference. gid: %d", gid);
+    THROW(env, "java/lang/IllegalStateException", msg);
+    goto cleanup;
+  }
+
+  jstr_groupname = (*env)->NewStringUTF(env, grp.gr_name);
+  PASS_EXCEPTIONS_GOTO(env, cleanup);
+  
+cleanup:
+  if (pw_lock_locked) {
+    (*env)->MonitorExit(env, pw_lock_object);
+  }
+  if (pw_buf != NULL) free(pw_buf);
+  return jstr_groupname;
+}
+
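
As the comments above note, these two functions back static native declarations on the Java side of NativeIO. A hypothetical caller, showing the intended division of labor (getFstat now returns raw uid/gid, and name resolution happens through these lookups); everything here other than NativeIO.getUserName/getGroupName is illustrative:

    import java.io.IOException;

    // Illustrative caller: resolve the numeric ids from the new Stat(int,int,int)
    // constructor into names via the new native lookups.
    class OwnerLookup {
      static String describeOwner(int uid, int gid) throws IOException {
        String user = NativeIO.getUserName(uid);    // e.g. "root" for uid 0
        String group = NativeIO.getGroupName(gid);  // e.g. "root" or "wheel"
        return user + ":" + group;
      }
    }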
 
 /*
  * Throw a java.IO.IOException, generating the message from errno.

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Fri Nov 30 19:58:09 2012
@@ -214,6 +214,17 @@
   </description>
 </property>
 
+
+<property>
+    <name>hadoop.security.uid.cache.secs</name>
+    <value>14400</value>
+    <description>
+        The number of seconds for which entries remain valid in the cache
+        that maps userId to userName and groupId to groupName, as used by
+        NativeIO getFstat().
+    </description>
+</property>
+
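
A sketch of how a consumer might honor this window; only the key name and default come from the property above, the class and method names are illustrative:

    import org.apache.hadoop.conf.Configuration;

    class UidCacheConfig {
      static final String KEY = "hadoop.security.uid.cache.secs";
      static final long DEFAULT_SECS = 4 * 60 * 60;  // 14400, as in core-default.xml

      // How long a cached uid->userName or gid->groupName entry stays valid.
      static long cacheTimeoutMillis(Configuration conf) {
        return conf.getLong(KEY, DEFAULT_SECS) * 1000L;
      }
    }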
 <property>
   <name>hadoop.rpc.protection</name>
   <value>authentication</value>

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1407223-1415786

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java Fri Nov 30 19:58:09 2012
@@ -23,11 +23,13 @@ import java.io.IOException;
 
 import junit.framework.TestCase;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
 
 /**
  * <p>
@@ -43,7 +45,7 @@ import org.apache.hadoop.fs.Path;
  * </p>
  */
 public abstract class FileSystemContractBaseTest extends TestCase {
-  
+  protected final static String TEST_UMASK = "062";
   protected FileSystem fs;
   protected byte[] data = new byte[getBlockSize() * 2]; // two blocks of data
   {
@@ -151,7 +153,26 @@ public abstract class FileSystemContract
     assertFalse(fs.exists(testDeepSubDir));
     
   }
-  
+
+  public void testMkdirsWithUmask() throws Exception {
+    if (fs.getScheme().equals("s3") || fs.getScheme().equals("s3n")) {
+      // skip permission tests for S3FileSystem until HDFS-1333 is fixed.
+      return;
+    }
+    Configuration conf = fs.getConf();
+    String oldUmask = conf.get(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY);
+    try {
+      conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, TEST_UMASK);
+      final Path dir = new Path("/test/newDir");
+      assertTrue(fs.mkdirs(dir, new FsPermission((short)0777)));
+      FileStatus status = fs.getFileStatus(dir);
+      assertTrue(status.isDirectory());
+      assertEquals((short)0715, status.getPermission().toShort());
+    } finally {
+      conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, oldUmask);
+    }
+  }
+
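
The expected mode follows the usual umask rule: the requested permission bits are cleared wherever the umask has bits set. With the test's values:

    // 0777 & ~0062 = 0715: group loses rw (6), other loses w (2).
    class UmaskMath {
      static final short EXPECTED = (short) (0777 & ~0062);  // == 0715
    }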
   public void testGetFileStatusThrowsExceptionForNonExistentFile() 
     throws Exception {
     try {

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java Fri Nov 30 19:58:09 2012
@@ -22,6 +22,8 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.Shell;
@@ -32,6 +34,7 @@ import org.junit.runners.Parameterized.P
 import org.junit.Test;
 
 import static org.junit.Assert.*;
+import static org.junit.Assume.*;
 
 /** This test LocalDirAllocator works correctly;
  * Every test case uses different buffer dirs to
@@ -321,7 +324,7 @@ public class TestLocalDirAllocator {
    */
   @Test
   public void testNoSideEffects() throws IOException {
-    if (isWindows) return;
+    assumeTrue(!isWindows);
     String dir = buildBufferDir(ROOT, 0);
     try {
       conf.set(CONTEXT, dir);
@@ -343,8 +346,7 @@ public class TestLocalDirAllocator {
    */
   @Test
   public void testGetLocalPathToRead() throws IOException {
-    if (isWindows)
-      return;
+    assumeTrue(!isWindows);
     String dir = buildBufferDir(ROOT, 0);
     try {
       conf.set(CONTEXT, dir);
@@ -359,7 +361,60 @@ public class TestLocalDirAllocator {
                                                       BUFFER_DIR_ROOT));
       rmBufferDirs();
     }
+  }
 
+  /**
+   * Test that {@link LocalDirAllocator#getAllLocalPathsToRead(String, Configuration)} 
+   * returns the correct filenames and the "file" scheme.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testGetAllLocalPathsToRead() throws IOException {
+    assumeTrue(!isWindows);
+    
+    String dir0 = buildBufferDir(ROOT, 0);
+    String dir1 = buildBufferDir(ROOT, 1);
+    try {
+      conf.set(CONTEXT, dir0 + "," + dir1);
+      assertTrue(localFs.mkdirs(new Path(dir0)));
+      assertTrue(localFs.mkdirs(new Path(dir1)));
+      
+      localFs.create(new Path(dir0 + Path.SEPARATOR + FILENAME));
+      localFs.create(new Path(dir1 + Path.SEPARATOR + FILENAME));
+
+      // check that both paths are returned as paths to read:
+      final Iterable<Path> pathIterable = dirAllocator.getAllLocalPathsToRead(FILENAME, conf);
+      int count = 0;
+      for (final Path p: pathIterable) {
+        count++;
+        assertEquals(FILENAME, p.getName());
+        assertEquals("file", p.getFileSystem(conf).getUri().getScheme());
+      }
+      assertEquals(2, count);
+
+      // test that next() throws once there are no more elements to iterate:
+      try {
+        Path p = pathIterable.iterator().next();
+        assertFalse("NoSuchElementException must be thrown, but returned ["+p
+            +"] instead.", true); // exception expected
+      } catch (NoSuchElementException nsee) {
+        // okay
+      }
+      
+      // test modification not allowed:
+      final Iterable<Path> pathIterable2 = dirAllocator.getAllLocalPathsToRead(FILENAME, conf);
+      final Iterator<Path> it = pathIterable2.iterator();
+      try {
+        it.remove();
+        assertFalse(true); // exception expected
+      } catch (UnsupportedOperationException uoe) {
+        // okay
+      }
+    } finally {
+      Shell.execCommand(new String[] { "chmod", "u+w", BUFFER_DIR_ROOT });
+      rmBufferDirs();
+    }
   }
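
One subtlety the assertions rely on: after the for-each loop drains the paths, pathIterable.iterator().next() is expected to throw NoSuchElementException rather than return the first path again. That only holds if getAllLocalPathsToRead returns a single-use Iterable whose iterator() hands back the same underlying iterator each time; this sketch makes that reading explicit (an assumption, not the actual implementation):

    import java.util.Iterator;

    // Single-use Iterable: every iterator() call returns the same iterator,
    // so exhausting it once exhausts it everywhere.
    final class SingleUseIterable<T> implements Iterable<T> {
      private final Iterator<T> it;
      SingleUseIterable(Iterator<T> it) { this.it = it; }
      @Override public Iterator<T> iterator() { return it; }
    }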
   
   @Test

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java Fri Nov 30 19:58:09 2012
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertEqu
 import java.io.IOException;
 
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.shell.PathExceptions.PathIOException;
+import org.apache.hadoop.fs.PathIOException;
 import org.junit.Test;
 
 public class TestPathExceptions {

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java Fri Nov 30 19:58:09 2012
@@ -26,6 +26,7 @@ import org.apache.hadoop.fs.FileSystemTe
 import org.apache.hadoop.fs.FsConstants;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.viewfs.ConfigUtil;
+import org.apache.hadoop.util.Shell;
 import org.mortbay.log.Log;
 
 
@@ -123,8 +124,11 @@ public class ViewFileSystemTestSetup {
    * in the target file system.
    */
  static void linkUpFirstComponents(Configuration conf, String path, FileSystem fsTarget, String info) {
-    int indexOf2ndSlash = path.indexOf('/', 1);
-    String firstComponent = path.substring(0, indexOf2ndSlash);
+    int indexOfEnd = path.indexOf('/', 1);
+    if (Shell.WINDOWS) {
+      indexOfEnd = path.indexOf('/', indexOfEnd + 1);
+    }
+    String firstComponent = path.substring(0, indexOfEnd);
     URI linkTarget = fsTarget.makeQualified(new Path(firstComponent)).toUri();
     ConfigUtil.addLink(conf, firstComponent, linkTarget);
     Log.info("Added link for " + info + " " 
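
Concretely (example paths illustrative): on Windows a qualified test path such as /C:/foo/bar carries the drive specifier as an extra leading component, so indexOf('/', 1) stops after /C: and the code advances one more slash to take /C:/foo as the first component; on other platforms /foo/bar still yields /foo. The same adjustment is applied to ViewFsTestSetup below.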

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java Fri Nov 30 19:58:09 2012
@@ -25,6 +25,7 @@ import org.apache.hadoop.fs.FileContextT
 import org.apache.hadoop.fs.FsConstants;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.viewfs.ConfigUtil;
+import org.apache.hadoop.util.Shell;
 import org.mortbay.log.Log;
 
 
@@ -120,8 +121,11 @@ public class ViewFsTestSetup {
    */
   static void linkUpFirstComponents(Configuration conf, String path,
       FileContext fsTarget, String info) {
-    int indexOf2ndSlash = path.indexOf('/', 1);
-    String firstComponent = path.substring(0, indexOf2ndSlash);
+    int indexOfEnd = path.indexOf('/', 1);
+    if (Shell.WINDOWS) {
+      indexOfEnd = path.indexOf('/', indexOfEnd + 1);
+    }
+    String firstComponent = path.substring(0, indexOfEnd);
     URI linkTarget = fsTarget.makeQualified(new Path(firstComponent)).toUri();
     ConfigUtil.addLink(conf, firstComponent, linkTarget);
     Log.info("Added link for " + info + " " 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java Fri Nov 30 19:58:09 2012
@@ -539,4 +539,17 @@ public class TestHttpServer extends Http
     }
     return server;
   }
+
+  @Test
+  public void testNoCacheHeader() throws Exception {
+    URL url = new URL(baseUrl, "/echo?a=b&c=d");
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+    assertEquals("no-cache", conn.getHeaderField("Cache-Control"));
+    assertEquals("no-cache", conn.getHeaderField("Pragma"));
+    assertNotNull(conn.getHeaderField("Expires"));
+    assertNotNull(conn.getHeaderField("Date"));
+    assertEquals(conn.getHeaderField("Expires"), conn.getHeaderField("Date"));
+  }
+
 }
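
The headers asserted here are the conventional belt-and-braces way to defeat HTTP caching (Cache-Control for HTTP/1.1 clients, Pragma for HTTP/1.0, and Expires pinned to Date). A generic servlet filter that would satisfy these assertions, as a sketch rather than Hadoop's actual filter:

    import java.io.IOException;
    import javax.servlet.*;
    import javax.servlet.http.HttpServletResponse;

    // Illustrative no-cache filter matching the assertions in testNoCacheHeader.
    public class NoCacheFilter implements Filter {
      @Override public void init(FilterConfig conf) {}
      @Override public void destroy() {}

      @Override
      public void doFilter(ServletRequest req, ServletResponse res,
                           FilterChain chain) throws IOException, ServletException {
        HttpServletResponse http = (HttpServletResponse) res;
        http.setHeader("Cache-Control", "no-cache");
        http.setHeader("Pragma", "no-cache");
        long now = System.currentTimeMillis();
        http.setDateHeader("Date", now);
        http.setDateHeader("Expires", now);  // Expires == Date, per the test
        chain.doFilter(req, res);
      }
    }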

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java Fri Nov 30 19:58:09 2012
@@ -61,7 +61,7 @@ public class TestNativeIO {
   public void testFstat() throws Exception {
     FileOutputStream fos = new FileOutputStream(
       new File(TEST_DIR, "testfstat"));
-    NativeIO.Stat stat = NativeIO.fstat(fos.getFD());
+    NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());
     fos.close();
     LOG.info("Stat: " + String.valueOf(stat));
 
@@ -93,7 +93,7 @@ public class TestNativeIO {
           long et = Time.now() + 5000;
           while (Time.now() < et) {
             try {
-              NativeIO.Stat stat = NativeIO.fstat(fos.getFD());
+              NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());
               assertEquals(System.getProperty("user.name"), stat.getOwner());
               assertNotNull(stat.getGroup());
               assertTrue(!stat.getGroup().isEmpty());
@@ -125,7 +125,7 @@ public class TestNativeIO {
       new File(TEST_DIR, "testfstat2"));
     fos.close();
     try {
-      NativeIO.Stat stat = NativeIO.fstat(fos.getFD());
+      NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());
     } catch (NativeIOException nioe) {
       LOG.info("Got expected exception", nioe);
       assertEquals(Errno.EBADF, nioe.getErrno());
@@ -283,4 +283,14 @@ public class TestNativeIO {
     assertEquals(expected, perms.toShort());
   }
 
+  @Test
+  public void testGetUserName() throws IOException {
+    assertFalse(NativeIO.getUserName(0).isEmpty());
+  }
+
+  @Test
+  public void testGetGroupName() throws IOException {
+    assertFalse(NativeIO.getGroupName(0).isEmpty());
+  }
+
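
Both tests assume uid 0 and gid 0 resolve to names on the test host (typically root, and root or wheel for the group), which is a safe assumption on the POSIX systems this native path targets.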
 }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java Fri Nov 30 19:58:09 2012
@@ -27,12 +27,13 @@ import java.io.IOException;
 import java.lang.annotation.Annotation;
 import java.net.InetSocketAddress;
 import java.security.PrivilegedExceptionAction;
+import java.security.Security;
 import java.util.Collection;
 import java.util.Set;
 import java.util.regex.Pattern;
 
-import javax.security.sasl.Sasl;
-
+import javax.security.auth.callback.*;
+import javax.security.sasl.*;
 import junit.framework.Assert;
 
 import org.apache.commons.logging.Log;
@@ -44,6 +45,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.Client.ConnectionId;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.*;
+import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.Token;
@@ -53,7 +55,6 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 
 import org.apache.log4j.Level;
-import org.apache.tools.ant.types.Assertions.EnabledAssertion;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -76,7 +77,8 @@ public class TestSaslRPC {
   @BeforeClass
   public static void setupKerb() {
     System.setProperty("java.security.krb5.kdc", "");
-    System.setProperty("java.security.krb5.realm", "NONE"); 
+    System.setProperty("java.security.krb5.realm", "NONE");
+    Security.addProvider(new SaslPlainServer.SecurityProvider());
   }    
 
   @Before
@@ -448,15 +450,132 @@ public class TestSaslRPC {
     System.out.println("Test is successful.");
   }
 
+  @Test
+  public void testSaslPlainServer() throws IOException {
+    runNegotiation(
+        new TestPlainCallbacks.Client("user", "pass"),
+        new TestPlainCallbacks.Server("user", "pass"));
+  }
+
+  @Test
+  public void testSaslPlainServerBadPassword() throws IOException {
+    SaslException e = null;
+    try {
+      runNegotiation(
+          new TestPlainCallbacks.Client("user", "pass1"),
+          new TestPlainCallbacks.Server("user", "pass2"));
+    } catch (SaslException se) {
+      e = se;
+    }
+    assertNotNull(e);
+    assertEquals("PLAIN auth failed: wrong password", e.getMessage());
+  }
+
+
+  private void runNegotiation(CallbackHandler clientCbh,
+                              CallbackHandler serverCbh)
+                                  throws SaslException {
+    String mechanism = AuthMethod.PLAIN.getMechanismName();
+
+    SaslClient saslClient = Sasl.createSaslClient(
+        new String[]{ mechanism }, null, null, null, null, clientCbh);
+    assertNotNull(saslClient);
+
+    SaslServer saslServer = Sasl.createSaslServer(
+        mechanism, null, "localhost", null, serverCbh);
+    assertNotNull("failed to find PLAIN server", saslServer);
+    
+    byte[] response = saslClient.evaluateChallenge(new byte[0]);
+    assertNotNull(response);
+    assertTrue(saslClient.isComplete());
+
+    response = saslServer.evaluateResponse(response);
+    assertNull(response);
+    assertTrue(saslServer.isComplete());
+    assertNotNull(saslServer.getAuthorizationID());
+  }
+  
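
Why a single evaluateChallenge(new byte[0]) completes the client side: SASL PLAIN (RFC 4616) is a client-first, one-message mechanism whose initial response packs authzid, authcid, and password separated by NUL bytes, with an empty authzid meaning "derive it from the authcid". A sketch of that wire format:

    import java.nio.charset.StandardCharsets;

    final class PlainWireFormat {
      // RFC 4616 initial response: [authzid] NUL authcid NUL passwd.
      static byte[] initialResponse(String user, String password) {
        return ("\0" + user + "\0" + password).getBytes(StandardCharsets.UTF_8);
      }
    }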
+  static class TestPlainCallbacks {
+    public static class Client implements CallbackHandler {
+      String user = null;
+      String password = null;
+      
+      Client(String user, String password) {
+        this.user = user;
+        this.password = password;
+      }
+      
+      @Override
+      public void handle(Callback[] callbacks)
+          throws UnsupportedCallbackException {
+        for (Callback callback : callbacks) {
+          if (callback instanceof NameCallback) {
+            ((NameCallback) callback).setName(user);
+          } else if (callback instanceof PasswordCallback) {
+            ((PasswordCallback) callback).setPassword(password.toCharArray());
+          } else {
+            throw new UnsupportedCallbackException(callback,
+                "Unrecognized SASL PLAIN Callback");
+          }
+        }
+      }
+    }
+    
+    public static class Server implements CallbackHandler {
+      String user = null;
+      String password = null;
+      
+      Server(String user, String password) {
+        this.user = user;
+        this.password = password;
+      }
+      
+      @Override
+      public void handle(Callback[] callbacks)
+          throws UnsupportedCallbackException, SaslException {
+        NameCallback nc = null;
+        PasswordCallback pc = null;
+        AuthorizeCallback ac = null;
+        
+        for (Callback callback : callbacks) {
+          if (callback instanceof NameCallback) {
+            nc = (NameCallback)callback;
+            assertEquals(user, nc.getName());
+          } else if (callback instanceof PasswordCallback) {
+            pc = (PasswordCallback)callback;
+            if (!password.equals(new String(pc.getPassword()))) {
+              throw new IllegalArgumentException("wrong password");
+            }
+          } else if (callback instanceof AuthorizeCallback) {
+            ac = (AuthorizeCallback)callback;
+            assertEquals(user, ac.getAuthorizationID());
+            assertEquals(user, ac.getAuthenticationID());
+            ac.setAuthorized(true);
+            ac.setAuthorizedID(ac.getAuthenticationID());
+          } else {
+            throw new UnsupportedCallbackException(callback,
+                "Unsupported SASL PLAIN Callback");
+          }
+        }
+        assertNotNull(nc);
+        assertNotNull(pc);
+        assertNotNull(ac);
+      }
+    }
+  }
+  
   private static Pattern BadToken =
       Pattern.compile(".*DIGEST-MD5: digest response format violation.*");
   private static Pattern KrbFailed =
       Pattern.compile(".*Failed on local exception:.* " +
                       "Failed to specify server's Kerberos principal name.*");
-  private static Pattern Denied = 
-      Pattern.compile(".*Authorization .* is enabled .*");
-  private static Pattern NoDigest =
-      Pattern.compile(".*Server is not configured to do DIGEST auth.*");
+  private static Pattern Denied(AuthenticationMethod method) {
+      return Pattern.compile(".*RemoteException.*AccessControlException.*: "
+          +method.getAuthMethod() + " authentication is not enabled.*");
+  }
+  private static Pattern NoTokenAuth =
+      Pattern.compile(".*IllegalArgumentException: " +
+                      "TOKEN authentication requires a secret manager");
   
   /*
    *  simple server
@@ -489,12 +608,39 @@ public class TestSaslRPC {
   }
   
   /*
+   *  token server
+   */
+  @Test
+  public void testTokenOnlyServer() throws Exception {
+    assertAuthEquals(Denied(SIMPLE), getAuthMethod(SIMPLE,   TOKEN));
+    assertAuthEquals(KrbFailed,      getAuthMethod(KERBEROS, TOKEN));
+  }
+
+  @Test
+  public void testTokenOnlyServerWithTokens() throws Exception {
+    assertAuthEquals(TOKEN, getAuthMethod(SIMPLE,   TOKEN, true));
+    assertAuthEquals(TOKEN, getAuthMethod(KERBEROS, TOKEN, true));
+    forceSecretManager = false;
+    assertAuthEquals(NoTokenAuth, getAuthMethod(SIMPLE,   TOKEN, true));
+    assertAuthEquals(NoTokenAuth, getAuthMethod(KERBEROS, TOKEN, true));
+  }
+
+  @Test
+  public void testTokenOnlyServerWithInvalidTokens() throws Exception {
+    assertAuthEquals(BadToken, getAuthMethod(SIMPLE,   TOKEN, false));
+    assertAuthEquals(BadToken, getAuthMethod(KERBEROS, TOKEN, false));
+    forceSecretManager = false;
+    assertAuthEquals(NoTokenAuth, getAuthMethod(SIMPLE,   TOKEN, false));
+    assertAuthEquals(NoTokenAuth, getAuthMethod(KERBEROS, TOKEN, false));
+  }
+
+  /*
    * kerberos server
    */
   @Test
   public void testKerberosServer() throws Exception {
-    assertAuthEquals(Denied,    getAuthMethod(SIMPLE,   KERBEROS));
-    assertAuthEquals(KrbFailed, getAuthMethod(KERBEROS, KERBEROS));    
+    assertAuthEquals(Denied(SIMPLE), getAuthMethod(SIMPLE,   KERBEROS));
+    assertAuthEquals(KrbFailed,      getAuthMethod(KERBEROS, KERBEROS));    
   }
 
   @Test
@@ -504,8 +650,8 @@ public class TestSaslRPC {
     assertAuthEquals(TOKEN, getAuthMethod(KERBEROS, KERBEROS, true));
     // can't fallback to simple when using kerberos w/o tokens
     forceSecretManager = false;
-    assertAuthEquals(NoDigest, getAuthMethod(SIMPLE,   KERBEROS, true));
-    assertAuthEquals(NoDigest, getAuthMethod(KERBEROS, KERBEROS, true));
+    assertAuthEquals(Denied(TOKEN), getAuthMethod(SIMPLE,   KERBEROS, true));
+    assertAuthEquals(Denied(TOKEN), getAuthMethod(KERBEROS, KERBEROS, true));
   }
 
   @Test
@@ -513,8 +659,8 @@ public class TestSaslRPC {
     assertAuthEquals(BadToken, getAuthMethod(SIMPLE,   KERBEROS, false));
     assertAuthEquals(BadToken, getAuthMethod(KERBEROS, KERBEROS, false));
     forceSecretManager = false;
-    assertAuthEquals(NoDigest, getAuthMethod(SIMPLE,   KERBEROS, true));
-    assertAuthEquals(NoDigest, getAuthMethod(KERBEROS, KERBEROS, true));
+    assertAuthEquals(Denied(TOKEN), getAuthMethod(SIMPLE,   KERBEROS, false));
+    assertAuthEquals(Denied(TOKEN), getAuthMethod(KERBEROS, KERBEROS, false));
   }
 
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java Fri Nov 30 19:58:09 2012
@@ -71,16 +71,75 @@ public class TestUserGroupInformation {
   /** configure ugi */
   @BeforeClass
   public static void setup() {
+    javax.security.auth.login.Configuration.setConfiguration(
+        new DummyLoginConfiguration());
+  }
+  
+  @Before
+  public void setupUgi() {
     conf = new Configuration();
     conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTH_TO_LOCAL,
         "RULE:[2:$1@$0](.*@HADOOP.APACHE.ORG)s/@.*//" +
         "RULE:[1:$1@$0](.*@HADOOP.APACHE.ORG)s/@.*//"
         + "DEFAULT");
     UserGroupInformation.setConfiguration(conf);
-    javax.security.auth.login.Configuration.setConfiguration(
-        new DummyLoginConfiguration());
+    UserGroupInformation.setLoginUser(null);
+  }
+  
+  @After
+  public void resetUgi() {
+    UserGroupInformation.setLoginUser(null);
+  }
+
+  @Test
+  public void testSimpleLogin() throws IOException {
+    tryLoginAuthenticationMethod(AuthenticationMethod.SIMPLE, true);
+  }
+
+  @Test
+  public void testTokenLogin() throws IOException {
+    tryLoginAuthenticationMethod(AuthenticationMethod.TOKEN, false);
+  }
+  
+  @Test
+  public void testProxyLogin() throws IOException {
+    tryLoginAuthenticationMethod(AuthenticationMethod.PROXY, false);
   }
   
+  private void tryLoginAuthenticationMethod(AuthenticationMethod method,
+                                            boolean expectSuccess)
+                                                throws IOException {
+    SecurityUtil.setAuthenticationMethod(method, conf);
+    UserGroupInformation.setConfiguration(conf); // pick up changed auth       
+
+    UserGroupInformation ugi = null;
+    Exception ex = null;
+    try {
+      ugi = UserGroupInformation.getLoginUser();
+    } catch (Exception e) {
+      ex = e;
+    }
+    if (expectSuccess) {
+      assertNotNull(ugi);
+      assertEquals(method, ugi.getAuthenticationMethod());
+    } else {
+      assertNotNull(ex);
+      assertEquals(UnsupportedOperationException.class, ex.getClass());
+      assertEquals(method + " login authentication is not supported",
+                   ex.getMessage());
+    }
+  }
+  
+  @Test
+  public void testGetRealAuthenticationMethod() {
+    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("user1");
+    ugi.setAuthenticationMethod(AuthenticationMethod.SIMPLE);
+    assertEquals(AuthenticationMethod.SIMPLE, ugi.getAuthenticationMethod());
+    assertEquals(AuthenticationMethod.SIMPLE, ugi.getRealAuthenticationMethod());
+    ugi = UserGroupInformation.createProxyUser("user2", ugi);
+    assertEquals(AuthenticationMethod.PROXY, ugi.getAuthenticationMethod());
+    assertEquals(AuthenticationMethod.SIMPLE, ugi.getRealAuthenticationMethod());
+  }
   /** Test login method */
   @Test
   public void testLogin() throws Exception {


