hadoop-common-commits mailing list archives

From: umamah...@apache.org
Subject: svn commit: r1593948 - in /hadoop/common/branches/HDFS-2006/hadoop-common-project: ./ hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/ hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/ hadoop-auth/src/...
Date: Mon, 12 May 2014 12:44:04 GMT
Author: umamahesh
Date: Mon May 12 12:43:59 2014
New Revision: 1593948

URL: http://svn.apache.org/r1593948
Log:
Merge from trunk to HDFS-2006 branch

Added:
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/
      - copied from r1593927, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNSToSwitchMappingWithDependency.java
      - copied unchanged from r1593927, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNSToSwitchMappingWithDependency.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMappingWithDependency.java
      - copied unchanged from r1593927, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMappingWithDependency.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestScriptBasedMappingWithDependency.java
      - copied unchanged from r1593927, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestScriptBasedMappingWithDependency.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-kms/
      - copied from r1593927, hadoop/common/trunk/hadoop-common-project/hadoop-kms/
Removed:
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/resources/webapps/test/testjsp.jsp
Modified:
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosName.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosName.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/pom.xml
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathIOException.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.crypto.key.KeyProviderFactory
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/site/apt/SecureMode.apt.vm
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/JarFinder.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/pom.xml

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java Mon May 12 12:43:59 2014
@@ -34,16 +34,18 @@ import javax.security.auth.login.LoginEx
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
+
 import java.io.File;
 import java.io.IOException;
-import java.security.Principal;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
+import java.util.regex.Pattern;
 
 import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
 
@@ -140,10 +142,10 @@ public class KerberosAuthenticationHandl
    */
   public static final String NAME_RULES = TYPE + ".name.rules";
 
-  private String principal;
   private String keytab;
   private GSSManager gssManager;
-  private LoginContext loginContext;
+  private Subject serverSubject = new Subject();
+  private List<LoginContext> loginContexts = new ArrayList<LoginContext>();
 
   /**
    * Initializes the authentication handler instance.
@@ -159,7 +161,7 @@ public class KerberosAuthenticationHandl
   @Override
   public void init(Properties config) throws ServletException {
     try {
-      principal = config.getProperty(PRINCIPAL, principal);
+      String principal = config.getProperty(PRINCIPAL);
       if (principal == null || principal.trim().length() == 0) {
         throw new ServletException("Principal not defined in configuration");
       }
@@ -170,23 +172,40 @@ public class KerberosAuthenticationHandl
       if (!new File(keytab).exists()) {
         throw new ServletException("Keytab does not exist: " + keytab);
       }
+      
+      // use all SPNEGO principals in the keytab if a principal isn't
+      // specifically configured
+      final String[] spnegoPrincipals;
+      if (principal.equals("*")) {
+        spnegoPrincipals = KerberosUtil.getPrincipalNames(
+            keytab, Pattern.compile("HTTP/.*"));
+        if (spnegoPrincipals.length == 0) {
+          throw new ServletException("Principals do not exist in the keytab");
+        }
+      } else {
+        spnegoPrincipals = new String[]{principal};
+      }
 
       String nameRules = config.getProperty(NAME_RULES, null);
       if (nameRules != null) {
         KerberosName.setRules(nameRules);
       }
       
-      Set<Principal> principals = new HashSet<Principal>();
-      principals.add(new KerberosPrincipal(principal));
-      Subject subject = new Subject(false, principals, new HashSet<Object>(), new HashSet<Object>());
-
-      KerberosConfiguration kerberosConfiguration = new KerberosConfiguration(keytab, principal);
-
-      LOG.info("Login using keytab "+keytab+", for principal "+principal);
-      loginContext = new LoginContext("", subject, null, kerberosConfiguration);
-      loginContext.login();
-
-      Subject serverSubject = loginContext.getSubject();
+      for (String spnegoPrincipal : spnegoPrincipals) {
+        LOG.info("Login using keytab {}, for principal {}",
+            keytab, principal);
+        final KerberosConfiguration kerberosConfiguration =
+            new KerberosConfiguration(keytab, spnegoPrincipal);
+        final LoginContext loginContext =
+            new LoginContext("", serverSubject, null, kerberosConfiguration);
+        try {
+          loginContext.login();
+        } catch (LoginException le) {
+          LOG.warn("Failed to login as [{}]", spnegoPrincipal, le);
+          throw new AuthenticationException(le);          
+        }
+        loginContexts.add(loginContext);
+      }
       try {
         gssManager = Subject.doAs(serverSubject, new PrivilegedExceptionAction<GSSManager>() {
 
@@ -198,7 +217,6 @@ public class KerberosAuthenticationHandl
       } catch (PrivilegedActionException ex) {
         throw ex.getException();
       }
-      LOG.info("Initialized, principal [{}] from keytab [{}]", principal, keytab);
     } catch (Exception ex) {
       throw new ServletException(ex);
     }
@@ -211,14 +229,16 @@ public class KerberosAuthenticationHandl
    */
   @Override
   public void destroy() {
-    try {
-      if (loginContext != null) {
+    keytab = null;
+    serverSubject = null;
+    for (LoginContext loginContext : loginContexts) {
+      try {
         loginContext.logout();
-        loginContext = null;
+      } catch (LoginException ex) {
+        LOG.warn(ex.getMessage(), ex);
       }
-    } catch (LoginException ex) {
-      LOG.warn(ex.getMessage(), ex);
     }
+    loginContexts.clear();
   }
 
   /**
@@ -233,12 +253,12 @@ public class KerberosAuthenticationHandl
   }
 
   /**
-   * Returns the Kerberos principal used by the authentication handler.
+   * Returns the Kerberos principals used by the authentication handler.
    *
-   * @return the Kerberos principal used by the authentication handler.
+   * @return the Kerberos principals used by the authentication handler.
    */
-  protected String getPrincipal() {
-    return principal;
+  protected Set<KerberosPrincipal> getPrincipals() {
+    return serverSubject.getPrincipals(KerberosPrincipal.class);
   }
 
   /**
@@ -304,7 +324,7 @@ public class KerberosAuthenticationHandl
       authorization = authorization.substring(KerberosAuthenticator.NEGOTIATE.length()).trim();
       final Base64 base64 = new Base64(0);
       final byte[] clientToken = base64.decode(authorization);
-      Subject serverSubject = loginContext.getSubject();
+      final String serverName = request.getServerName();
       try {
         token = Subject.doAs(serverSubject, new PrivilegedExceptionAction<AuthenticationToken>() {
 
@@ -314,15 +334,15 @@ public class KerberosAuthenticationHandl
             GSSContext gssContext = null;
             GSSCredential gssCreds = null;
             try {
-              if (IBM_JAVA) {
-                // IBM JDK needs non-null credentials to be passed to createContext here, with
-                // SPNEGO mechanism specified, otherwise JGSS will use its default mechanism
-                // only, which is Kerberos V5.
-                gssCreds = gssManager.createCredential(null, GSSCredential.INDEFINITE_LIFETIME,
-                    new Oid[]{KerberosUtil.getOidInstance("GSS_SPNEGO_MECH_OID"),
-                        KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID")},
-                    GSSCredential.ACCEPT_ONLY);
-              }
+              gssCreds = gssManager.createCredential(
+                  gssManager.createName(
+                      KerberosUtil.getServicePrincipal("HTTP", serverName),
+                      KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL")),
+                  GSSCredential.INDEFINITE_LIFETIME,
+                  new Oid[]{
+                    KerberosUtil.getOidInstance("GSS_SPNEGO_MECH_OID"),
+                    KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID")},
+                  GSSCredential.ACCEPT_ONLY);
               gssContext = gssManager.createContext(gssCreds);
               byte[] serverToken = gssContext.acceptSecContext(clientToken, 0, clientToken.length);
               if (serverToken != null && serverToken.length > 0) {
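
The net effect of the handler changes above: when the configured principal is "*", every HTTP/.* entry in the keytab is logged in to a single shared Subject. A minimal sketch of how a caller might now configure this (the keytab path and realm are hypothetical):

    import java.util.Properties;
    import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;

    public class WildcardSpnegoSketch {
      public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // hypothetical keytab holding e.g. HTTP/host1@EXAMPLE.COM and
        // HTTP/host2@EXAMPLE.COM; getPrincipalNames() filters on HTTP/.*
        props.setProperty(KerberosAuthenticationHandler.KEYTAB,
            "/etc/security/http.keytab");
        // "*" selects every SPNEGO principal instead of one fixed name
        props.setProperty(KerberosAuthenticationHandler.PRINCIPAL, "*");
        KerberosAuthenticationHandler handler =
            new KerberosAuthenticationHandler();
        handler.init(props);  // one LoginContext per principal, shared Subject
        handler.destroy();    // logs every context out again
      }
    }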

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosName.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosName.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosName.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosName.java Mon May 12 12:43:59 2014
@@ -21,6 +21,7 @@ package org.apache.hadoop.security.authe
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -66,7 +67,7 @@ public class KerberosName {
    */
   private static final Pattern ruleParser =
     Pattern.compile("\\s*((DEFAULT)|(RULE:\\[(\\d*):([^\\]]*)](\\(([^)]*)\\))?"+
-                    "(s/([^/]*)/([^/]*)/(g)?)?))");
+                    "(s/([^/]*)/([^/]*)/(g)?)?))/?(L)?");
 
   /**
    * A pattern that recognizes simple/non-simple names.
@@ -171,6 +172,7 @@ public class KerberosName {
     private final Pattern fromPattern;
     private final String toPattern;
     private final boolean repeat;
+    private final boolean toLowerCase;
 
     Rule() {
       isDefault = true;
@@ -180,10 +182,11 @@ public class KerberosName {
       fromPattern = null;
       toPattern = null;
       repeat = false;
+      toLowerCase = false;
     }
 
     Rule(int numOfComponents, String format, String match, String fromPattern,
-         String toPattern, boolean repeat) {
+         String toPattern, boolean repeat, boolean toLowerCase) {
       isDefault = false;
       this.numOfComponents = numOfComponents;
       this.format = format;
@@ -192,6 +195,7 @@ public class KerberosName {
         fromPattern == null ? null : Pattern.compile(fromPattern);
       this.toPattern = toPattern;
       this.repeat = repeat;
+      this.toLowerCase = toLowerCase;
     }
 
     @Override
@@ -220,6 +224,9 @@ public class KerberosName {
             buf.append('g');
           }
         }
+        if (toLowerCase) {
+          buf.append("/L");
+        }
       }
       return buf.toString();
     }
@@ -308,6 +315,9 @@ public class KerberosName {
         throw new NoMatchingRule("Non-simple name " + result +
                                  " after auth_to_local rule " + this);
       }
+      if (toLowerCase && result != null) {
+        result = result.toLowerCase(Locale.ENGLISH);
+      }
       return result;
     }
   }
@@ -328,7 +338,8 @@ public class KerberosName {
                             matcher.group(7),
                             matcher.group(9),
                             matcher.group(10),
-                            "g".equals(matcher.group(11))));
+                            "g".equals(matcher.group(11)),
+                            "L".equals(matcher.group(12))));
       }
       remaining = remaining.substring(matcher.end());
     }
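
The regex change above adds an optional trailing /L flag to auth_to_local rules, which lower-cases the translated name. A minimal usage sketch (the FOO.COM realm is illustrative):

    import org.apache.hadoop.security.authentication.util.KerberosName;

    public class LowerCaseRuleSketch {
      public static void main(String[] args) throws Exception {
        // the trailing /L lower-cases whatever the rule produces
        KerberosName.setRules("RULE:[1:$1]/L\nDEFAULT");
        KerberosName name = new KerberosName("Joe@FOO.COM");
        System.out.println(name.getShortName());  // prints "joe"
      }
    }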

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java Mon May 12 12:43:59 2014
@@ -18,6 +18,7 @@ import org.apache.hadoop.security.authen
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
 import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.ietf.jgss.GSSContext;
@@ -30,10 +31,18 @@ import org.junit.Test;
 import org.mockito.Mockito;
 import org.ietf.jgss.Oid;
 
+import javax.security.auth.Subject;
+import javax.security.auth.kerberos.KerberosPrincipal;
+import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
+
 import java.io.File;
+import java.security.Principal;
+import java.util.Arrays;
+import java.util.List;
 import java.util.Properties;
+import java.util.Set;
 import java.util.concurrent.Callable;
 
 public class TestKerberosAuthenticationHandler
@@ -110,8 +119,65 @@ public class TestKerberosAuthenticationH
 
   @Test(timeout=60000)
   public void testInit() throws Exception {
-    Assert.assertEquals(KerberosTestUtils.getServerPrincipal(), handler.getPrincipal());
     Assert.assertEquals(KerberosTestUtils.getKeytabFile(), handler.getKeytab());
+    Set<KerberosPrincipal> principals = handler.getPrincipals();
+    Principal expectedPrincipal =
+        new KerberosPrincipal(KerberosTestUtils.getServerPrincipal());
+    Assert.assertTrue(principals.contains(expectedPrincipal));
+    Assert.assertEquals(1, principals.size());
+  }
+
+  // dynamic configuration of HTTP principals
+  @Test(timeout=60000)
+  public void testDynamicPrincipalDiscovery() throws Exception {
+    String[] keytabUsers = new String[]{
+        "HTTP/host1", "HTTP/host2", "HTTP2/host1", "XHTTP/host"
+    };
+    String keytab = KerberosTestUtils.getKeytabFile();
+    getKdc().createPrincipal(new File(keytab), keytabUsers);
+
+    // destroy handler created in setUp()
+    handler.destroy();
+    Properties props = new Properties();
+    props.setProperty(KerberosAuthenticationHandler.KEYTAB, keytab);
+    props.setProperty(KerberosAuthenticationHandler.PRINCIPAL, "*");
+    handler = getNewAuthenticationHandler();
+    handler.init(props);
+
+    Assert.assertEquals(KerberosTestUtils.getKeytabFile(), handler.getKeytab());    
+    
+    Set<KerberosPrincipal> loginPrincipals = handler.getPrincipals();
+    for (String user : keytabUsers) {
+      Principal principal = new KerberosPrincipal(
+          user + "@" + KerberosTestUtils.getRealm());
+      boolean expected = user.startsWith("HTTP/");
+      Assert.assertEquals("checking for "+user, expected, 
+          loginPrincipals.contains(principal));
+    }
+  }
+
+  // dynamic configuration of HTTP principals
+  @Test(timeout=60000)
+  public void testDynamicPrincipalDiscoveryMissingPrincipals() throws Exception {
+    String[] keytabUsers = new String[]{"hdfs/localhost"};
+    String keytab = KerberosTestUtils.getKeytabFile();
+    getKdc().createPrincipal(new File(keytab), keytabUsers);
+
+    // destroy handler created in setUp()
+    handler.destroy();
+    Properties props = new Properties();
+    props.setProperty(KerberosAuthenticationHandler.KEYTAB, keytab);
+    props.setProperty(KerberosAuthenticationHandler.PRINCIPAL, "*");
+    handler = getNewAuthenticationHandler();
+    try {
+      handler.init(props);
+      Assert.fail("init should have failed");
+    } catch (ServletException ex) {
+      Assert.assertEquals("Principals do not exist in the keytab",
+          ex.getCause().getMessage());
+    } catch (Throwable t) {
+      Assert.fail("wrong exception: "+t);
+    }
   }
 
   @Test(timeout=60000)
@@ -190,7 +256,8 @@ public class TestKerberosAuthenticationH
 
     Mockito.when(request.getHeader(KerberosAuthenticator.AUTHORIZATION))
       .thenReturn(KerberosAuthenticator.NEGOTIATE + " " + token);
-
+    Mockito.when(request.getServerName()).thenReturn("localhost");
+    
     AuthenticationToken authToken = handler.authenticate(request, response);
 
     if (authToken != null) {

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosName.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosName.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosName.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosName.java Mon May 12 12:43:59 2014
@@ -91,6 +91,22 @@ public class TestKerberosName {
     checkBadTranslation("root/joe@FOO.COM");
   }
 
+  @Test
+  public void testToLowerCase() throws Exception {
+    String rules =
+        "RULE:[1:$1]/L\n" +
+        "RULE:[2:$1]/L\n" +
+        "RULE:[2:$1;$2](^.*;admin$)s/;admin$///L\n" +
+        "RULE:[2:$1;$2](^.*;guest$)s/;guest$//g/L\n" +
+        "DEFAULT";
+    KerberosName.setRules(rules);
+    KerberosName.printRules();
+    checkTranslation("Joe@FOO.COM", "joe");
+    checkTranslation("Joe/root@FOO.COM", "joe");
+    checkTranslation("Joe/admin@FOO.COM", "joe");
+    checkTranslation("Joe/guestguest@FOO.COM", "joe");
+  }
+
   @After
   public void clear() {
     System.clearProperty("java.security.krb5.realm");

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/CHANGES.txt Mon May 12 12:43:59 2014
@@ -8,6 +8,8 @@ Trunk (Unreleased)
     FSDataOutputStream.sync() and Syncable.sync().  (szetszwo)
 
   NEW FEATURES
+
+    HADOOP-10433. Key Management Server based on KeyProvider API. (tucu)
     
   IMPROVEMENTS
 
@@ -144,6 +146,8 @@ Trunk (Unreleased)
     HADOOP-10534. KeyProvider getKeysMetadata should take a list of names 
     rather than returning all keys. (omalley)
 
+    HADOOP-10563. Remove the dependency of jsp in trunk. (wheat9)
+
   BUG FIXES
 
     HADOOP-9451. Fault single-layer config if node group topology is enabled.
@@ -361,6 +365,21 @@ Release 2.5.0 - UNRELEASED
     HADOOP-10322. Add ability to read principal names from a keytab.
     (Benoy Antony and Daryn Sharp via kihwal)
 
+    HADOOP-10549. MAX_SUBST and varPat should be final in Configuration.java.
+    (Gera Shegalov via cnauroth)
+
+    HADOOP-10471. Reduce the visibility of constants in ProxyUsers.
+    (Benoy Antony via wheat9)
+
+    HADOOP-10556. Add toLowerCase support to auth_to_local rules 
+    for service name. (tucu)
+
+    HADOOP-10467. Enable proxyuser specification to support list of users in
+    addition to list of groups (Benoy Antony via Arpit Agarwal)
+
+    HADOOP-10158. SPNEGO should work with multiple interfaces/SPNs.
+    (daryn via kihwal)
+
   OPTIMIZATIONS
 
   BUG FIXES 
@@ -422,6 +441,30 @@ Release 2.5.0 - UNRELEASED
     HADOOP-10540. Datanode upgrade in Windows fails with hardlink error.
     (Chris Nauroth and Arpit Agarwal)
 
+    HADOOP-10508. RefreshCallQueue fails when authorization is enabled.
+    (Chris Li via wheat9)
+
+    HADOOP-10547. Give SaslPropertiesResolver.getDefaultProperties() public
+    scope. (Benoy Antony via Arpit Agarwal)
+
+    HADOOP-10543. RemoteException's unwrapRemoteException method failed for
+    PathIOException. (Yongjun Zhang via atm)
+
+    HADOOP-10562. Namenode exits on exception without printing stack trace
+    in AbstractDelegationTokenSecretManager. (Arpit Agarwal)
+
+    HADOOP-10568. Add s3 server-side encryption. (David S. Wang via atm)
+
+    HADOOP-10541. InputStream in MiniKdc#initKDCServer for minikdc.ldiff is not
+    closed. (Swarnim Kulkarni via cnauroth)
+
+    HADOOP-10517. InputStream is not closed in two methods of JarFinder.
+    (Ted Yu via cnauroth)
+
+    HADOOP-10581. TestUserGroupInformation#testGetServerSideGroups fails
+    because groups stored in Set and ArrayList are compared. 
+    (Mit Desai via kihwal)
+
 Release 2.4.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1588992-1593927

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml Mon May 12 12:43:59 2014
@@ -357,4 +357,10 @@
        <Bug code="NP" />
      </Match>
 
+  <Match>
+    <Class name="org.apache.hadoop.crypto.key.kms.KMSClientProvider"/>
+    <Method name="validateResponse"/>
+    <Bug pattern="REC_CATCH_EXCEPTION"/>
+  </Match>
+
 </FindBugsFilter>

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/pom.xml?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/pom.xml (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/pom.xml Mon May 12 12:43:59 2014
@@ -120,26 +120,6 @@
       <scope>compile</scope>
     </dependency>
     <dependency>
-      <groupId>tomcat</groupId>
-      <artifactId>jasper-compiler</artifactId>
-      <scope>runtime</scope>
-    </dependency>
-    <dependency>
-      <groupId>tomcat</groupId>
-      <artifactId>jasper-runtime</artifactId>
-      <scope>runtime</scope>
-    </dependency>
-    <dependency>
-      <groupId>javax.servlet.jsp</groupId>
-      <artifactId>jsp-api</artifactId>
-      <scope>runtime</scope>
-    </dependency>
-    <dependency>
-      <groupId>commons-el</groupId>
-      <artifactId>commons-el</artifactId>
-      <scope>runtime</scope>
-    </dependency>
-    <dependency>
       <groupId>commons-logging</groupId>
       <artifactId>commons-logging</artifactId>
       <scope>compile</scope>

Propchange: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1588992-1593927

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java Mon May 12 12:43:59 2014
@@ -797,14 +797,16 @@ public class Configuration implements It
     reloadConfiguration();
   }
   
-  private static Pattern varPat = Pattern.compile("\\$\\{[^\\}\\$\u0020]+\\}");
-  private static int MAX_SUBST = 20;
+  private static final Pattern VAR_PATTERN =
+      Pattern.compile("\\$\\{[^\\}\\$\u0020]+\\}");
+
+  private static final int MAX_SUBST = 20;
 
   private String substituteVars(String expr) {
     if (expr == null) {
       return null;
     }
-    Matcher match = varPat.matcher("");
+    Matcher match = VAR_PATTERN.matcher("");
     String eval = expr;
     Set<String> evalSet = new HashSet<String>();
     for(int s=0; s<MAX_SUBST; s++) {
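
For context, substituteVars() is what expands ${...} references when a value is read back; the rename to VAR_PATTERN and the final MAX_SUBST change no behaviour. A small sketch of the expansion it performs:

    import org.apache.hadoop.conf.Configuration;

    public class SubstitutionSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);  // skip default resources
        conf.set("base.dir", "/data");
        conf.set("log.dir", "${base.dir}/logs");
        // get() runs substituteVars(): VAR_PATTERN finds ${...} references
        // and expands them, up to MAX_SUBST (20) levels of nesting
        System.out.println(conf.get("log.dir"));  // prints "/data/logs"
      }
    }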

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java Mon May 12 12:43:59 2014
@@ -139,6 +139,9 @@ public class CommonConfigurationKeys ext
   public static final String 
   HADOOP_SECURITY_SERVICE_AUTHORIZATION_REFRESH_USER_MAPPINGS =
       "security.refresh.user.mappings.protocol.acl";
+  public static final String
+  HADOOP_SECURITY_SERVICE_AUTHORIZATION_REFRESH_CALLQUEUE =
+      "security.refresh.callqueue.protocol.acl";
   public static final String 
   SECURITY_HA_SERVICE_PROTOCOL_ACL = "security.ha.service.protocol.acl";
   public static final String 

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java Mon May 12 12:43:59 2014
@@ -78,6 +78,8 @@ public class CommonConfigurationKeysPubl
   /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
   public static final String  NET_TOPOLOGY_TABLE_MAPPING_FILE_KEY =
     "net.topology.table.file.name";
+  public static final String NET_DEPENDENCY_SCRIPT_FILE_NAME_KEY = 
+    "net.topology.dependency.script.file.name";
 
   /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
   public static final String  FS_TRASH_CHECKPOINT_INTERVAL_KEY =

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathIOException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathIOException.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathIOException.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathIOException.java Mon May 12 12:43:59 2014
@@ -40,7 +40,7 @@ public class PathIOException extends IOE
    *  @param path for the exception
    */
   public PathIOException(String path) {
-    this(path, EIO, null);
+    this(path, EIO);
   }
 
   /**
@@ -59,7 +59,8 @@ public class PathIOException extends IOE
    * @param error custom string to use an the error text
    */
   public PathIOException(String path, String error) {
-    this(path, error, null);
+    super(error);
+    this.path = path;
   }
 
   protected PathIOException(String path, String error, Throwable cause) {

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java Mon May 12 12:43:59 2014
@@ -63,6 +63,8 @@ class Jets3tNativeFileSystemStore implem
   private boolean multipartEnabled;
   private long multipartCopyBlockSize;
   static final long MAX_PART_SIZE = (long)5 * 1024 * 1024 * 1024;
+
+  private String serverSideEncryptionAlgorithm;
   
   public static final Log LOG =
       LogFactory.getLog(Jets3tNativeFileSystemStore.class);
@@ -87,6 +89,7 @@ class Jets3tNativeFileSystemStore implem
     multipartCopyBlockSize = Math.min(
         conf.getLong("fs.s3n.multipart.copy.block.size", MAX_PART_SIZE),
         MAX_PART_SIZE);
+    serverSideEncryptionAlgorithm = conf.get("fs.s3n.server-side-encryption-algorithm");
 
     bucket = new S3Bucket(uri.getHost());
   }
@@ -107,6 +110,7 @@ class Jets3tNativeFileSystemStore implem
       object.setDataInputStream(in);
       object.setContentType("binary/octet-stream");
       object.setContentLength(file.length());
+      object.setServerSideEncryptionAlgorithm(serverSideEncryptionAlgorithm);
       if (md5Hash != null) {
         object.setMd5Hash(md5Hash);
       }
@@ -130,6 +134,7 @@ class Jets3tNativeFileSystemStore implem
     object.setDataInputFile(file);
     object.setContentType("binary/octet-stream");
     object.setContentLength(file.length());
+    object.setServerSideEncryptionAlgorithm(serverSideEncryptionAlgorithm);
     if (md5Hash != null) {
       object.setMd5Hash(md5Hash);
     }
@@ -156,6 +161,7 @@ class Jets3tNativeFileSystemStore implem
       object.setDataInputStream(new ByteArrayInputStream(new byte[0]));
       object.setContentType("binary/octet-stream");
       object.setContentLength(0);
+      object.setServerSideEncryptionAlgorithm(serverSideEncryptionAlgorithm);
       s3Service.putObject(bucket, object);
     } catch (S3ServiceException e) {
       handleS3ServiceException(e);
@@ -317,8 +323,11 @@ class Jets3tNativeFileSystemStore implem
           return;
         }
       }
+
+      S3Object dstObject = new S3Object(dstKey);
+      dstObject.setServerSideEncryptionAlgorithm(serverSideEncryptionAlgorithm);
       s3Service.copyObject(bucket.getName(), srcKey, bucket.getName(),
-          new S3Object(dstKey), false);
+          dstObject, false);
     } catch (ServiceException e) {
       handleServiceException(srcKey, e);
     }
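
A hedged sketch of enabling the new server-side encryption from client code; the bucket name is hypothetical, and AES256 is the only algorithm the commit documents (see the core-default.xml change later in this revision):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class S3nSseSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // unset (the default) keeps the old unencrypted behaviour
        conf.set("fs.s3n.server-side-encryption-algorithm", "AES256");
        // bucket name and AWS credentials are hypothetical
        FileSystem fs = FileSystem.get(URI.create("s3n://my-bucket/"), conf);
        // every store/copy issued by Jets3tNativeFileSystemStore now sets
        // the configured algorithm on the outgoing S3Object
      }
    }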

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java Mon May 12 12:43:59 2014
@@ -45,7 +45,7 @@ import org.apache.hadoop.fs.CommonConfig
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public final class ScriptBasedMapping extends CachedDNSToSwitchMapping {
+public class ScriptBasedMapping extends CachedDNSToSwitchMapping {
 
   /**
    * Minimum number of arguments: {@value}
@@ -63,6 +63,7 @@ public final class ScriptBasedMapping ex
    */
   static final String SCRIPT_FILENAME_KEY = 
                      CommonConfigurationKeys.NET_TOPOLOGY_SCRIPT_FILE_NAME_KEY ;
+
   /**
    * key to the argument count that the script supports
    * {@value}
@@ -84,7 +85,15 @@ public final class ScriptBasedMapping ex
    *
    */
   public ScriptBasedMapping() {
-    super(new RawScriptBasedMapping());
+    this(new RawScriptBasedMapping());
+  }
+
+  /**
+   * Create an instance from the given raw mapping
+   * @param rawMap raw DNSTOSwithMapping
+   */
+  public ScriptBasedMapping(DNSToSwitchMapping rawMap) {
+    super(rawMap);
   }
 
   /**
@@ -132,7 +141,7 @@ public final class ScriptBasedMapping ex
    * This is the uncached script mapping that is fed into the cache managed
    * by the superclass {@link CachedDNSToSwitchMapping}
    */
-  private static final class RawScriptBasedMapping
+  protected static class RawScriptBasedMapping
       extends AbstractDNSToSwitchMapping {
     private String scriptName;
     private int maxArgs; //max hostnames per call of the script
@@ -176,7 +185,7 @@ public final class ScriptBasedMapping ex
         return m;
       }
 
-      String output = runResolveCommand(names);
+      String output = runResolveCommand(names, scriptName);
       if (output != null) {
         StringTokenizer allSwitchInfo = new StringTokenizer(output);
         while (allSwitchInfo.hasMoreTokens()) {
@@ -208,7 +217,8 @@ public final class ScriptBasedMapping ex
      * @return null if the number of arguments is out of range,
      * or the output of the command.
      */
-    private String runResolveCommand(List<String> args) {
+    protected String runResolveCommand(List<String> args, 
+        String commandScriptName) {
       int loopCount = 0;
       if (args.size() == 0) {
         return null;
@@ -225,7 +235,7 @@ public final class ScriptBasedMapping ex
       while (numProcessed != args.size()) {
         int start = maxArgs * loopCount;
         List<String> cmdList = new ArrayList<String>();
-        cmdList.add(scriptName);
+        cmdList.add(commandScriptName);
         for (numProcessed = start; numProcessed < (start + maxArgs) &&
             numProcessed < args.size(); numProcessed++) {
           cmdList.add(args.get(numProcessed));
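
Opening the class up (dropping final, widening RawScriptBasedMapping and runResolveCommand) exists to let the new ScriptBasedMappingWithDependency plug in. A hypothetical subclass sketch under that reading, not the actual API of that class:

    import java.util.List;
    import org.apache.hadoop.net.ScriptBasedMapping;

    public class DependencyAwareMapping extends ScriptBasedMapping {

      protected static class RawMapping extends RawScriptBasedMapping {
        // runResolveCommand is now protected and parameterized on the script,
        // so the same batching/exec helper can also run a dependency script
        String runDependencyScript(List<String> names, String dependencyScript) {
          return runResolveCommand(names, dependencyScript);
        }
      }

      public DependencyAwareMapping() {
        super(new RawMapping());  // the new injectable-raw-mapping constructor
      }
    }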

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java Mon May 12 12:43:59 2014
@@ -80,7 +80,7 @@ public class SaslPropertiesResolver impl
    * The default Sasl Properties read from the configuration
    * @return sasl Properties
    */
-  protected Map<String,String> getDefaultProperties() {
+  public Map<String,String> getDefaultProperties() {
     return properties;
   }
 

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Mon May 12 12:43:59 2014
@@ -37,6 +37,7 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -1464,7 +1465,8 @@ public class UserGroupInformation {
   public synchronized String[] getGroupNames() {
     ensureInitialized();
     try {
-      List<String> result = groups.getGroups(getShortUserName());
+      Set<String> result = new LinkedHashSet<String>
+        (groups.getGroups(getShortUserName()));
       return result.toArray(new String[result.size()]);
     } catch (IOException ie) {
       LOG.warn("No groups available for user " + getShortUserName());

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java Mon May 12 12:43:59 2014
@@ -40,13 +40,16 @@ import com.google.common.annotations.Vis
 public class ProxyUsers {
 
   private static final String CONF_HOSTS = ".hosts";
-  public static final String CONF_GROUPS = ".groups";
-  public static final String CONF_HADOOP_PROXYUSER = "hadoop.proxyuser.";
-  public static final String CONF_HADOOP_PROXYUSER_RE = "hadoop\\.proxyuser\\.";
+  private static final String CONF_USERS = ".users";
+  private static final String CONF_GROUPS = ".groups";
+  private static final String CONF_HADOOP_PROXYUSER = "hadoop.proxyuser.";
+  private static final String CONF_HADOOP_PROXYUSER_RE = "hadoop\\.proxyuser\\.";
   public static final String CONF_HADOOP_PROXYSERVERS = "hadoop.proxyservers";
   
   private static boolean init = false;
-  // list of groups and hosts per proxyuser
+  //list of users, groups and hosts per proxyuser
+  private static Map<String, Collection<String>> proxyUsers =
+    new HashMap<String, Collection<String>>();
   private static Map<String, Collection<String>> proxyGroups = 
     new HashMap<String, Collection<String>>();
   private static Map<String, Collection<String>> proxyHosts = 
@@ -55,7 +58,7 @@ public class ProxyUsers {
     new HashSet<String>();
 
   /**
-   * reread the conf and get new values for "hadoop.proxyuser.*.groups/hosts"
+   * reread the conf and get new values for "hadoop.proxyuser.*.groups/users/hosts"
    */
   public static void refreshSuperUserGroupsConfiguration() {
     //load server side configuration;
@@ -71,11 +74,20 @@ public class ProxyUsers {
     // remove all existing stuff
     proxyGroups.clear();
     proxyHosts.clear();
+    proxyUsers.clear();
     proxyServers.clear();
+    
+    // get all the new keys for users
+    String regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_USERS;
+    Map<String,String> allMatchKeys = conf.getValByRegex(regex);
+    for(Entry<String, String> entry : allMatchKeys.entrySet()) {  
+        Collection<String> users = StringUtils.getTrimmedStringCollection(entry.getValue());
+        proxyUsers.put(entry.getKey(), users);
+      }
 
     // get all the new keys for groups
-    String regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_GROUPS;
-    Map<String,String> allMatchKeys = conf.getValByRegex(regex);
+    regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_GROUPS;
+    allMatchKeys = conf.getValByRegex(regex);
     for(Entry<String, String> entry : allMatchKeys.entrySet()) {
       Collection<String> groups = StringUtils.getTrimmedStringCollection(entry.getValue());
       proxyGroups.put(entry.getKey(), groups );
@@ -108,7 +120,17 @@ public class ProxyUsers {
     }
     return proxyServers.contains(remoteAddr);
   }
-
+  
+  /**
+   * Returns configuration key for effective users allowed for a superuser
+   * 
+   * @param userName name of the superuser
+   * @return configuration key for superuser users
+   */
+  public static String getProxySuperuserUserConfKey(String userName) {
+    return ProxyUsers.CONF_HADOOP_PROXYUSER+userName+ProxyUsers.CONF_USERS;
+  }
+  
   /**
    * Returns configuration key for effective user groups allowed for a superuser
    * 
@@ -146,27 +168,40 @@ public class ProxyUsers {
     if (user.getRealUser() == null) {
       return;
     }
-    boolean groupAuthorized = false;
+    boolean userAuthorized = false;
     boolean ipAuthorized = false;
     UserGroupInformation superUser = user.getRealUser();
-
-    Collection<String> allowedUserGroups = proxyGroups.get(
-        getProxySuperuserGroupConfKey(superUser.getShortUserName()));
     
-    if (isWildcardList(allowedUserGroups)) {
-      groupAuthorized = true;
-    } else if (allowedUserGroups != null && !allowedUserGroups.isEmpty()) {
-      for (String group : user.getGroupNames()) {
-        if (allowedUserGroups.contains(group)) {
-          groupAuthorized = true;
-          break;
-        }
+    Collection<String> allowedUsers = proxyUsers.get(
+        getProxySuperuserUserConfKey(superUser.getShortUserName()));
+
+    if (isWildcardList(allowedUsers)) {
+      userAuthorized = true;
+    } else if (allowedUsers != null && !allowedUsers.isEmpty()) {
+      if (allowedUsers.contains(user.getShortUserName())) {
+        userAuthorized = true;
       }
     }
 
-    if (!groupAuthorized) {
-      throw new AuthorizationException("User: " + superUser.getUserName()
-          + " is not allowed to impersonate " + user.getUserName());
+    if (!userAuthorized) {
+      Collection<String> allowedUserGroups = proxyGroups.get(
+          getProxySuperuserGroupConfKey(superUser.getShortUserName()));
+      
+      if (isWildcardList(allowedUserGroups)) {
+        userAuthorized = true;
+      } else if (allowedUserGroups != null && !allowedUserGroups.isEmpty()) {
+        for (String group : user.getGroupNames()) {
+          if (allowedUserGroups.contains(group)) {
+            userAuthorized = true;
+            break;
+          }
+        }
+      }
+
+      if (!userAuthorized) {
+        throw new AuthorizationException("User: " + superUser.getUserName()
+            + " is not allowed to impersonate " + user.getUserName());
+      }
     }
     
     Collection<String> ipList = proxyHosts.get(
@@ -188,7 +223,7 @@ public class ProxyUsers {
         }
       }
     }
-    if(!ipAuthorized) {
+    if (!ipAuthorized) {
       throw new AuthorizationException("Unauthorized connection for super-user: "
           + superUser.getUserName() + " from IP " + remoteAddress);
     }
@@ -217,6 +252,11 @@ public class ProxyUsers {
       (list.size() == 1) &&
       (list.contains("*"));
   }
+   
+  @VisibleForTesting
+  public static Map<String, Collection<String>> getProxyUsers() {
+    return proxyUsers;
+  }
 
   @VisibleForTesting
   public static Map<String, Collection<String>> getProxyGroups() {
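
With the new .users keys, a superuser can be allowed to impersonate an explicit list of users before any group check runs. A hedged configuration sketch (the superuser, user names, and host are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.authorize.ProxyUsers;

    public class ProxyUserSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("hadoop.proxyuser.oozie.users", "alice,bob");  // new .users key
        conf.set("hadoop.proxyuser.oozie.hosts", "10.0.0.1");
        ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
        // authorize() now passes for oozie impersonating alice or bob from
        // 10.0.0.1, and falls back to the .groups check for everyone else
      }
    }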

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java Mon May 12 12:43:59 2014
@@ -209,8 +209,7 @@ extends AbstractDelegationTokenIdentifie
       currentTokens.put(identifier, new DelegationTokenInformation(renewDate,
           password, getTrackingIdIfEnabled(identifier)));
     } else {
-      throw new IOException(
-          "Same delegation token being added twice.");
+      throw new IOException("Same delegation token being added twice.");
     }
   }
 
@@ -355,27 +354,24 @@ extends AbstractDelegationTokenIdentifie
    */
   public synchronized long renewToken(Token<TokenIdent> token,
                          String renewer) throws InvalidToken, IOException {
-    long now = Time.now();
     ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
     DataInputStream in = new DataInputStream(buf);
     TokenIdent id = createIdentifier();
     id.readFields(in);
-    LOG.info("Token renewal requested for identifier: "+id);
-    
+    LOG.info("Token renewal for identifier: " + id + "; total currentTokens "
+        +  currentTokens.size());
+
+    long now = Time.now();
     if (id.getMaxDate() < now) {
-      throw new InvalidToken("User " + renewer + 
-                             " tried to renew an expired token");
+      throw new InvalidToken(renewer + " tried to renew an expired token");
     }
     if ((id.getRenewer() == null) || (id.getRenewer().toString().isEmpty())) {
-      throw new AccessControlException("User " + renewer + 
-                                       " tried to renew a token without " +
-                                       "a renewer");
+      throw new AccessControlException(renewer +
+          " tried to renew a token without a renewer");
     }
     if (!id.getRenewer().toString().equals(renewer)) {
-      throw new AccessControlException("Client " + renewer + 
-                                       " tries to renew a token with " +
-                                       "renewer specified as " + 
-                                       id.getRenewer());
+      throw new AccessControlException(renewer +
+          " tries to renew a token with renewer " + id.getRenewer());
     }
     DelegationKey key = allKeys.get(id.getMasterKeyId());
     if (key == null) {
@@ -386,8 +382,8 @@ extends AbstractDelegationTokenIdentifie
     }
     byte[] password = createPassword(token.getIdentifier(), key.getKey());
     if (!Arrays.equals(password, token.getPassword())) {
-      throw new AccessControlException("Client " + renewer
-          + " is trying to renew a token with " + "wrong password");
+      throw new AccessControlException(renewer +
+          " is trying to renew a token with wrong password");
     }
     long renewTime = Math.min(id.getMaxDate(), now + tokenRenewInterval);
     String trackingId = getTrackingIdIfEnabled(id);
@@ -429,8 +425,7 @@ extends AbstractDelegationTokenIdentifie
       throw new AccessControlException(canceller
           + " is not authorized to cancel the token");
     }
-    DelegationTokenInformation info = null;
-    info = currentTokens.remove(id);
+    DelegationTokenInformation info = currentTokens.remove(id);
     if (info == null) {
       throw new InvalidToken("Token not found");
     }
@@ -554,14 +549,11 @@ extends AbstractDelegationTokenIdentifie
           try {
             Thread.sleep(Math.min(5000, keyUpdateInterval)); // 5 seconds
           } catch (InterruptedException ie) {
-            LOG
-            .error("InterruptedExcpetion recieved for ExpiredTokenRemover thread "
-                + ie);
+            LOG.error("ExpiredTokenRemover received " + ie);
           }
         }
       } catch (Throwable t) {
-        LOG.error("ExpiredTokenRemover thread received unexpected exception. "
-            + t);
+        LOG.error("ExpiredTokenRemover thread received unexpected exception", t);
         Runtime.getRuntime().exit(-1);
       }
     }

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java Mon May 12 12:43:59 2014
@@ -141,8 +141,7 @@ public class CompositeService extends Ab
    * @throws RuntimeException the first exception raised during the
    * stop process -<i>after all services are stopped</i>
    */
-  private synchronized void stop(int numOfServicesStarted,
-                                 boolean stopOnlyStartedServices) {
+  private void stop(int numOfServicesStarted, boolean stopOnlyStartedServices) {
     // stop in reverse order of start
     Exception firstException = null;
     List<Service> services = getServices();

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.crypto.key.KeyProviderFactory
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.crypto.key.KeyProviderFactory?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.crypto.key.KeyProviderFactory (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.crypto.key.KeyProviderFactory Mon May 12 12:43:59 2014
@@ -15,3 +15,4 @@
 
 org.apache.hadoop.crypto.key.JavaKeyStoreProvider$Factory
 org.apache.hadoop.crypto.key.UserProvider$Factory
+org.apache.hadoop.crypto.key.kms.KMSClientProvider$Factory

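The added line registers KMSClientProvider$Factory in a standard java.util.ServiceLoader manifest, alongside the existing JavaKeyStoreProvider and UserProvider factories. A minimal sketch of how such a META-INF/services file is consumed (the KeyProviderFactory interface below is a local stand-in for the real Hadoop type):

  import java.util.ServiceLoader;

  interface KeyProviderFactory {
    // The real interface exposes createProvider(...); omitted in this stand-in.
  }

  final class ProviderLookup {
    public static void main(String[] args) {
      // ServiceLoader reads META-INF/services/<interface name> from the
      // classpath and instantiates each implementation listed there.
      for (KeyProviderFactory f : ServiceLoader.load(KeyProviderFactory.class)) {
        System.out.println("discovered factory: " + f.getClass().getName());
      }
    }
  }
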
Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Mon May 12 12:43:59 2014
@@ -576,6 +576,14 @@
 </property>
 
 <property>
+  <name>fs.s3n.server-side-encryption-algorithm</name>
+  <value></value>
+  <description>Specify a server-side encryption algorithm for S3.
+  The default is NULL, and the only other currently allowable value is AES256.
+  </description>
+</property>
+
+<property>
   <name>io.seqfile.compress.blocksize</name>
   <value>1000000</value>
   <description>The minimum block size for compression in block compressed 

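A hedged usage sketch for the property added above: per its description, the only accepted non-empty value is AES256, and everything around the conf.set call is illustrative:

  import org.apache.hadoop.conf.Configuration;

  public class S3nEncryptionExample {
    public static void main(String[] args) {
      Configuration conf = new Configuration();
      // Empty (the default) disables server-side encryption; AES256 is the
      // only other value the description above allows.
      conf.set("fs.s3n.server-side-encryption-algorithm", "AES256");
      // A FileSystem created from this conf would request SSE on s3n:// writes.
    }
  }
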
Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm Mon May 12 12:43:59 2014
@@ -226,24 +226,34 @@ Hadoop MapReduce Next Generation - Clust
 *-------------------------+-------------------------+------------------------+
 | <<<yarn.resourcemanager.address>>> | | |
 | | <<<ResourceManager>>> host:port for clients to submit jobs. | |
-| | | <host:port> |
+| | | <host:port>\ |
+| | | If set, overrides the hostname set in <<<yarn.resourcemanager.hostname>>>. |
 *-------------------------+-------------------------+------------------------+
 | <<<yarn.resourcemanager.scheduler.address>>> | | |
 | | <<<ResourceManager>>> host:port for ApplicationMasters to talk to | |
 | | Scheduler to obtain resources. | |
-| | | <host:port> |
+| | | <host:port>\ |
+| | | If set, overrides the hostname set in <<<yarn.resourcemanager.hostname>>>. |
 *-------------------------+-------------------------+------------------------+
 | <<<yarn.resourcemanager.resource-tracker.address>>> | | |
 | | <<<ResourceManager>>> host:port for NodeManagers. | |
-| | | <host:port> |
+| | | <host:port>\ |
+| | | If set, overrides the hostname set in <<<yarn.resourcemanager.hostname>>>. |
 *-------------------------+-------------------------+------------------------+
 | <<<yarn.resourcemanager.admin.address>>> | | |
 | | <<<ResourceManager>>> host:port for administrative commands. | |
-| | | <host:port> |
+| | | <host:port>\ |
+| | | If set, overrides the hostname set in <<<yarn.resourcemanager.hostname>>>. |
 *-------------------------+-------------------------+------------------------+
 | <<<yarn.resourcemanager.webapp.address>>> | | |
 | | <<<ResourceManager>>> web-ui host:port. | |
-| | | <host:port> |
+| | | <host:port>\ |
+| | | If set, overrides the hostname set in <<<yarn.resourcemanager.hostname>>>. |
+*-------------------------+-------------------------+------------------------+
+| <<<yarn.resourcemanager.hostname>>> | | |
+| | <<<ResourceManager>>> host. | |
+| | | <host>\ |
+| | | Single hostname that can be set in place of setting all <<<yarn.resourcemanager*address>>> resources.  Results in default ports for ResourceManager components. |
 *-------------------------+-------------------------+------------------------+
 | <<<yarn.resourcemanager.scheduler.class>>> | | |
 | | <<<ResourceManager>>> Scheduler class. | |

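The new yarn.resourcemanager.hostname row documents a shortcut: a single hostname stands in for the five per-component address keys, each of which then uses its default port unless its explicit address setting overrides it. A hedged sketch of that precedence via the Configuration API (the host and port values are illustrative):

  import org.apache.hadoop.conf.Configuration;

  public class RmHostnameExample {
    public static void main(String[] args) {
      Configuration conf = new Configuration();
      // One setting in place of all the per-component address keys; each
      // ResourceManager component then binds to this host at its default port.
      conf.set("yarn.resourcemanager.hostname", "rm.example.com");
      // An explicit address key still wins over the hostname shortcut:
      conf.set("yarn.resourcemanager.webapp.address", "rm.example.com:8089");
    }
  }
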
Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/site/apt/SecureMode.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/site/apt/SecureMode.apt.vm?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/site/apt/SecureMode.apt.vm (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/site/apt/SecureMode.apt.vm Mon May 12 12:43:59 2014
@@ -176,9 +176,11 @@ KVNO Timestamp         Principal
   the rule specified by <<<hadoop.security.auth_to_local>>>
   which works in the same way as the <<<auth_to_local>>> in
   {{{http://web.mit.edu/Kerberos/krb5-latest/doc/admin/conf_files/krb5_conf.html}Kerberos configuration file (krb5.conf)}}.
+  In addition, Hadoop <<<auth_to_local>>> mapping supports the <</L>> flag that
+  lowercases the returned name.
 
   By default, it picks the first component of principal name as a user name
-  if the realms matches to the <<<defalut_realm>>> (usually defined in /etc/krb5.conf).
+  if the realm matches the <<<default_realm>>> (usually defined in /etc/krb5.conf).
   For example, <<<host/full.qualified.domain.name@REALM.TLD>>> is mapped to <<<host>>>
   by default rule.
 
@@ -201,7 +203,9 @@ KVNO Timestamp         Principal
   Some products such as Apache Oozie which access the services of Hadoop
   on behalf of end users need to be able to impersonate end users.
   You can configure proxy user using properties
-  <<<hadoop.proxyuser.${superuser}.hosts>>> and <<<hadoop.proxyuser.${superuser}.groups>>>.
+  <<<hadoop.proxyuser.${superuser}.hosts>>> along with either or both of 
+  <<<hadoop.proxyuser.${superuser}.groups>>>
+  and <<<hadoop.proxyuser.${superuser}.users>>>.
 
   For example, by specifying as below in core-site.xml,
   user named <<<oozie>>> accessing from any host
@@ -218,6 +222,20 @@ KVNO Timestamp         Principal
   </property>
 ----
 
+  User named <<<oozie>>> accessing from any host
+  can impersonate user1 and user2 by specifying as below in core-site.xml.
+
+----
+  <property>
+    <name>hadoop.proxyuser.oozie.hosts</name>
+    <value>*</value>
+  </property>
+  <property>
+    <name>hadoop.proxyuser.oozie.users</name>
+    <value>user1,user2</value>
+  </property>
+----
+
 ** Secure DataNode
 
   Because the data transfer protocol of DataNode

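The SecureMode changes document the new <<<hadoop.proxyuser.${superuser}.users>>> key next to the existing groups key, and the added core-site.xml block shows the hosts plus users pairing. The programmatic equivalent, hedged, with the refresh call taken from the TestProxyUsers changes later in this commit and the names illustrative:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.security.authorize.ProxyUsers;

  public class ProxyUserConfigExample {
    public static void main(String[] args) {
      Configuration conf = new Configuration();
      // Superuser oozie may impersonate from any host...
      conf.set("hadoop.proxyuser.oozie.hosts", "*");
      // ...but only on behalf of user1 and user2.
      conf.set("hadoop.proxyuser.oozie.users", "user1,user2");
      ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
    }
  }
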
Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java Mon May 12 12:43:59 2014
@@ -19,11 +19,13 @@
 package org.apache.hadoop.fs.shell;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathIOException;
+import org.apache.hadoop.ipc.RemoteException;
 import org.junit.Test;
 
 public class TestPathExceptions {
@@ -52,5 +54,25 @@ public class TestPathExceptions {
     assertEquals(new Path(path), pe.getPath());
     assertEquals("`" + path + "': " + error, pe.getMessage());
   }
-  
+
+  @Test
+  public void testRemoteExceptionUnwrap() throws Exception {
+    PathIOException pe;
+    RemoteException re;
+    IOException ie;
+    
+    pe = new PathIOException(path);
+    re = new RemoteException(PathIOException.class.getName(), "test constructor1");
+    ie = re.unwrapRemoteException();
+    assertTrue(ie instanceof PathIOException);
+    ie = re.unwrapRemoteException(PathIOException.class);
+    assertTrue(ie instanceof PathIOException);
+
+    pe = new PathIOException(path, "constructor2");
+    re = new RemoteException(PathIOException.class.getName(), "test constructor2");
+    ie = re.unwrapRemoteException();
+    assertTrue(ie instanceof PathIOException);
+    ie = re.unwrapRemoteException(PathIOException.class);
+    assertTrue(ie instanceof PathIOException);    
+  }
 }

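The new testRemoteExceptionUnwrap exercises RemoteException.unwrapRemoteException, which rebuilds a typed exception on the client from the class name carried over RPC. A minimal sketch of the core lookup that behavior implies, assuming a (String) constructor on the target class (the real implementation has more fallbacks than shown):

  import java.io.IOException;
  import java.lang.reflect.Constructor;

  final class UnwrapSketch {
    static IOException unwrap(String className, String message) {
      try {
        // The wire format carries only a class name and a message, so the
        // client reflectively rebuilds the declared exception type.
        Class<?> cls = Class.forName(className);
        Constructor<?> ctor = cls.getConstructor(String.class);
        return (IOException) ctor.newInstance(message);
      } catch (Exception e) {
        // Fall back to the raw text if the class or constructor is missing.
        return new IOException(className + ": " + message);
      }
    }
  }
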
Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java Mon May 12 12:43:59 2014
@@ -259,13 +259,6 @@ public class TestHttpServer extends Http
     conn.connect();
     assertEquals(200, conn.getResponseCode());
     assertEquals("text/html; charset=utf-8", conn.getContentType());
-
-    // JSPs should default to text/html with utf8
-    servletUrl = new URL(baseUrl, "/testjsp.jsp");
-    conn = (HttpURLConnection)servletUrl.openConnection();
-    conn.connect();
-    assertEquals(200, conn.getResponseCode());
-    assertEquals("text/html; charset=utf-8", conn.getContentType());
   }
 
   /**

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java Mon May 12 12:43:59 2014
@@ -39,6 +39,7 @@ public class TestProxyUsers {
     LogFactory.getLog(TestProxyUsers.class);
   private static final String REAL_USER_NAME = "proxier";
   private static final String PROXY_USER_NAME = "proxied_user";
+  private static final String AUTHORIZED_PROXY_USER_NAME = "authorized_proxied_user";
   private static final String[] GROUP_NAMES =
     new String[] { "foo_group" };
   private static final String[] NETGROUP_NAMES =
@@ -158,7 +159,41 @@ public class TestProxyUsers {
     // From bad IP
     assertNotAuthorized(proxyUserUgi, "1.2.3.5");
   }
+  
+  @Test
+  public void testProxyUsersWithUserConf() throws Exception {
+    Configuration conf = new Configuration();
+    conf.set(
+      ProxyUsers.getProxySuperuserUserConfKey(REAL_USER_NAME),
+      StringUtils.join(",", Arrays.asList(AUTHORIZED_PROXY_USER_NAME)));
+    conf.set(
+      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      PROXY_IP);
+    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
 
+
+    // First try proxying a user that's allowed
+    UserGroupInformation realUserUgi = UserGroupInformation
+        .createRemoteUser(REAL_USER_NAME);
+    UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
+        AUTHORIZED_PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
+
+    // From good IP
+    assertAuthorized(proxyUserUgi, "1.2.3.4");
+    // From bad IP
+    assertNotAuthorized(proxyUserUgi, "1.2.3.5");
+
+    // Now try proxying a user that's not allowed
+    realUserUgi = UserGroupInformation.createRemoteUser(REAL_USER_NAME);
+    proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
+        PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
+    
+    // From good IP
+    assertNotAuthorized(proxyUserUgi, "1.2.3.4");
+    // From bad IP
+    assertNotAuthorized(proxyUserUgi, "1.2.3.5");
+  }
+  
   @Test
   public void testWildcardGroup() {
     Configuration conf = new Configuration();
@@ -192,6 +227,40 @@ public class TestProxyUsers {
     // From bad IP
     assertNotAuthorized(proxyUserUgi, "1.2.3.5");
   }
+  
+  @Test
+  public void testWildcardUser() {
+    Configuration conf = new Configuration();
+    conf.set(
+      ProxyUsers.getProxySuperuserUserConfKey(REAL_USER_NAME),
+      "*");
+    conf.set(
+      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      PROXY_IP);
+    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
+
+    // First try proxying a user that's allowed
+    UserGroupInformation realUserUgi = UserGroupInformation
+        .createRemoteUser(REAL_USER_NAME);
+    UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
+        AUTHORIZED_PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
+
+    // From good IP
+    assertAuthorized(proxyUserUgi, "1.2.3.4");
+    // From bad IP
+    assertNotAuthorized(proxyUserUgi, "1.2.3.5");
+
+    // Now try proxying a different user (just to make sure we aren't getting spill over
+    // from the other test case!)
+    realUserUgi = UserGroupInformation.createRemoteUser(REAL_USER_NAME);
+    proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
+        PROXY_USER_NAME, realUserUgi, OTHER_GROUP_NAMES);
+    
+    // From good IP
+    assertAuthorized(proxyUserUgi, "1.2.3.4");
+    // From bad IP
+    assertNotAuthorized(proxyUserUgi, "1.2.3.5");
+  }
 
   @Test
   public void testWildcardIP() {
@@ -282,7 +351,7 @@ public class TestProxyUsers {
     try {
       ProxyUsers.authorize(proxyUgi, host);
     } catch (AuthorizationException e) {
-      fail("Did not allowed authorization of " + proxyUgi + " from " + host);
+      fail("Did not allow authorization of " + proxyUgi + " from " + host);
     }
   }
 }

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/JarFinder.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/JarFinder.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/JarFinder.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/JarFinder.java Mon May 12 12:43:59 2014
@@ -39,17 +39,24 @@ import java.util.zip.ZipOutputStream;
  */
 public class JarFinder {
 
-  private static void copyToZipStream(InputStream is, ZipEntry entry,
+  private static void copyToZipStream(File file, ZipEntry entry,
                               ZipOutputStream zos) throws IOException {
-    zos.putNextEntry(entry);
-    byte[] arr = new byte[4096];
-    int read = is.read(arr);
-    while (read > -1) {
-      zos.write(arr, 0, read);
-      read = is.read(arr);
+    InputStream is = new FileInputStream(file);
+    try {
+      zos.putNextEntry(entry);
+      byte[] arr = new byte[4096];
+      int read = is.read(arr);
+      while (read > -1) {
+        zos.write(arr, 0, read);
+        read = is.read(arr);
+      }
+    } finally {
+      try {
+        is.close();
+      } finally {
+        zos.closeEntry();
+      }
     }
-    is.close();
-    zos.closeEntry();
   }
 
   public static void jarDir(File dir, String relativePath, ZipOutputStream zos)
@@ -66,8 +73,7 @@ public class JarFinder {
       new Manifest().write(new BufferedOutputStream(zos));
       zos.closeEntry();
     } else {
-      InputStream is = new FileInputStream(manifestFile);
-      copyToZipStream(is, manifestEntry, zos);
+      copyToZipStream(manifestFile, manifestEntry, zos);
     }
     zos.closeEntry();
     zipDir(dir, relativePath, zos, true);
@@ -94,8 +100,7 @@ public class JarFinder {
           String path = relativePath + f.getName();
           if (!path.equals(JarFile.MANIFEST_NAME)) {
             ZipEntry anEntry = new ZipEntry(path);
-            InputStream is = new FileInputStream(f);
-            copyToZipStream(is, anEntry, zos);
+            copyToZipStream(f, anEntry, zos);
           }
         }
       }

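The JarFinder change moves stream creation into copyToZipStream and nests the finally blocks so the input stream is closed even when closeEntry fails, and vice versa. On Java 7+, try-with-resources gives the same guarantee more compactly; a hedged alternative intended to mirror the patched method:

  import java.io.File;
  import java.io.FileInputStream;
  import java.io.IOException;
  import java.io.InputStream;
  import java.util.zip.ZipEntry;
  import java.util.zip.ZipOutputStream;

  final class ZipCopy {
    static void copyToZipStream(File file, ZipEntry entry, ZipOutputStream zos)
        throws IOException {
      // try-with-resources closes the stream on every path; the finally
      // still guarantees the zip entry is closed.
      try (InputStream is = new FileInputStream(file)) {
        zos.putNextEntry(entry);
        byte[] arr = new byte[4096];
        int read = is.read(arr);
        while (read > -1) {
          zos.write(arr, 0, read);
          read = is.read(arr);
        }
      } finally {
        zos.closeEntry();
      }
    }
  }
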
Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java Mon May 12 12:43:59 2014
@@ -393,18 +393,22 @@ public class MiniKdc {
     map.put("4", bindAddress);
 
     ClassLoader cl = Thread.currentThread().getContextClassLoader();
-    InputStream is = cl.getResourceAsStream("minikdc.ldiff");
+    InputStream is1 = cl.getResourceAsStream("minikdc.ldiff");
 
     SchemaManager schemaManager = ds.getSchemaManager();
-    final String content = StrSubstitutor.replace(IOUtils.toString(is), map);
-    LdifReader reader = new LdifReader(new StringReader(content));
+    LdifReader reader = null;
+
     try {
+      final String content = StrSubstitutor.replace(IOUtils.toString(is1), map);
+      reader = new LdifReader(new StringReader(content));
+
       for (LdifEntry ldifEntry : reader) {
         ds.getAdminSession().add(new DefaultEntry(schemaManager,
                 ldifEntry.getEntry()));
       }
     } finally {
-      reader.close();
+      IOUtils.closeQuietly(reader);
+      IOUtils.closeQuietly(is1);
     }
 
     kdc = new KdcServer();
@@ -429,14 +433,23 @@ public class MiniKdc {
     kdc.start();
 
     StringBuilder sb = new StringBuilder();
-    is = cl.getResourceAsStream("minikdc-krb5.conf");
-    BufferedReader r = new BufferedReader(new InputStreamReader(is));
-    String line = r.readLine();
-    while (line != null) {
-      sb.append(line).append("{3}");
-      line = r.readLine();
+    InputStream is2 = cl.getResourceAsStream("minikdc-krb5.conf");
+
+    BufferedReader r = null;
+
+    try {
+      r = new BufferedReader(new InputStreamReader(is2));
+      String line = r.readLine();
+
+      while (line != null) {
+        sb.append(line).append("{3}");
+        line = r.readLine();
+      }
+    } finally {
+      IOUtils.closeQuietly(r);
+      IOUtils.closeQuietly(is2);
     }
-    r.close();
+
     krb5conf = new File(workDir, "krb5.conf").getAbsoluteFile();
     FileUtils.writeStringToFile(krb5conf,
             MessageFormat.format(sb.toString(), getRealm(), getHost(),
@@ -555,4 +568,4 @@ public class MiniKdc {
     keytab.setEntries(entries);
     keytab.write(keytabFile);
   }
-}
\ No newline at end of file
+}

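The MiniKdc hunks apply the same discipline to classpath resources: open the stream, work inside try, and close quietly in finally so a read failure cannot leak the handle. The recurring pattern, sketched with commons-io (the resource name is illustrative):

  import java.io.InputStream;
  import org.apache.commons.io.IOUtils;

  final class ResourceRead {
    static String readResource(String name) throws Exception {
      ClassLoader cl = Thread.currentThread().getContextClassLoader();
      InputStream is = cl.getResourceAsStream(name);
      try {
        // Read the whole resource; any failure still reaches the finally.
        return IOUtils.toString(is);
      } finally {
        // closeQuietly swallows close-time exceptions (and tolerates null).
        IOUtils.closeQuietly(is);
      }
    }
  }
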
Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/pom.xml?rev=1593948&r1=1593947&r2=1593948&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/pom.xml Mon May 12 12:43:59 2014
@@ -37,6 +37,7 @@
     <module>hadoop-annotations</module>
     <module>hadoop-nfs</module>
     <module>hadoop-minikdc</module>
+    <module>hadoop-kms</module>
   </modules>
 
   <build>


