hadoop-common-commits mailing list archives

From: cl...@apache.org
Subject: svn commit: r1598435 - in /hadoop/common/branches/fs-encryption/hadoop-common-project: hadoop-auth/src/site/apt/ hadoop-common/ hadoop-common/src/main/docs/ hadoop-common/src/main/java/ hadoop-common/src/main/java/org/apache/hadoop/conf/ hadoop-common/...
Date: Thu, 29 May 2014 22:27:29 GMT
Author: clamb
Date: Thu May 29 22:27:25 2014
New Revision: 1598435

URL: http://svn.apache.org/r1598435
Log:
merge from trunk r1598430

Added:
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/DefaultImpersonationProvider.java
      - copied unchanged from r1598430, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/DefaultImpersonationProvider.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java
      - copied unchanged from r1598430, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java
Modified:
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-auth/src/site/apt/BuildingIt.apt.vm
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-auth/src/site/apt/Examples.apt.vm
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/TestSSLFactory.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/ACCESS3Response.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestIdUserGroup.java

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-auth/src/site/apt/BuildingIt.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-auth/src/site/apt/BuildingIt.apt.vm?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-auth/src/site/apt/BuildingIt.apt.vm (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-auth/src/site/apt/BuildingIt.apt.vm Thu May 29 22:27:25 2014
@@ -18,8 +18,6 @@
 
 Hadoop Auth, Java HTTP SPNEGO ${project.version} - Building It
 
-  \[ {{{./index.html}Go Back}} \]
-
 * Requirements
 
   * Java 6+
@@ -70,6 +68,3 @@ $ mvn package -Pdocs
 
   The generated documentation is available at
   <<<hadoop-auth/target/site/>>>.
-
-  \[ {{{./index.html}Go Back}} \]
-

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm Thu May 29 22:27:25 2014
@@ -20,8 +20,6 @@
 Hadoop Auth, Java HTTP SPNEGO ${project.version} - Server Side
 Configuration
 
-  \[ {{{./index.html}Go Back}} \]
-
 * Server Side Configuration Setup
 
   The AuthenticationFilter filter is Hadoop Auth's server side component.
@@ -241,5 +239,3 @@ Configuration
     ...
 </web-app>
 +---+
-
-  \[ {{{./index.html}Go Back}} \]

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-auth/src/site/apt/Examples.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-auth/src/site/apt/Examples.apt.vm?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-auth/src/site/apt/Examples.apt.vm (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-auth/src/site/apt/Examples.apt.vm Thu May 29 22:27:25 2014
@@ -18,8 +18,6 @@
 
 Hadoop Auth, Java HTTP SPNEGO ${project.version} - Examples
 
-  \[ {{{./index.html}Go Back}} \]
-
 * Accessing a Hadoop Auth protected URL Using a browser
 
   <<IMPORTANT:>> The browser must support HTTP Kerberos SPNEGO. For example,
@@ -133,5 +131,3 @@ You are: user[tucu] principal[tucu@LOCAL
 ....
 
 +---+
-
-  \[ {{{./index.html}Go Back}} \]

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt Thu May 29 22:27:25 2014
@@ -334,6 +334,9 @@ Trunk (Unreleased)
 
     HADOOP-10586. KeyShell doesn't allow setting Options via CLI. (clamb via tucu)
 
+    HADOOP-10625. Trim configuration names when putting/getting them
+    to properties. (Wangda Tan via xgong)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)
@@ -413,6 +416,12 @@ Release 2.5.0 - UNRELEASED
     HADOOP-10614. CBZip2InputStream is not threadsafe (Xiangrui Meng via
     Sandy Ryza)
 
+    HADOOP-10618. Remove SingleNodeSetup.apt.vm. (Akira Ajisaka via
+    Arpit Agarwal)
+
+    HADOOP-10448. Support pluggable mechanism to specify proxy user settings.
+    (Benoy Antony via Arpit Agarwal)
+
   OPTIMIZATIONS
 
   BUG FIXES 
@@ -504,6 +513,12 @@ Release 2.5.0 - UNRELEASED
     HADOOP-10489. UserGroupInformation#getTokens and UserGroupInformation
     #addToken can lead to ConcurrentModificationException (Robert Kanter via atm)
 
+    HADOOP-10602. Documentation has broken "Go Back" hyperlinks.
+    (Akira AJISAKA via cnauroth)
+
+    HADOOP-10639. FileBasedKeyStoresFactory initialization is not using default
+    for SSL_REQUIRE_CLIENT_CERT_KEY. (tucu)
+
 Release 2.4.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1596816-1598430

Propchange: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1596816-1598430

Propchange: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1596816-1598430

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java Thu May 29 22:27:25 2014
@@ -566,6 +566,9 @@ public class Configuration implements It
    */
   private String[] handleDeprecation(DeprecationContext deprecations,
       String name) {
+    if (null != name) {
+      name = name.trim();
+    }
     ArrayList<String > names = new ArrayList<String>();
 	if (isDeprecated(name)) {
       DeprecatedKeyInfo keyInfo = deprecations.getDeprecatedKeyMap().get(name);
@@ -843,12 +846,12 @@ public class Configuration implements It
   /**
    * Get the value of the <code>name</code> property, <code>null</code> if
    * no such property exists. If the key is deprecated, it returns the value of
-   * the first key which replaces the deprecated key and is not null
+   * the first key which replaces the deprecated key and is not null.
    * 
    * Values are processed for <a href="#VariableExpansion">variable expansion</a> 
    * before being returned. 
    * 
-   * @param name the property name.
+   * @param name the property name; it will be trimmed before the value is retrieved.
    * @return the value of the <code>name</code> or its replacing property, 
    *         or null if no such property exists.
    */
@@ -952,7 +955,8 @@ public class Configuration implements It
   /** 
    * Set the <code>value</code> of the <code>name</code> property. If 
    * <code>name</code> is deprecated or there is a deprecated name associated to it,
-   * it sets the value to both names.
+   * it sets the value to both names. The name will be trimmed before it is
+   * put into the configuration.
    * 
    * @param name property name.
    * @param value property value.
@@ -964,7 +968,8 @@ public class Configuration implements It
   /** 
    * Set the <code>value</code> of the <code>name</code> property. If 
    * <code>name</code> is deprecated, it also sets the <code>value</code> to
-   * the keys that replace the deprecated key.
+   * the keys that replace the deprecated key. The name will be trimmed
+   * before it is put into the configuration.
    *
    * @param name property name.
    * @param value property value.
@@ -979,6 +984,7 @@ public class Configuration implements It
     Preconditions.checkArgument(
         value != null,
         "The value of property " + name + " must not be null");
+    name = name.trim();
     DeprecationContext deprecations = deprecationContext.get();
     if (deprecations.getDeprecatedKeyMap().isEmpty()) {
       getProps();
@@ -1064,7 +1070,7 @@ public class Configuration implements It
    * If no such property exists,
    * then <code>defaultValue</code> is returned.
    * 
-   * @param name property name.
+   * @param name property name; it will be trimmed before the value is retrieved.
    * @param defaultValue default value.
    * @return property value, or <code>defaultValue</code> if the property 
    *         doesn't exist.                    
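
  A minimal sketch of the new trimming behavior from HADOOP-10625, mirroring
  the testGetSetTrimmedNames test added further down (the key used here is
  purely illustrative):

    import org.apache.hadoop.conf.Configuration;

    public class TrimmedNamesExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        // set() trims the key before storing it ...
        conf.set(" io.file.buffer.size ", "65536");
        // ... and handleDeprecation() trims it again on lookup, so padded
        // and unpadded spellings resolve to the same property.
        System.out.println(conf.get("io.file.buffer.size"));    // 65536
        System.out.println(conf.get(" io.file.buffer.size "));  // 65536
      }
    }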

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java Thu May 29 22:27:25 2014
@@ -293,5 +293,8 @@ public class CommonConfigurationKeysPubl
       "hadoop.security.crypto.buffer.size";
   /** Defalt value for HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_KEY */
   public static final int HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_DEFAULT = 8192;
+  /** Class used to override the impersonation provider. */
+  public static final String  HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS =
+    "hadoop.security.impersonation.provider.class";
 }
 

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java Thu May 29 22:27:25 2014
@@ -2140,9 +2140,21 @@ public abstract class FileSystem extends
    *  in the corresponding FileSystem.
    */
   public FileChecksum getFileChecksum(Path f) throws IOException {
+    return getFileChecksum(f, Long.MAX_VALUE);
+  }
+
+  /**
+   * Get the checksum of a file, from the beginning of the file till the
+   * specific length.
+   * @param f The file path
+   * @param length The length of the file range for checksum calculation
+   * @return The file checksum.
+   */
+  public FileChecksum getFileChecksum(Path f, final long length)
+      throws IOException {
     return null;
   }
-  
+
   /**
    * Set the verify checksum flag. This is only applicable if the 
    * corresponding FileSystem supports checksum. By default doesn't do anything.
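
  The new two-argument overload checksums a file prefix up to the given
  length; the one-argument form now delegates with Long.MAX_VALUE, i.e. the
  whole file. Note the base implementation returns null unless a concrete
  FileSystem overrides it. A hedged usage sketch (the path and length are
  made up for illustration):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileChecksum;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class PartialChecksumExample {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path file = new Path("/tmp/data.bin");  // hypothetical path
        // Checksum over just the first megabyte of the file.
        FileChecksum prefix = fs.getFileChecksum(file, 1024 * 1024);
        // Equivalent to fs.getFileChecksum(file, Long.MAX_VALUE).
        FileChecksum whole = fs.getFileChecksum(file);
        System.out.println(prefix + " / " + whole);
      }
    }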

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java Thu May 29 22:27:25 2014
@@ -31,7 +31,6 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.util.Progressable;
@@ -428,7 +427,12 @@ public class FilterFileSystem extends Fi
   public FileChecksum getFileChecksum(Path f) throws IOException {
     return fs.getFileChecksum(f);
   }
-  
+
+  @Override
+  public FileChecksum getFileChecksum(Path f, long length) throws IOException {
+    return fs.getFileChecksum(f, length);
+  }
+
   @Override
   public void setVerifyChecksum(boolean verifyChecksum) {
     fs.setVerifyChecksum(verifyChecksum);

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java Thu May 29 22:27:25 2014
@@ -687,7 +687,7 @@ public class HarFileSystem extends FileS
    * @return null since no checksum algorithm is implemented.
    */
   @Override
-  public FileChecksum getFileChecksum(Path f) {
+  public FileChecksum getFileChecksum(Path f, long length) {
     return null;
   }
 

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java Thu May 29 22:27:25 2014
@@ -18,42 +18,35 @@
 
 package org.apache.hadoop.security.authorize;
 
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.Groups;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.ReflectionUtils;
 
 import com.google.common.annotations.VisibleForTesting;
 
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "HBase", "Hive"})
 public class ProxyUsers {
 
-  private static final String CONF_HOSTS = ".hosts";
-  private static final String CONF_USERS = ".users";
-  private static final String CONF_GROUPS = ".groups";
-  private static final String CONF_HADOOP_PROXYUSER = "hadoop.proxyuser.";
-  private static final String CONF_HADOOP_PROXYUSER_RE = "hadoop\\.proxyuser\\.";
-  
-  private static boolean init = false;
-  //list of users, groups and hosts per proxyuser
-  private static Map<String, Collection<String>> proxyUsers =
-    new HashMap<String, Collection<String>>();
-  private static Map<String, Collection<String>> proxyGroups = 
-    new HashMap<String, Collection<String>>();
-  private static Map<String, Collection<String>> proxyHosts = 
-    new HashMap<String, Collection<String>>();
+  private static volatile ImpersonationProvider sip;
 
   /**
-   * reread the conf and get new values for "hadoop.proxyuser.*.groups/users/hosts"
+   * Returns an instance of ImpersonationProvider.
+   * Looks up the configuration to see if there is a custom class specified.
+   * @param conf
+   * @return ImpersonationProvider
+   */
+  private static ImpersonationProvider getInstance(Configuration conf) {
+    Class<? extends ImpersonationProvider> clazz =
+        conf.getClass(
+            CommonConfigurationKeysPublic.HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS,
+            DefaultImpersonationProvider.class, ImpersonationProvider.class);
+    return ReflectionUtils.newInstance(clazz, conf);
+  }
+
+  /**
+   * Refresh the impersonation rules.
    */
   public static void refreshSuperUserGroupsConfiguration() {
     //load server side configuration;
@@ -64,149 +57,28 @@ public class ProxyUsers {
    * refresh configuration
    * @param conf
    */
-  public static synchronized void refreshSuperUserGroupsConfiguration(Configuration conf) {
-    
-    // remove all existing stuff
-    proxyGroups.clear();
-    proxyHosts.clear();
-    proxyUsers.clear();
-    
-    // get all the new keys for users
-    String regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_USERS;
-    Map<String,String> allMatchKeys = conf.getValByRegex(regex);
-    for(Entry<String, String> entry : allMatchKeys.entrySet()) {  
-        Collection<String> users = StringUtils.getTrimmedStringCollection(entry.getValue());
-        proxyUsers.put(entry.getKey(), users);
-      }
-
-    // get all the new keys for groups
-    regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_GROUPS;
-    allMatchKeys = conf.getValByRegex(regex);
-    for(Entry<String, String> entry : allMatchKeys.entrySet()) {
-      Collection<String> groups = StringUtils.getTrimmedStringCollection(entry.getValue());
-      proxyGroups.put(entry.getKey(), groups );
-      //cache the groups. This is needed for NetGroups
-      Groups.getUserToGroupsMappingService(conf).cacheGroupsAdd(
-          new ArrayList<String>(groups));
-    }
-
-    // now hosts
-    regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_HOSTS;
-    allMatchKeys = conf.getValByRegex(regex);
-    for(Entry<String, String> entry : allMatchKeys.entrySet()) {
-      proxyHosts.put(entry.getKey(),
-          StringUtils.getTrimmedStringCollection(entry.getValue()));
-    }
-    init = true;
+  public static void refreshSuperUserGroupsConfiguration(Configuration conf) { 
+    // sip is volatile: the assignment, and the state of the assigned object,
+    // will be visible to all other threads.
+    sip = getInstance(conf);
     ProxyServers.refresh(conf);
   }
   
   /**
-   * Returns configuration key for effective users allowed for a superuser
-   * 
-   * @param userName name of the superuser
-   * @return configuration key for superuser users
-   */
-  public static String getProxySuperuserUserConfKey(String userName) {
-    return ProxyUsers.CONF_HADOOP_PROXYUSER+userName+ProxyUsers.CONF_USERS;
-  }
-  
-  /**
-   * Returns configuration key for effective user groups allowed for a superuser
-   * 
-   * @param userName name of the superuser
-   * @return configuration key for superuser groups
-   */
-  public static String getProxySuperuserGroupConfKey(String userName) {
-    return ProxyUsers.CONF_HADOOP_PROXYUSER+userName+ProxyUsers.CONF_GROUPS;
-  }
-  
-  /**
-   * Return configuration key for superuser ip addresses
-   * 
-   * @param userName name of the superuser
-   * @return configuration key for superuser ip-addresses
-   */
-  public static String getProxySuperuserIpConfKey(String userName) {
-    return ProxyUsers.CONF_HADOOP_PROXYUSER+userName+ProxyUsers.CONF_HOSTS;
-  }
-  
-  /**
    * Authorize the superuser which is doing doAs
    * 
    * @param user ugi of the effective or proxy user which contains a real user
    * @param remoteAddress the ip address of client
    * @throws AuthorizationException
    */
-  public static synchronized void authorize(UserGroupInformation user, 
+  public static void authorize(UserGroupInformation user, 
       String remoteAddress) throws AuthorizationException {
-
-    if(!init) {
+    if (sip == null) {
+      // In a race, multiple threads may satisfy this condition;
+      // the last assignment will prevail.
       refreshSuperUserGroupsConfiguration(); 
     }
-
-    if (user.getRealUser() == null) {
-      return;
-    }
-    boolean userAuthorized = false;
-    boolean ipAuthorized = false;
-    UserGroupInformation superUser = user.getRealUser();
-    
-    Collection<String> allowedUsers = proxyUsers.get(
-        getProxySuperuserUserConfKey(superUser.getShortUserName()));
-
-    if (isWildcardList(allowedUsers)) {
-      userAuthorized = true;
-    } else if (allowedUsers != null && !allowedUsers.isEmpty()) {
-      if (allowedUsers.contains(user.getShortUserName())) {
-        userAuthorized = true;
-      }
-    }
-
-    if (!userAuthorized) {
-      Collection<String> allowedUserGroups = proxyGroups.get(
-          getProxySuperuserGroupConfKey(superUser.getShortUserName()));
-      
-      if (isWildcardList(allowedUserGroups)) {
-        userAuthorized = true;
-      } else if (allowedUserGroups != null && !allowedUserGroups.isEmpty()) {
-        for (String group : user.getGroupNames()) {
-          if (allowedUserGroups.contains(group)) {
-            userAuthorized = true;
-            break;
-          }
-        }
-      }
-
-      if (!userAuthorized) {
-        throw new AuthorizationException("User: " + superUser.getUserName()
-            + " is not allowed to impersonate " + user.getUserName());
-      }
-    }
-    
-    Collection<String> ipList = proxyHosts.get(
-        getProxySuperuserIpConfKey(superUser.getShortUserName()));
-   
-    if (isWildcardList(ipList)) {
-      ipAuthorized = true;
-    } else if (ipList != null && !ipList.isEmpty()) {
-      for (String allowedHost : ipList) {
-        InetAddress hostAddr;
-        try {
-          hostAddr = InetAddress.getByName(allowedHost);
-        } catch (UnknownHostException e) {
-          continue;
-        }
-        if (hostAddr.getHostAddress().equals(remoteAddress)) {
-          // Authorization is successful
-          ipAuthorized = true;
-        }
-      }
-    }
-    if (!ipAuthorized) {
-      throw new AuthorizationException("Unauthorized connection for super-user: "
-          + superUser.getUserName() + " from IP " + remoteAddress);
-    }
+    sip.authorize(user, remoteAddress);
   }
   
   /**
@@ -218,33 +90,14 @@ public class ProxyUsers {
    * @deprecated use {@link #authorize(UserGroupInformation, String) instead. 
    */
   @Deprecated
-  public static synchronized void authorize(UserGroupInformation user, 
+  public static void authorize(UserGroupInformation user, 
       String remoteAddress, Configuration conf) throws AuthorizationException {
     authorize(user,remoteAddress);
   }
-
-  /**
-   * Return true if the configuration specifies the special configuration value
-   * "*", indicating that any group or host list is allowed to use this configuration.
-   */
-  private static boolean isWildcardList(Collection<String> list) {
-    return (list != null) &&
-      (list.size() == 1) &&
-      (list.contains("*"));
-  }
-   
-  @VisibleForTesting
-  public static Map<String, Collection<String>> getProxyUsers() {
-    return proxyUsers;
-  }
-
-  @VisibleForTesting
-  public static Map<String, Collection<String>> getProxyGroups() {
-    return proxyGroups;
-  }
-
-  @VisibleForTesting
-  public static Map<String, Collection<String>> getProxyHosts() {
-    return proxyHosts;
+  
+  @VisibleForTesting 
+  public static DefaultImpersonationProvider getDefaultImpersonationProvider() {
+    return ((DefaultImpersonationProvider)sip);
   }
+      
 }
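
  With HADOOP-10448, the authorization logic moves behind the
  ImpersonationProvider interface, and a custom implementation can be
  selected through hadoop.security.impersonation.provider.class (see the
  core-default.xml entry below); ProxyUsers.getInstance() instantiates it
  via ReflectionUtils. A minimal sketch, assuming a hypothetical
  AlwaysAllowProvider (the interface extends Configurable, as the
  TestDummyImpersonationProvider in the tests below suggests):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.authorize.AuthorizationException;
    import org.apache.hadoop.security.authorize.ImpersonationProvider;
    import org.apache.hadoop.security.authorize.ProxyUsers;

    // Hypothetical provider that authorizes every impersonation attempt.
    class AlwaysAllowProvider implements ImpersonationProvider {
      private Configuration conf;
      @Override
      public void authorize(UserGroupInformation user, String remoteAddress)
          throws AuthorizationException {
        // No checks: any proxy user is allowed from any address.
      }
      @Override
      public void setConf(Configuration conf) { this.conf = conf; }
      @Override
      public Configuration getConf() { return conf; }
    }

    public class ProviderOverrideExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.setClass(
            CommonConfigurationKeysPublic.HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS,
            AlwaysAllowProvider.class, ImpersonationProvider.class);
        // ReflectionUtils will construct the provider and call setConf().
        ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
      }
    }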

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java Thu May 29 22:27:25 2014
@@ -131,7 +131,8 @@ public class FileBasedKeyStoresFactory i
     throws IOException, GeneralSecurityException {
 
     boolean requireClientCert =
-      conf.getBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY, true);
+      conf.getBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY,
+          SSLFactory.DEFAULT_SSL_REQUIRE_CLIENT_CERT);
 
     // certificate store
     String keystoreType =
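
  The fix above (HADOOP-10639) stops hard-coding true and falls back to
  SSLFactory's own default when SSL_REQUIRE_CLIENT_CERT_KEY is unset; per
  the new testNoClientCertsInitialization test below, a client-mode factory
  now initializes without client certificates. A small sketch of the
  corrected lookup:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.ssl.SSLFactory;

    public class RequireClientCertDefaultExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        // With the key unset, this now yields
        // SSLFactory.DEFAULT_SSL_REQUIRE_CLIENT_CERT rather than true.
        boolean requireClientCert = conf.getBoolean(
            SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY,
            SSLFactory.DEFAULT_SSL_REQUIRE_CLIENT_CERT);
        System.out.println("require client cert: " + requireClientCert);
      }
    }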

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Thu May 29 22:27:25 2014
@@ -723,6 +723,17 @@
 <!-- Proxy Configuration -->
 
 <property>
+  <name>hadoop.security.impersonation.provider.class</name>
+  <value></value>
+  <description>A class which implements the ImpersonationProvider interface,
+       used to authorize whether one user can impersonate a specific user.
+       If not specified, the DefaultImpersonationProvider will be used;
+       otherwise the specified class determines the impersonation capability.
+  </description>
+</property>
+
+<property>
   <name>hadoop.rpc.socket.factory.class.default</name>
   <value>org.apache.hadoop.net.StandardSocketFactory</value>
   <description> Default SocketFactory to use. This parameter is expected to be

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm Thu May 29 22:27:25 2014
@@ -18,210 +18,7 @@
 
 Single Node Setup
 
-%{toc|section=1|fromDepth=0}
+  This page will be removed in the next major release.
 
-* Purpose
-
-   This document describes how to set up and configure a single-node
-   Hadoop installation so that you can quickly perform simple operations
-   using Hadoop MapReduce and the Hadoop Distributed File System (HDFS).
-
-* Prerequisites
-
-** Supported Platforms
-
-     * GNU/Linux is supported as a development and production platform.
-       Hadoop has been demonstrated on GNU/Linux clusters with 2000 nodes.
-
-     * Windows is also a supported platform.
-
-** Required Software
-
-   Required software for Linux and Windows include:
-
-    [[1]] Java^TM 1.6.x, preferably from Sun, must be installed.
-
-    [[2]] ssh must be installed and sshd must be running to use the Hadoop
-       scripts that manage remote Hadoop daemons.
-
-** Installing Software
-
-   If your cluster doesn't have the requisite software you will need to
-   install it.
-
-   For example on Ubuntu Linux:
-
-----
-   $ sudo apt-get install ssh
-   $ sudo apt-get install rsync
-----
-
-* Download
-
-   To get a Hadoop distribution, download a recent stable release from one
-   of the Apache Download Mirrors.
-
-* Prepare to Start the Hadoop Cluster
-
-   Unpack the downloaded Hadoop distribution. In the distribution, edit
-   the file <<<conf/hadoop-env.sh>>> to define at least <<<JAVA_HOME>>> to be the root
-   of your Java installation.
-
-   Try the following command:
-
-----
-   $ bin/hadoop
-----
-
-   This will display the usage documentation for the hadoop script.
-
-   Now you are ready to start your Hadoop cluster in one of the three
-   supported modes:
-
-     * Local (Standalone) Mode
-
-     * Pseudo-Distributed Mode
-
-     * Fully-Distributed Mode
-
-* Standalone Operation
-
-   By default, Hadoop is configured to run in a non-distributed mode, as a
-   single Java process. This is useful for debugging.
-
-   The following example copies the unpacked conf directory to use as
-   input and then finds and displays every match of the given regular
-   expression. Output is written to the given output directory.
-
-----
-   $ mkdir input
-   $ cp conf/*.xml input
-   $ bin/hadoop jar hadoop-*-examples.jar grep input output 'dfs[a-z.]+'
-   $ cat output/*
----
-
-* Pseudo-Distributed Operation
-
-   Hadoop can also be run on a single-node in a pseudo-distributed mode
-   where each Hadoop daemon runs in a separate Java process.
-
-** Configuration
-
-   Use the following:
-
-   conf/core-site.xml:
-
-----
-<configuration>
-     <property>
-         <name>fs.defaultFS</name>
-         <value>hdfs://localhost:9000</value>
-     </property>
-</configuration>
-----
-
-   conf/hdfs-site.xml:
-
-----
-<configuration>
-     <property>
-         <name>dfs.replication</name>
-         <value>1</value>
-     </property>
-</configuration>
-----
-
-   conf/mapred-site.xml:
-
-----
-<configuration>
-     <property>
-         <name>mapred.job.tracker</name>
-         <value>localhost:9001</value>
-     </property>
-</configuration>
-----
-
-** Setup passphraseless ssh
-
-   Now check that you can ssh to the localhost without a passphrase:
-
-----
-   $ ssh localhost
-----
-
-   If you cannot ssh to localhost without a passphrase, execute the
-   following commands:
-
-----
-   $ ssh-keygen -t dsa -P '' -f ~/.ssh/id_dsa
-   $ cat ~/.ssh/id_dsa.pub >> ~/.ssh/authorized_keys
-----
-
-** Execution
-
-   Format a new distributed-filesystem:
-
-----
-   $ bin/hadoop namenode -format
-----
-
-   Start the hadoop daemons:
-
-----
-   $ bin/start-all.sh
-----
-
-   The hadoop daemon log output is written to the <<<${HADOOP_LOG_DIR}>>>
-   directory (defaults to <<<${HADOOP_PREFIX}/logs>>>).
-
-   Browse the web interface for the NameNode and the JobTracker; by
-   default they are available at:
-
-     * NameNode - <<<http://localhost:50070/>>>
-
-     * JobTracker - <<<http://localhost:50030/>>>
-
-   Copy the input files into the distributed filesystem:
-
-----
-   $ bin/hadoop fs -put conf input
-----
-
-   Run some of the examples provided:
-
-----
-   $ bin/hadoop jar hadoop-*-examples.jar grep input output 'dfs[a-z.]+'
-----
-
-   Examine the output files:
-
-   Copy the output files from the distributed filesystem to the local
-   filesytem and examine them:
-
-----
-   $ bin/hadoop fs -get output output
-   $ cat output/*
-----
-
-   or
-
-   View the output files on the distributed filesystem:
-
-----
-   $ bin/hadoop fs -cat output/*
-----
-
-   When you're done, stop the daemons with:
-
-----
-   $ bin/stop-all.sh
-----
-
-* Fully-Distributed Operation
-
-   For information on setting up fully-distributed, non-trivial clusters
-   see {{{./ClusterSetup.html}Cluster Setup}}.
-
-   Java and JNI are trademarks or registered trademarks of Sun
-   Microsystems, Inc. in the United States and other countries.
+  See {{{./SingleCluster.html}Single Cluster Setup}} to set up and configure a
+  single-node Hadoop installation.

Propchange: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1596816-1598430

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java Thu May 29 22:27:25 2014
@@ -49,7 +49,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.NetUtils;
 import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
-import org.codehaus.jackson.map.ObjectMapper; 
+import org.codehaus.jackson.map.ObjectMapper;
 
 public class TestConfiguration extends TestCase {
 
@@ -1003,6 +1003,14 @@ public class TestConfiguration extends T
     String resource;
   }
   
+  public void testGetSetTrimmedNames() throws IOException {
+    Configuration conf = new Configuration(false);
+    conf.set(" name", "value");
+    assertEquals("value", conf.get("name"));
+    assertEquals("value", conf.get(" name"));
+    assertEquals("value", conf.getRaw("  name  "));
+  }
+
   public void testDumpConfiguration () throws IOException {
     StringWriter outWriter = new StringWriter();
     Configuration.dumpConfiguration(conf, outWriter);

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java Thu May 29 22:27:25 2014
@@ -139,6 +139,7 @@ public class TestHarFileSystem {
     public int getDefaultPort();
     public String getCanonicalServiceName();
     public Token<?> getDelegationToken(String renewer) throws IOException;
+    public FileChecksum getFileChecksum(Path f) throws IOException;
     public boolean deleteOnExit(Path f) throws IOException;
     public boolean cancelDeleteOnExit(Path f) throws IOException;
     public Token<?>[] addDelegationTokens(String renewer, Credentials creds)
@@ -223,10 +224,16 @@ public class TestHarFileSystem {
   }
 
   @Test
-  public void testFileChecksum() {
+  public void testFileChecksum() throws Exception {
     final Path p = new Path("har://file-localhost/foo.har/file1");
     final HarFileSystem harfs = new HarFileSystem();
-    Assert.assertEquals(null, harfs.getFileChecksum(p));
+    try {
+      Assert.assertEquals(null, harfs.getFileChecksum(p));
+    } finally {
+      if (harfs != null) {
+        harfs.close();
+      }
+    }
   }
 
   /**

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java Thu May 29 22:27:25 2014
@@ -35,6 +35,7 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenInfo;
@@ -326,7 +327,7 @@ public class MiniRPCBenchmark {
     String shortUserName =
       UserGroupInformation.createRemoteUser(user).getShortUserName();
     try {
-      conf.setStrings(ProxyUsers.getProxySuperuserGroupConfKey(shortUserName),
+      conf.setStrings(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(shortUserName),
           GROUP_NAME_1);
       configureSuperUserIPAddresses(conf, shortUserName);
       // start the server
@@ -410,7 +411,7 @@ public class MiniRPCBenchmark {
     }
     builder.append("127.0.1.1,");
     builder.append(InetAddress.getLocalHost().getCanonicalHostName());
-    conf.setStrings(ProxyUsers.getProxySuperuserIpConfKey(superUserShortName),
+    conf.setStrings(DefaultImpersonationProvider.getProxySuperuserIpConfKey(superUserShortName),
         builder.toString());
   }
 }

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java Thu May 29 22:27:25 2014
@@ -35,6 +35,7 @@ import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.ipc.VersionedProtocol;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenInfo;
@@ -100,7 +101,7 @@ public class TestDoAsEffectiveUser {
     builder.append("127.0.1.1,");
     builder.append(InetAddress.getLocalHost().getCanonicalHostName());
     LOG.info("Local Ip addresses: "+builder.toString());
-    conf.setStrings(ProxyUsers.getProxySuperuserIpConfKey(superUserShortName),
+    conf.setStrings(DefaultImpersonationProvider.getProxySuperuserIpConfKey(superUserShortName),
         builder.toString());
   }
   
@@ -180,7 +181,7 @@ public class TestDoAsEffectiveUser {
   @Test(timeout=4000)
   public void testRealUserSetup() throws IOException {
     final Configuration conf = new Configuration();
-    conf.setStrings(ProxyUsers
+    conf.setStrings(DefaultImpersonationProvider
         .getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), "group1");
     configureSuperUserIPAddresses(conf, REAL_USER_SHORT_NAME);
     Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
@@ -213,7 +214,7 @@ public class TestDoAsEffectiveUser {
   public void testRealUserAuthorizationSuccess() throws IOException {
     final Configuration conf = new Configuration();
     configureSuperUserIPAddresses(conf, REAL_USER_SHORT_NAME);
-    conf.setStrings(ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME),
+    conf.setStrings(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME),
         "group1");
     Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
         .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0)
@@ -247,9 +248,9 @@ public class TestDoAsEffectiveUser {
   @Test
   public void testRealUserIPAuthorizationFailure() throws IOException {
     final Configuration conf = new Configuration();
-    conf.setStrings(ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_SHORT_NAME),
+    conf.setStrings(DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_SHORT_NAME),
         "20.20.20.20"); //Authorized IP address
-    conf.setStrings(ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME),
+    conf.setStrings(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME),
         "group1");
     Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
         .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0)
@@ -292,7 +293,7 @@ public class TestDoAsEffectiveUser {
   @Test
   public void testRealUserIPNotSpecified() throws IOException {
     final Configuration conf = new Configuration();
-    conf.setStrings(ProxyUsers
+    conf.setStrings(DefaultImpersonationProvider
         .getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), "group1");
     Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
         .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0)
@@ -376,7 +377,7 @@ public class TestDoAsEffectiveUser {
   public void testRealUserGroupAuthorizationFailure() throws IOException {
     final Configuration conf = new Configuration();
     configureSuperUserIPAddresses(conf, REAL_USER_SHORT_NAME);
-    conf.setStrings(ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME),
+    conf.setStrings(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME),
         "group3");
     Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
         .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0)

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java Thu May 29 22:27:25 2014
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.security.authorize;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
@@ -25,13 +28,11 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.util.NativeCodeLoader;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.security.Groups;
 import org.apache.hadoop.security.UserGroupInformation;
-
+import org.apache.hadoop.util.NativeCodeLoader;
+import org.apache.hadoop.util.StringUtils;
 import org.junit.Test;
-import static org.junit.Assert.*;
 
 
 public class TestProxyUsers {
@@ -46,6 +47,8 @@ public class TestProxyUsers {
     new String[] { "@foo_group" };
   private static final String[] OTHER_GROUP_NAMES =
     new String[] { "bar_group" };
+  private static final String[] SUDO_GROUP_NAMES =
+    new String[] { "sudo_proxied_user" };
   private static final String PROXY_IP = "1.2.3.4";
 
   /**
@@ -106,10 +109,10 @@ public class TestProxyUsers {
       groupMappingClassName);
 
     conf.set(
-        ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+        DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
         StringUtils.join(",", Arrays.asList(NETGROUP_NAMES)));
     conf.set(
-        ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
+        DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
         PROXY_IP);
     
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
@@ -130,10 +133,10 @@ public class TestProxyUsers {
   public void testProxyUsers() throws Exception {
     Configuration conf = new Configuration();
     conf.set(
-      ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
       StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
     conf.set(
-      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
       PROXY_IP);
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
 
@@ -164,11 +167,11 @@ public class TestProxyUsers {
   public void testProxyUsersWithUserConf() throws Exception {
     Configuration conf = new Configuration();
     conf.set(
-      ProxyUsers.getProxySuperuserUserConfKey(REAL_USER_NAME),
-      StringUtils.join(",", Arrays.asList(AUTHORIZED_PROXY_USER_NAME)));
+        DefaultImpersonationProvider.getProxySuperuserUserConfKey(REAL_USER_NAME),
+        StringUtils.join(",", Arrays.asList(AUTHORIZED_PROXY_USER_NAME)));
     conf.set(
-      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
-      PROXY_IP);
+        DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+        PROXY_IP);
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
 
 
@@ -198,10 +201,10 @@ public class TestProxyUsers {
   public void testWildcardGroup() {
     Configuration conf = new Configuration();
     conf.set(
-      ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
       "*");
     conf.set(
-      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
       PROXY_IP);
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
 
@@ -232,10 +235,10 @@ public class TestProxyUsers {
   public void testWildcardUser() {
     Configuration conf = new Configuration();
     conf.set(
-      ProxyUsers.getProxySuperuserUserConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getProxySuperuserUserConfKey(REAL_USER_NAME),
       "*");
     conf.set(
-      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
       PROXY_IP);
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
 
@@ -266,10 +269,10 @@ public class TestProxyUsers {
   public void testWildcardIP() {
     Configuration conf = new Configuration();
     conf.set(
-      ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
       StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
     conf.set(
-      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
       "*");
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
 
@@ -297,15 +300,16 @@ public class TestProxyUsers {
   public void testWithDuplicateProxyGroups() throws Exception {
     Configuration conf = new Configuration();
     conf.set(
-      ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
       StringUtils.join(",", Arrays.asList(GROUP_NAMES,GROUP_NAMES)));
     conf.set(
-      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
       PROXY_IP);
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
     
-    Collection<String> groupsToBeProxied = ProxyUsers.getProxyGroups().get(
-        ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME));
+    Collection<String> groupsToBeProxied = 
+        ProxyUsers.getDefaultImpersonationProvider().getProxyGroups().get(
+        DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME));
     
     assertEquals (1,groupsToBeProxied.size());
   }
@@ -314,18 +318,51 @@ public class TestProxyUsers {
   public void testWithDuplicateProxyHosts() throws Exception {
     Configuration conf = new Configuration();
     conf.set(
-      ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
       StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
     conf.set(
-      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
       StringUtils.join(",", Arrays.asList(PROXY_IP,PROXY_IP)));
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
     
-    Collection<String> hosts = ProxyUsers.getProxyHosts().get(
-        ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME));
+    Collection<String> hosts = 
+        ProxyUsers.getDefaultImpersonationProvider().getProxyHosts().get(
+        DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME));
     
     assertEquals (1,hosts.size());
   }
+  
+  @Test
+   public void testProxyUsersWithProviderOverride() throws Exception {
+     Configuration conf = new Configuration();
+     conf.set(
+         CommonConfigurationKeysPublic.HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS,
+         "org.apache.hadoop.security.authorize.TestProxyUsers$TestDummyImpersonationProvider");
+     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
+ 
+     // First try proxying a group that's allowed
+     UserGroupInformation realUserUgi = UserGroupInformation
+     .createUserForTesting(REAL_USER_NAME, SUDO_GROUP_NAMES);
+     UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
+         PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
+ 
+     // From good IP
+     assertAuthorized(proxyUserUgi, "1.2.3.4");
+     // From bad IP
+     assertAuthorized(proxyUserUgi, "1.2.3.5");
+ 
+     // Now try proxying a group that's not allowed
+     realUserUgi = UserGroupInformation
+     .createUserForTesting(REAL_USER_NAME, GROUP_NAMES);
+     proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
+         PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
+ 
+     // From good IP
+     assertNotAuthorized(proxyUserUgi, "1.2.3.4");
+     // From bad IP
+     assertNotAuthorized(proxyUserUgi, "1.2.3.5");
+   }
+
 
   private void assertNotAuthorized(UserGroupInformation proxyUgi, String host) {
     try {
@@ -343,4 +380,32 @@ public class TestProxyUsers {
       fail("Did not allow authorization of " + proxyUgi + " from " + host);
     }
   }
+
+  static class TestDummyImpersonationProvider implements ImpersonationProvider {
+    /**
+     * Authorize a user (superuser) to impersonate another user (user1) if the
+     * superuser belongs to the group "sudo_user1".
+     */
+    @Override
+    public void authorize(UserGroupInformation user,
+        String remoteAddress) throws AuthorizationException {
+      UserGroupInformation superUser = user.getRealUser();
+
+      String sudoGroupName = "sudo_" + user.getShortUserName();
+      if (!Arrays.asList(superUser.getGroupNames()).contains(sudoGroupName)){
+        throw new AuthorizationException("User: " + superUser.getUserName()
+            + " is not allowed to impersonate " + user.getUserName());
+      }
+    }
+
+    @Override
+    public void setConf(Configuration conf) {
+
+    }
+
+    @Override
+    public Configuration getConf() {
+      return null;
+    }
+  }
 }
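
For context on the new pluggable hook exercised above: a deployment can supply
its own policy by implementing ImpersonationProvider and naming the class under
the CommonConfigurationKeysPublic.HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS
key, as the test does. A minimal sketch follows; the class name and the
address-based policy are illustrative only, not part of this commit:

    public class ExampleImpersonationProvider implements ImpersonationProvider {
      private Configuration conf;

      @Override
      public void authorize(UserGroupInformation user, String remoteAddress)
          throws AuthorizationException {
        // Illustrative policy: refuse impersonation from outside 10.0.0.0/8.
        if (remoteAddress == null || !remoteAddress.startsWith("10.")) {
          throw new AuthorizationException("User: "
              + user.getRealUser().getUserName()
              + " is not allowed to impersonate " + user.getUserName()
              + " from " + remoteAddress);
        }
      }

      @Override
      public void setConf(Configuration conf) {
        this.conf = conf;
      }

      @Override
      public Configuration getConf() {
        return conf;
      }
    }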

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/TestSSLFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/TestSSLFactory.java?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/TestSSLFactory.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/TestSSLFactory.java Thu May 29 22:27:25 2014
@@ -272,4 +272,17 @@ public class TestSSLFactory {
       sslFactory.destroy();
     }
   }
+
+  @Test
+  public void testNoClientCertsInitialization() throws Exception {
+    Configuration conf = createConfiguration(false);
+    conf.unset(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY);
+    SSLFactory sslFactory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
+    try {
+      sslFactory.init();
+    } finally {
+      sslFactory.destroy();
+    }
+  }
+
 }
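
The new test asserts that a CLIENT-mode SSLFactory initializes even when
SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY is absent from the configuration rather
than explicitly false. A sketch of the client lifecycle this protects;
createSSLSocketFactory is assumed from the existing SSLFactory API and is not
part of this diff:

    void connectWithHadoopSsl(Configuration conf) throws Exception {
      SSLFactory sslFactory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
      try {
        sslFactory.init();
        javax.net.ssl.SSLSocketFactory socketFactory =
            sslFactory.createSSLSocketFactory();
        // ... hand socketFactory to an HTTPS client ...
      } finally {
        sslFactory.destroy();
      }
    }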

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm Thu May 29 22:27:25 2014
@@ -483,5 +483,3 @@ Content-Type: application/json
   ...
 ]
 +---+
-
-  \[ {{{./index.html}Go Back}} \]

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java Thu May 29 22:27:25 2014
@@ -18,8 +18,14 @@
 package org.apache.hadoop.nfs.nfs3;
 
 import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -44,6 +50,17 @@ public class IdUserGroup {
   static final String MAC_GET_ALL_USERS_CMD = "dscl . -list /Users UniqueID";
   static final String MAC_GET_ALL_GROUPS_CMD = "dscl . -list /Groups PrimaryGroupID";
 
+  // Used for finding the configured static mapping file.
+  static final String NFS_STATIC_MAPPING_FILE_KEY = "dfs.nfs.static.mapping.file";
+  private static final String NFS_STATIC_MAPPING_FILE_DEFAULT = "/etc/nfs.map";
+  private final File staticMappingFile;
+
+  // Used for parsing the static mapping file.
+  private static final Pattern EMPTY_LINE = Pattern.compile("^\\s*$");
+  private static final Pattern COMMENT_LINE = Pattern.compile("^\\s*#.*$");
+  private static final Pattern MAPPING_LINE =
+      Pattern.compile("^(uid|gid)\\s+(\\d+)\\s+(\\d+)\\s*(#.*)?$");
+
   // Do update every 15 minutes by default
   final static long TIMEOUT_DEFAULT = 15 * 60 * 1000; // ms
   final static long TIMEOUT_MIN = 1 * 60 * 1000; // ms
@@ -58,6 +75,7 @@ public class IdUserGroup {
   
   public IdUserGroup() throws IOException {
     timeout = TIMEOUT_DEFAULT;
+    staticMappingFile = new File(NFS_STATIC_MAPPING_FILE_DEFAULT);
     updateMaps();
   }
   
@@ -71,6 +89,11 @@ public class IdUserGroup {
     } else {
       timeout = updateTime;
     }
+    
+    String staticFilePath = conf.get(NFS_STATIC_MAPPING_FILE_KEY,
+        NFS_STATIC_MAPPING_FILE_DEFAULT);
+    staticMappingFile = new File(staticFilePath);
+    
     updateMaps();
   }
 
@@ -137,7 +160,8 @@ public class IdUserGroup {
    */
   @VisibleForTesting
   public static void updateMapInternal(BiMap<Integer, String> map, String mapName,
-      String command, String regex) throws IOException  {
+      String command, String regex, Map<Integer, Integer> staticMapping)
+      throws IOException  {
     BufferedReader br = null;
     try {
       Process process = Runtime.getRuntime().exec(
@@ -151,7 +175,7 @@ public class IdUserGroup {
         }
         LOG.debug("add to " + mapName + "map:" + nameId[0] + " id:" + nameId[1]);
         // HDFS can't differentiate duplicate names with simple authentication
-        final Integer key = parseId(nameId[1]);        
+        final Integer key = staticMapping.get(parseId(nameId[1]));
         final String value = nameId[0];
         if (map.containsKey(key)) {
           final String prevValue = map.get(key);
@@ -173,7 +197,7 @@ public class IdUserGroup {
         }
         map.put(key, value);
       }
-      LOG.info("Updated " + mapName + " map size:" + map.size());
+      LOG.info("Updated " + mapName + " map size: " + map.size());
       
     } catch (IOException e) {
       LOG.error("Can't update " + mapName + " map");
@@ -199,20 +223,115 @@ public class IdUserGroup {
           + " 'nobody' will be used for any user and group.");
       return;
     }
+    
+    StaticMapping staticMapping = new StaticMapping(
+        new HashMap<Integer, Integer>(), new HashMap<Integer, Integer>());
+    if (staticMappingFile.exists()) {
+      LOG.info("Using '" + staticMappingFile + "' for static UID/GID mapping...");
+      staticMapping = parseStaticMap(staticMappingFile);
+    } else {
+      LOG.info("Not doing static UID/GID mapping because '" + staticMappingFile
+          + "' does not exist.");
+    }
 
     if (OS.startsWith("Linux")) {
-      updateMapInternal(uMap, "user", LINUX_GET_ALL_USERS_CMD, ":");
-      updateMapInternal(gMap, "group", LINUX_GET_ALL_GROUPS_CMD, ":");
+      updateMapInternal(uMap, "user", LINUX_GET_ALL_USERS_CMD, ":",
+          staticMapping.uidMapping);
+      updateMapInternal(gMap, "group", LINUX_GET_ALL_GROUPS_CMD, ":",
+          staticMapping.gidMapping);
     } else {
       // Mac
-      updateMapInternal(uMap, "user", MAC_GET_ALL_USERS_CMD, "\\s+");
-      updateMapInternal(gMap, "group", MAC_GET_ALL_GROUPS_CMD, "\\s+");
+      updateMapInternal(uMap, "user", MAC_GET_ALL_USERS_CMD, "\\s+",
+          staticMapping.uidMapping);
+      updateMapInternal(gMap, "group", MAC_GET_ALL_GROUPS_CMD, "\\s+",
+          staticMapping.gidMapping);
     }
 
     uidNameMap = uMap;
     gidNameMap = gMap;
     lastUpdateTime = Time.monotonicNow();
   }
+  
+  @SuppressWarnings("serial")
+  static final class PassThroughMap<K> extends HashMap<K, K> {
+    
+    public PassThroughMap() {
+      this(new HashMap<K, K>());
+    }
+    
+    public PassThroughMap(Map<K, K> mapping) {
+      super();
+      for (Map.Entry<K, K> entry : mapping.entrySet()) {
+        super.put(entry.getKey(), entry.getValue());
+      }
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public K get(Object key) {
+      if (super.containsKey(key)) {
+        return super.get(key);
+      } else {
+        return (K) key;
+      }
+    }
+  }
+  
+  @VisibleForTesting
+  static final class StaticMapping {
+    final Map<Integer, Integer> uidMapping;
+    final Map<Integer, Integer> gidMapping;
+    
+    public StaticMapping(Map<Integer, Integer> uidMapping,
+        Map<Integer, Integer> gidMapping) {
+      this.uidMapping = new PassThroughMap<Integer>(uidMapping);
+      this.gidMapping = new PassThroughMap<Integer>(gidMapping);
+    }
+  }
+  
+  static StaticMapping parseStaticMap(File staticMapFile)
+      throws IOException {
+    
+    Map<Integer, Integer> uidMapping = new HashMap<Integer, Integer>();
+    Map<Integer, Integer> gidMapping = new HashMap<Integer, Integer>();
+    
+    BufferedReader in = new BufferedReader(new InputStreamReader(
+        new FileInputStream(staticMapFile)));
+    
+    try {
+      String line = null;
+      while ((line = in.readLine()) != null) {
+        // Skip entirely empty and comment lines.
+        if (EMPTY_LINE.matcher(line).matches() ||
+            COMMENT_LINE.matcher(line).matches()) {
+          continue;
+        }
+        
+        Matcher lineMatcher = MAPPING_LINE.matcher(line);
+        if (!lineMatcher.matches()) {
+          LOG.warn("Could not parse line '" + line + "'. Lines should be of " +
+              "the form '[uid|gid] [remote id] [local id]'. Blank lines and " +
+              "everything following a '#' on a line will be ignored.");
+          continue;
+        }
+        
+        // The regex above guarantees that these groups are well-formed
+        // integers, so they can be parsed without further error checking.
+        String firstComponent = lineMatcher.group(1);
+        int remoteId = Integer.parseInt(lineMatcher.group(2));
+        int localId = Integer.parseInt(lineMatcher.group(3));
+        if (firstComponent.equals("uid")) {
+          uidMapping.put(localId, remoteId);
+        } else {
+          gidMapping.put(localId, remoteId);
+        }
+      }
+    } finally {
+      in.close();
+    }
+    
+    return new StaticMapping(uidMapping, gidMapping);
+  }
 
   synchronized public int getUid(String user) throws IOException {
     checkAndUpdateMaps();
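
For reference, a static mapping file accepted by parseStaticMap above would
look like the following; the IDs are illustrative, and the file lives at
/etc/nfs.map unless dfs.nfs.static.mapping.file says otherwise:

    # /etc/nfs.map: [uid|gid] [remote id] [local id]
    uid 10 100    # local UID 100 is exposed to NFS clients as UID 10
    gid 10 200    # local GID 200 is exposed to NFS clients as GID 10

IDs with no entry pass through unchanged, courtesy of PassThroughMap.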

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/ACCESS3Response.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/ACCESS3Response.java?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/ACCESS3Response.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/ACCESS3Response.java Thu May 29 22:27:25 2014
@@ -46,10 +46,12 @@ public class ACCESS3Response extends NFS
   @Override
   public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
     super.writeHeaderAndResponse(out, xid, verifier);
-    out.writeBoolean(true);
-    postOpAttr.serialize(out);
     if (this.getStatus() == Nfs3Status.NFS3_OK) {
+      out.writeBoolean(true);
+      postOpAttr.serialize(out);
       out.writeInt(access);
+    } else {
+      out.writeBoolean(false);
     }
     return out;
   }
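
The change above corrects the XDR layout of a failed ACCESS3 reply: previously
the post-op attributes were written unconditionally while the access mask was
conditional. After the change, the attributes-follow boolean is false on
failure, and the attribute block plus access mask appear only on NFS3_OK. A
sketch of the matching read side; the XDR read methods and
Nfs3FileAttributes.deserialize are assumed counterparts, not part of this diff:

    boolean attrsFollow = xdr.readBoolean();
    if (attrsFollow) {
      // Present only on NFS3_OK, as now written by writeHeaderAndResponse.
      Nfs3FileAttributes postOpAttr = Nfs3FileAttributes.deserialize(xdr);
      int access = xdr.readInt();
    }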

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestIdUserGroup.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestIdUserGroup.java?rev=1598435&r1=1598434&r2=1598435&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestIdUserGroup.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestIdUserGroup.java Thu May 29 22:27:25 2014
@@ -19,15 +19,97 @@ package org.apache.hadoop.nfs.nfs3;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.nfs.nfs3.IdUserGroup.PassThroughMap;
+import org.apache.hadoop.nfs.nfs3.IdUserGroup.StaticMapping;
 import org.junit.Test;
 
 import com.google.common.collect.BiMap;
 import com.google.common.collect.HashBiMap;
 
 public class TestIdUserGroup {
+  
+  private static final Map<Integer, Integer> EMPTY_PASS_THROUGH_MAP =
+      new PassThroughMap<Integer>();
+  
+  @Test
+  public void testStaticMapParsing() throws IOException {
+    File tempStaticMapFile = File.createTempFile("nfs-", ".map");
+    final String staticMapFileContents =
+        "uid 10 100\n" +
+        "gid 10 200\n" +
+        "uid 11 201 # comment at the end of a line\n" +
+        "uid 12 301\n" +
+        "# Comment at the beginning of a line\n" +
+        "    # Comment that starts late in the line\n" +
+        "uid 10000 10001# line without whitespace before comment\n" +
+        "uid 13 302\n" +
+        "gid\t11\t201\n" + // Tabs instead of spaces.
+        "\n" + // Entirely empty line.
+        "gid 12 202";
+    OutputStream out = new FileOutputStream(tempStaticMapFile);
+    out.write(staticMapFileContents.getBytes());
+    out.close();
+    StaticMapping parsedMap = IdUserGroup.parseStaticMap(tempStaticMapFile);
+    
+    assertEquals(10, (int)parsedMap.uidMapping.get(100));
+    assertEquals(11, (int)parsedMap.uidMapping.get(201));
+    assertEquals(12, (int)parsedMap.uidMapping.get(301));
+    assertEquals(13, (int)parsedMap.uidMapping.get(302));
+    assertEquals(10, (int)parsedMap.gidMapping.get(200));
+    assertEquals(11, (int)parsedMap.gidMapping.get(201));
+    assertEquals(12, (int)parsedMap.gidMapping.get(202));
+    assertEquals(10000, (int)parsedMap.uidMapping.get(10001));
+    // Ensure pass-through of unmapped IDs works.
+    assertEquals(1000, (int)parsedMap.uidMapping.get(1000));
+  }
+  
+  @Test
+  public void testStaticMapping() throws IOException {
+    Map<Integer, Integer> uidStaticMap = new PassThroughMap<Integer>();
+    Map<Integer, Integer> gidStaticMap = new PassThroughMap<Integer>();
+    
+    uidStaticMap.put(11501, 10);
+    gidStaticMap.put(497, 200);
+    
+    // Maps for id to name map
+    BiMap<Integer, String> uMap = HashBiMap.create();
+    BiMap<Integer, String> gMap = HashBiMap.create();
+    
+    String GET_ALL_USERS_CMD =
+        "echo \"atm:x:1000:1000:Aaron T. Myers,,,:/home/atm:/bin/bash\n"
+        + "hdfs:x:11501:10787:Grid Distributed File System:/home/hdfs:/bin/bash\""
+        + " | cut -d: -f1,3";
+    
+    String GET_ALL_GROUPS_CMD = "echo \"hdfs:*:11501:hrt_hdfs\n"
+        + "mapred:x:497\n"
+        + "mapred2:x:498\""
+        + " | cut -d: -f1,3";
+
+    IdUserGroup.updateMapInternal(uMap, "user", GET_ALL_USERS_CMD, ":",
+        uidStaticMap);
+    IdUserGroup.updateMapInternal(gMap, "group", GET_ALL_GROUPS_CMD, ":",
+        gidStaticMap);
+    
+    assertEquals("hdfs", uMap.get(10));
+    assertEquals(10, (int)uMap.inverse().get("hdfs"));
+    assertEquals("atm", uMap.get(1000));
+    assertEquals(1000, (int)uMap.inverse().get("atm"));
+    
+    assertEquals("hdfs", gMap.get(11501));
+    assertEquals(11501, (int)gMap.inverse().get("hdfs"));
+    assertEquals("mapred", gMap.get(200));
+    assertEquals(200, (int)gMap.inverse().get("mapred"));
+    assertEquals("mapred2", gMap.get(498));
+    assertEquals(498, (int)gMap.inverse().get("mapred2"));
+  }
 
   @Test
   public void testDuplicates() throws IOException {
@@ -51,15 +133,17 @@ public class TestIdUserGroup {
     BiMap<Integer, String> uMap = HashBiMap.create();
     BiMap<Integer, String> gMap = HashBiMap.create();
 
-    IdUserGroup.updateMapInternal(uMap, "user", GET_ALL_USERS_CMD, ":");
-    assertTrue(uMap.size() == 5);
+    IdUserGroup.updateMapInternal(uMap, "user", GET_ALL_USERS_CMD, ":",
+        EMPTY_PASS_THROUGH_MAP);
+    assertEquals(5, uMap.size());
     assertEquals("root", uMap.get(0));
     assertEquals("hdfs", uMap.get(11501));
     assertEquals("hdfs2",uMap.get(11502));
     assertEquals("bin", uMap.get(2));
     assertEquals("daemon", uMap.get(1));
 
-    IdUserGroup.updateMapInternal(gMap, "group", GET_ALL_GROUPS_CMD, ":");
+    IdUserGroup.updateMapInternal(gMap, "group", GET_ALL_GROUPS_CMD, ":",
+        EMPTY_PASS_THROUGH_MAP);
     assertTrue(gMap.size() == 3);
     assertEquals("hdfs",gMap.get(11501));
     assertEquals("mapred", gMap.get(497));
@@ -90,7 +174,8 @@ public class TestIdUserGroup {
     BiMap<Integer, String> uMap = HashBiMap.create();
     BiMap<Integer, String> gMap = HashBiMap.create();
 
-    IdUserGroup.updateMapInternal(uMap, "user", GET_ALL_USERS_CMD, ":");
+    IdUserGroup.updateMapInternal(uMap, "user", GET_ALL_USERS_CMD, ":",
+        EMPTY_PASS_THROUGH_MAP);
     assertTrue(uMap.size() == 7);
     assertEquals("nfsnobody", uMap.get(-2));
     assertEquals("nfsnobody1", uMap.get(-1));
@@ -100,7 +185,8 @@ public class TestIdUserGroup {
     assertEquals("hdfs",uMap.get(11501));
     assertEquals("daemon", uMap.get(2));
 
-    IdUserGroup.updateMapInternal(gMap, "group", GET_ALL_GROUPS_CMD, ":");
+    IdUserGroup.updateMapInternal(gMap, "group", GET_ALL_GROUPS_CMD, ":",
+        EMPTY_PASS_THROUGH_MAP);
     assertTrue(gMap.size() == 7);
     assertEquals("hdfs",gMap.get(11501));
     assertEquals("rpcuser", gMap.get(29));
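
The helper semantics these tests rely on: PassThroughMap returns the key itself
for absent entries, so only statically mapped IDs are rewritten. A minimal
sketch mirroring testStaticMapping:

    Map<Integer, Integer> ids = new IdUserGroup.PassThroughMap<Integer>();
    ids.put(11501, 10);
    assert ids.get(11501) == 10;    // statically mapped
    assert ids.get(1000) == 1000;   // absent key passes through unchanged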


