hadoop-common-commits mailing list archives

From: szets...@apache.org
Subject: svn commit: r1565519 - in /hadoop/common/branches/HDFS-5535/hadoop-common-project: hadoop-common/ hadoop-common/src/main/java/ hadoop-common/src/main/java/org/apache/hadoop/http/ hadoop-common/src/main/java/org/apache/hadoop/io/retry/ hadoop-common/src...
Date: Fri, 07 Feb 2014 02:43:07 GMT
Author: szetszwo
Date: Fri Feb  7 02:43:04 2014
New Revision: 1565519

URL: http://svn.apache.org/r1565519
Log:
Merge r1555021 through r1565516 from trunk.

Added:
    hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java
      - copied unchanged from r1565516, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java
Modified:
    hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
    hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
    hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
    hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
    hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java
    hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
    hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/site/apt/SecureMode.apt.vm
    hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
    hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java
    hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestIdUserGroup.java
    hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java

Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1565519&r1=1565518&r2=1565519&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/CHANGES.txt Fri Feb  7 02:43:04 2014
@@ -113,6 +113,11 @@ Trunk (Unreleased)
 
     HADOOP-10177. Create CLI tools for managing keys. (Larry McCay via omalley)
 
+    HADOOP-10244. TestKeyShell improperly tests the results of delete (Larry
+    McCay via omalley)
+
+    HADOOP-10325. Improve jenkins javadoc warnings from test-patch.sh (cmccabe)
+
   BUG FIXES
 
     HADOOP-9451. Fault single-layer config if node group topology is enabled.
@@ -313,6 +318,15 @@ Release 2.4.0 - UNRELEASED
 
     HADOOP-10320. Javadoc in InterfaceStability.java lacks final </ul>.
     (René Nyffenegger via cnauroth)
+    
+    HADOOP-10085. CompositeService should allow adding services while being 
+    inited. (Steve Loughran via kasha)
+
+    HADOOP-10327. Trunk windows build broken after HDFS-5746.
+    (Vinay via cnauroth)
+
+    HADOOP-10330. TestFrameDecoder fails if it cannot bind port 12345.
+    (Arpit Agarwal)
 
 Release 2.3.0 - UNRELEASED
 
@@ -685,6 +699,8 @@ Release 2.3.0 - UNRELEASED
 
     HADOOP-10311. Cleanup vendor names from the code base. (tucu)
 
+    HADOOP-10273. Fix 'mvn site'. (Arpit Agarwal)
+
 Release 2.2.0 - 2013-10-13
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1563385-1565516

Propchange: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1563385-1565516

Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java?rev=1565519&r1=1565518&r2=1565519&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java Fri Feb  7 02:43:04 2014
@@ -34,13 +34,14 @@ public class HttpConfig {
     HTTPS_ONLY,
     HTTP_AND_HTTPS;
 
+    private static final Policy[] VALUES = values();
     public static Policy fromString(String value) {
-      if (HTTPS_ONLY.name().equalsIgnoreCase(value)) {
-        return HTTPS_ONLY;
-      } else if (HTTP_AND_HTTPS.name().equalsIgnoreCase(value)) {
-        return HTTP_AND_HTTPS;
+      for (Policy p : VALUES) {
+        if (p.name().equalsIgnoreCase(value)) {
+          return p;
+        }
       }
-      return HTTP_ONLY;
+      return null;
     }
 
     public boolean isHttpEnabled() {

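A usage note on the fromString() change: it no longer defaults unknown strings to HTTP_ONLY but returns null, so callers now own the fallback decision. A minimal caller-side sketch; the helper class and its default are illustrative, not part of this commit:

    import org.apache.hadoop.http.HttpConfig;

    class PolicyResolver {  // hypothetical helper, for illustration only
      static HttpConfig.Policy policyOrDefault(String value, HttpConfig.Policy dflt) {
        HttpConfig.Policy p = HttpConfig.Policy.fromString(value);
        return p == null ? dflt : p;  // fromString() may now return null
      }
    }
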
Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java?rev=1565519&r1=1565518&r2=1565519&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java Fri Feb  7 02:43:04 2014
@@ -151,6 +151,13 @@ public class RetryPolicies {
         delayMillis, maxDelayBase);
   }
   
+  public static final RetryPolicy failoverOnNetworkException(
+      RetryPolicy fallbackPolicy, int maxFailovers, int maxRetries,
+      long delayMillis, long maxDelayBase) {
+    return new FailoverOnNetworkExceptionRetry(fallbackPolicy, maxFailovers,
+        maxRetries, delayMillis, maxDelayBase);
+  }
+  
   static class TryOnceThenFail implements RetryPolicy {
     @Override
     public RetryAction shouldRetry(Exception e, int retries, int failovers,
@@ -516,18 +523,25 @@ public class RetryPolicies {
     
     private RetryPolicy fallbackPolicy;
     private int maxFailovers;
+    private int maxRetries;
     private long delayMillis;
     private long maxDelayBase;
     
     public FailoverOnNetworkExceptionRetry(RetryPolicy fallbackPolicy,
         int maxFailovers) {
-      this(fallbackPolicy, maxFailovers, 0, 0);
+      this(fallbackPolicy, maxFailovers, 0, 0, 0);
     }
     
     public FailoverOnNetworkExceptionRetry(RetryPolicy fallbackPolicy,
         int maxFailovers, long delayMillis, long maxDelayBase) {
+      this(fallbackPolicy, maxFailovers, 0, delayMillis, maxDelayBase);
+    }
+    
+    public FailoverOnNetworkExceptionRetry(RetryPolicy fallbackPolicy,
+        int maxFailovers, int maxRetries, long delayMillis, long maxDelayBase) {
       this.fallbackPolicy = fallbackPolicy;
       this.maxFailovers = maxFailovers;
+      this.maxRetries = maxRetries;
       this.delayMillis = delayMillis;
       this.maxDelayBase = maxDelayBase;
     }
@@ -549,6 +563,10 @@ public class RetryPolicies {
             "failovers (" + failovers + ") exceeded maximum allowed ("
             + maxFailovers + ")");
       }
+      if (retries - failovers > maxRetries) {
+        return new RetryAction(RetryAction.RetryDecision.FAIL, 0, "retries ("
+            + retries + ") exceeded maximum allowed (" + maxRetries + ")");
+      }
       
       if (e instanceof ConnectException ||
           e instanceof NoRouteToHostException ||

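The new failoverOnNetworkException overload exposes the maxRetries bound added to FailoverOnNetworkExceptionRetry; as the last hunk shows, retries beyond the failover count are now capped separately from maxFailovers. A construction sketch with illustrative values:

    import org.apache.hadoop.io.retry.RetryPolicies;
    import org.apache.hadoop.io.retry.RetryPolicy;

    // All numbers illustrative; TRY_ONCE_THEN_FAIL handles non-network errors.
    RetryPolicy policy = RetryPolicies.failoverOnNetworkException(
        RetryPolicies.TRY_ONCE_THEN_FAIL,
        15,      // maxFailovers
        10,      // maxRetries (new parameter in this overload)
        500,     // delayMillis
        15000);  // maxDelayBase
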
Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1565519&r1=1565518&r2=1565519&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Fri Feb  7 02:43:04 2014
@@ -451,6 +451,14 @@ public abstract class Server {
   }
 
   /**
+   * Refresh the service authorization ACL for the service handled by this server
+   * using the specified Configuration.
+   */
+  public void refreshServiceAclWithConfigration(Configuration conf,
+      PolicyProvider provider) {
+    serviceAuthorizationManager.refreshWithConfiguration(conf, provider);
+  }
+  /**
    * Returns a handle to the serviceAuthorizationManager (required in tests)
    * @return instance of ServiceAuthorizationManager for this server
    */

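The new method refreshes ACLs directly from a caller-supplied Configuration, skipping the hadoop-policy.xml reload that the existing refresh path performs. A sketch of intended use; 'server' (an ipc.Server) and 'provider' (a PolicyProvider) are assumed to exist:

    // Sketch: push in-memory ACL settings to a running server.
    Configuration aclConf = new Configuration();
    aclConf.set("security.client.protocol.acl", "alice,bob users");  // users, then groups
    server.refreshServiceAclWithConfigration(aclConf, provider);
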
Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java?rev=1565519&r1=1565518&r2=1565519&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java Fri Feb  7 02:43:04 2014
@@ -30,6 +30,8 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 
+import com.google.common.annotations.VisibleForTesting;
+
 @InterfaceAudience.Private
 public class ProxyUsers {
 
@@ -177,4 +179,13 @@ public class ProxyUsers {
       (list.contains("*"));
   }
 
+  @VisibleForTesting
+  public static Map<String, Collection<String>> getProxyGroups() {
+    return proxyGroups;
+  }
+
+  @VisibleForTesting
+  public static Map<String, Collection<String>> getProxyHosts() {
+    return proxyHosts;
+  }
 }

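The @VisibleForTesting getters expose the static proxy maps so tests can assert on the outcome of a refresh. A test-side sketch; it assumes the maps are keyed by the hadoop.proxyuser.* configuration key, which is how refreshSuperUserGroupsConfiguration() stores them:

    import static org.junit.Assert.assertTrue;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.authorize.ProxyUsers;
    import org.junit.Test;

    public class TestProxyUsersAccessors {  // hypothetical test class
      @Test
      public void testGettersSeeRefreshedConfig() {
        Configuration conf = new Configuration();
        conf.set("hadoop.proxyuser.oozie.groups", "users");
        ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
        assertTrue(ProxyUsers.getProxyGroups()
            .get("hadoop.proxyuser.oozie.groups").contains("users"));
      }
    }
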
Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java?rev=1565519&r1=1565518&r2=1565519&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java Fri Feb  7 02:43:04 2014
@@ -33,6 +33,8 @@ import org.apache.hadoop.security.Kerber
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /**
  * An authorization manager which handles service-level authorization
  * for incoming service requests.
@@ -120,19 +122,23 @@ public class ServiceAuthorizationManager
     // Make a copy of the original config, and load the policy file
     Configuration policyConf = new Configuration(conf);
     policyConf.addResource(policyFile);
-    
+    refreshWithConfiguration(policyConf, provider);
+  }
+
+  public synchronized void refreshWithConfiguration(Configuration conf,
+      PolicyProvider provider) {
     final Map<Class<?>, AccessControlList> newAcls =
-      new IdentityHashMap<Class<?>, AccessControlList>();
+        new IdentityHashMap<Class<?>, AccessControlList>();
 
     // Parse the config file
     Service[] services = provider.getServices();
     if (services != null) {
       for (Service service : services) {
-        AccessControlList acl = 
-          new AccessControlList(
-              policyConf.get(service.getServiceKey(), 
-                             AccessControlList.WILDCARD_ACL_VALUE)
-              );
+        AccessControlList acl =
+            new AccessControlList(
+                conf.get(service.getServiceKey(),
+                    AccessControlList.WILDCARD_ACL_VALUE)
+            );
         newAcls.put(service.getProtocol(), acl);
       }
     }
@@ -141,8 +147,13 @@ public class ServiceAuthorizationManager
     protocolToAcl = newAcls;
   }
 
-  // Package-protected for use in tests.
-  Set<Class<?>> getProtocolsWithAcls() {
+  @VisibleForTesting
+  public Set<Class<?>> getProtocolsWithAcls() {
     return protocolToAcl.keySet();
   }
+
+  @VisibleForTesting
+  public AccessControlList getProtocolsAcls(Class<?> className) {
+    return protocolToAcl.get(className);
+  }
 }

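refresh() now delegates to the new refreshWithConfiguration(), and the widened test accessors let a test inspect the resulting per-protocol ACLs. A test-style sketch; MyProtocol, provider, and the ACL key are placeholders:

    ServiceAuthorizationManager mgr = new ServiceAuthorizationManager();
    Configuration conf = new Configuration();
    conf.set("security.my.protocol.acl", "alice");  // hypothetical key
    mgr.refreshWithConfiguration(conf, provider);   // no policy file involved
    AccessControlList acl = mgr.getProtocolsAcls(MyProtocol.class);
    assertTrue(acl.getAclString().contains("alice"));
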
Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java?rev=1565519&r1=1565518&r2=1565519&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java Fri Feb  7 02:43:04 2014
@@ -19,7 +19,6 @@
 package org.apache.hadoop.service;
 
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
@@ -54,13 +53,13 @@ public class CompositeService extends Ab
   }
 
   /**
-   * Get an unmodifiable list of services
+   * Get a cloned list of services
    * @return a list of child services at the time of invocation -
    * added services will not be picked up.
    */
   public List<Service> getServices() {
     synchronized (serviceList) {
-      return Collections.unmodifiableList(serviceList);
+      return new ArrayList<Service>(serviceList);
     }
   }
 

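Returning a copy instead of an unmodifiable view is what makes HADOOP-10085 work: callers iterate a point-in-time snapshot, so a concurrent addService() while the composite is being inited can neither invalidate the iterator nor change the list mid-loop. A sketch; 'composite' is assumed:

    // The snapshot is stable even if another thread calls
    // composite.addService(...) while this loop runs.
    List<Service> snapshot = composite.getServices();
    for (Service s : snapshot) {
      System.out.println(s.getName() + ": " + s.getServiceState());
    }
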
Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1565519&r1=1565518&r2=1565519&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Fri Feb  7 02:43:04 2014
@@ -671,6 +671,7 @@ Java_org_apache_hadoop_io_nativeio_Nativ
   JNIEnv *env, jclass clazz, jobject jfd, jint jprot,
   jboolean jshared, jlong length)
 {
+#ifdef UNIX
   void *addr = 0;
   int prot, flags, fd;
   
@@ -684,18 +685,33 @@ Java_org_apache_hadoop_io_nativeio_Nativ
     throw_ioe(env, errno);
   }
   return (jlong)(intptr_t)addr;
+#endif  //   UNIX
+
+#ifdef WINDOWS
+  THROW(env, "java/io/IOException",
+    "The function POSIX.mmap() is not supported on Windows");
+  return (jlong)(intptr_t)NULL;  /* unreachable; satisfies the jlong return type */
+#endif
 }
 
 JNIEXPORT void JNICALL 
 Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_munmap(
   JNIEnv *env, jclass clazz, jlong jaddr, jlong length)
 {
+#ifdef UNIX
   void *addr;
 
   addr = (void*)(intptr_t)jaddr;
   if (munmap(addr, length) < 0) {
     throw_ioe(env, errno);
   }
+#endif  //   UNIX
+
+#ifdef WINDOWS
+  THROW(env, "java/io/IOException",
+    "The function POSIX.munmap() is not supported on Windows");
+  return;  /* void function; returning NULL here would not compile */
+#endif
 }
 
 
@@ -1050,4 +1066,3 @@ JNIEnv *env, jclass clazz)
 /**
  * vim: sw=2: ts=2: et:
  */
-

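On Windows both JNI entry points now compile as stubs that raise java.io.IOException, so Java callers get a catchable failure instead of a broken build. A hedged Java-side sketch; the method signatures are inferred from the JNI symbol names above, and 'fd' and 'length' are placeholders:

    try {
      long addr = NativeIO.POSIX.mmap(fd, NativeIO.POSIX.MMAP_PROT_READ, true, length);
      // ... read through the mapping ...
      NativeIO.POSIX.munmap(addr, length);
    } catch (IOException e) {
      // Expected on Windows now that the stubs throw; use a fallback path.
    }
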
Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/site/apt/SecureMode.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/site/apt/SecureMode.apt.vm?rev=1565519&r1=1565518&r2=1565519&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/site/apt/SecureMode.apt.vm (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/site/apt/SecureMode.apt.vm Fri Feb  7 02:43:04 2014
@@ -352,7 +352,8 @@ Configuration for <<<conf/core-site.xml>
 | | | This value is deprecated. Use dfs.http.policy |
 *-------------------------+-------------------------+------------------------+
 | <<<dfs.http.policy>>> | <HTTP_ONLY> or <HTTPS_ONLY> or <HTTP_AND_HTTPS> | |
-| | | HTTPS_ONLY turns off http access |
+| | | HTTPS_ONLY turns off http access. This option takes precedence over |
+| | | the deprecated configuration dfs.https.enable and hadoop.ssl.enabled. |
 *-------------------------+-------------------------+------------------------+
 | <<<dfs.namenode.https-address>>> | <nn_host_fqdn:50470> | |
 *-------------------------+-------------------------+------------------------+

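A hedged sketch of the precedence the new sentence documents; this is illustrative only, not the actual resolution code in HDFS:

    // dfs.http.policy wins; the deprecated key is consulted solely when it
    // is absent or unrecognized.
    HttpConfig.Policy policy =
        HttpConfig.Policy.fromString(conf.get("dfs.http.policy"));
    if (policy == null) {
      boolean https = conf.getBoolean("dfs.https.enable", false);  // deprecated
      policy = https ? HttpConfig.Policy.HTTP_AND_HTTPS
                     : HttpConfig.Policy.HTTP_ONLY;
    }
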
Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java?rev=1565519&r1=1565518&r2=1565519&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java Fri Feb  7 02:43:04 2014
@@ -41,7 +41,7 @@ public class TestKeyShell {
   
   @Test
   public void testKeySuccessfulKeyLifecycle() throws Exception {
-    outContent.flush();
+    outContent.reset();
     String[] args1 = {"create", "key1", "--provider", 
         "jceks://file" + tmpDir + "/keystore.jceks"};
     int rc = 0;
@@ -52,14 +52,14 @@ public class TestKeyShell {
     assertTrue(outContent.toString().contains("key1 has been successfully " +
     		"created."));
 
-    outContent.flush();
+    outContent.reset();
     String[] args2 = {"list", "--provider", 
         "jceks://file" + tmpDir + "/keystore.jceks"};
     rc = ks.run(args2);
     assertEquals(0, rc);
     assertTrue(outContent.toString().contains("key1"));
 
-    outContent.flush();
+    outContent.reset();
     String[] args3 = {"roll", "key1", "--provider", 
         "jceks://file" + tmpDir + "/keystore.jceks"};
     rc = ks.run(args3);
@@ -67,7 +67,7 @@ public class TestKeyShell {
     assertTrue(outContent.toString().contains("key1 has been successfully " +
     		"rolled."));
 
-    outContent.flush();
+    outContent.reset();
     String[] args4 = {"delete", "key1", "--provider", 
         "jceks://file" + tmpDir + "/keystore.jceks"};
     rc = ks.run(args4);
@@ -75,12 +75,12 @@ public class TestKeyShell {
     assertTrue(outContent.toString().contains("key1 has been successfully " +
     		"deleted."));
 
-    outContent.flush();
+    outContent.reset();
     String[] args5 = {"list", "--provider", 
         "jceks://file" + tmpDir + "/keystore.jceks"};
     rc = ks.run(args5);
     assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("key1"));
+    assertFalse(outContent.toString(), outContent.toString().contains("key1"));
   }
   
   @Test
@@ -165,7 +165,7 @@ public class TestKeyShell {
     assertTrue(outContent.toString().contains("key1 has been successfully " +
     		"created."));
 
-    outContent.flush();
+    outContent.reset();
     String[] args2 = {"delete", "key1", "--provider", 
         "jceks://file" + tmpDir + "/keystore.jceks"};
     rc = ks.run(args2);

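The flush()-to-reset() swap is the heart of HADOOP-10244: ByteArrayOutputStream.flush() is a no-op, so the old assertions ran against output accumulated across commands and a failed delete went undetected. A self-contained demonstration:

    import java.io.ByteArrayOutputStream;

    public class ResetVsFlush {
      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        out.write("first".getBytes());
        out.flush();                         // no-op; buffer keeps its bytes
        System.out.println(out.toString());  // prints "first"
        out.reset();                         // this is what actually clears it
        System.out.println(out.size());      // prints 0
      }
    }
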
Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java?rev=1565519&r1=1565518&r2=1565519&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java Fri Feb  7 02:43:04 2014
@@ -50,14 +50,6 @@ public class IdUserGroup {
   private BiMap<Integer, String> gidNameMap = HashBiMap.create();
 
   private long lastUpdateTime = 0; // Last time maps were updated
-
-  static public class DuplicateNameOrIdException extends IOException {
-    private static final long serialVersionUID = 1L;
-
-    public DuplicateNameOrIdException(String msg) {
-      super(msg);
-    }
-  }
   
   public IdUserGroup() throws IOException {
     updateMaps();
@@ -80,7 +72,8 @@ public class IdUserGroup {
     }
   }
 
-  private static final String DUPLICATE_NAME_ID_DEBUG_INFO = "NFS gateway can't start with duplicate name or id on the host system.\n"
+  private static final String DUPLICATE_NAME_ID_DEBUG_INFO =
+      "NFS gateway could have problem starting with duplicate name or id on the host system.\n"
      + "This is because HDFS (non-kerberos cluster) uses name as the only way to identify a user or group.\n"
      + "The host system with duplicated user/group name or id might work fine most of the time by itself.\n"
       + "However when NFS gateway talks to HDFS, HDFS accepts only user and group name.\n"
@@ -88,6 +81,16 @@ public class IdUserGroup {
      + "<getent passwd | cut -d: -f1,3> and <getent group | cut -d: -f1,3> on Linux systms,\n"
      + "<dscl . -list /Users UniqueID> and <dscl . -list /Groups PrimaryGroupID> on MacOS.";
   
+  private static void reportDuplicateEntry(final String header,
+      final Integer key, final String value,
+      final Integer ekey, final String evalue) {    
+      LOG.warn("\n" + header + String.format(
+          "new entry (%d, %s), existing entry: (%d, %s).\n%s\n%s",
+          key, value, ekey, evalue,
+          "The new entry is to be ignored for the following reason.",
+          DUPLICATE_NAME_ID_DEBUG_INFO));
+  }
+      
   /**
    * Get the whole list of users and groups and save them in the maps.
    * @throws IOException 
@@ -108,22 +111,27 @@ public class IdUserGroup {
         }
         LOG.debug("add to " + mapName + "map:" + nameId[0] + " id:" + nameId[1]);
         // HDFS can't differentiate duplicate names with simple authentication
-        Integer key = Integer.valueOf(nameId[1]);
-        String value = nameId[0];
+        final Integer key = Integer.valueOf(nameId[1]);
+        final String value = nameId[0];        
         if (map.containsKey(key)) {
-          LOG.error(String.format(
-              "Got duplicate id:(%d, %s), existing entry: (%d, %s).\n%s", key,
-              value, key, map.get(key), DUPLICATE_NAME_ID_DEBUG_INFO));
-          throw new DuplicateNameOrIdException("Got duplicate id.");
+          final String prevValue = map.get(key);
+          if (value.equals(prevValue)) {
+            // silently ignore equivalent entries
+            continue;
+          }
+          reportDuplicateEntry(
+              "Got multiple names associated with the same id: ",
+              key, value, key, prevValue);           
+          continue;
         }
-        if (map.containsValue(nameId[0])) {
-          LOG.error(String.format(
-              "Got duplicate name:(%d, %s), existing entry: (%d, %s) \n%s",
-              key, value, map.inverse().get(value), value,
-              DUPLICATE_NAME_ID_DEBUG_INFO));
-          throw new DuplicateNameOrIdException("Got duplicate name");
+        if (map.containsValue(value)) {
+          final Integer prevKey = map.inverse().get(value);
+          reportDuplicateEntry(
+              "Got multiple ids associated with the same name: ",
+              key, value, prevKey, value);
+          continue;
         }
-        map.put(Integer.valueOf(nameId[1]), nameId[0]);
+        map.put(key, value);
       }
       LOG.info("Updated " + mapName + " map size:" + map.size());
       

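The loop now warns and skips instead of throwing DuplicateNameOrIdException, and the containsKey/containsValue guards also protect the map itself, since Guava's HashBiMap.put() throws IllegalArgumentException when the value is already bound to a different key. A stand-alone demonstration of that BiMap behavior:

    import com.google.common.collect.BiMap;
    import com.google.common.collect.HashBiMap;

    public class BiMapDuplicateDemo {
      public static void main(String[] args) {
        BiMap<Integer, String> map = HashBiMap.create();
        map.put(497, "mapred");
        // map.put(498, "mapred") would throw IllegalArgumentException,
        // because "mapred" is already bound to 497; hence the guard.
        if (!map.containsValue("mapred")) {
          map.put(498, "mapred");
        }
        System.out.println(map);  // {497=mapred}
      }
    }
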
Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestIdUserGroup.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestIdUserGroup.java?rev=1565519&r1=1565518&r2=1565519&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestIdUserGroup.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestIdUserGroup.java Fri Feb  7 02:43:04 2014
@@ -17,11 +17,10 @@
  */
 package org.apache.hadoop.nfs.nfs3;
 
-import static org.junit.Assert.fail;
-
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 import java.io.IOException;
 
-import org.apache.hadoop.nfs.nfs3.IdUserGroup.DuplicateNameOrIdException;
 import org.junit.Test;
 
 import com.google.common.collect.BiMap;
@@ -33,24 +32,36 @@ public class TestIdUserGroup {
   public void testDuplicates() throws IOException {
     String GET_ALL_USERS_CMD = "echo \"root:x:0:0:root:/root:/bin/bash\n"
         + "hdfs:x:11501:10787:Grid Distributed File System:/home/hdfs:/bin/bash\n"
-        + "hdfs:x:11502:10788:Grid Distributed File System:/home/hdfs:/bin/bash\""
+        + "hdfs:x:11502:10788:Grid Distributed File System:/home/hdfs:/bin/bash\n"
+        + "hdfs1:x:11501:10787:Grid Distributed File System:/home/hdfs:/bin/bash\n"
+        + "hdfs2:x:11502:10787:Grid Distributed File System:/home/hdfs:/bin/bash\n"
+        + "bin:x:2:2:bin:/bin:/bin/sh\n"
+        + "bin:x:1:1:bin:/bin:/sbin/nologin\n"
+        + "daemon:x:1:1:daemon:/usr/sbin:/bin/sh\n"
+        + "daemon:x:2:2:daemon:/sbin:/sbin/nologin\""
         + " | cut -d: -f1,3";
     String GET_ALL_GROUPS_CMD = "echo \"hdfs:*:11501:hrt_hdfs\n"
-        + "mapred:x:497\n" + "mapred2:x:497\"" + " | cut -d: -f1,3";
+        + "mapred:x:497\n"
+        + "mapred2:x:497\n"
+        + "mapred:x:498\n" 
+        + "mapred3:x:498\"" 
+        + " | cut -d: -f1,3";
     // Maps for id to name map
     BiMap<Integer, String> uMap = HashBiMap.create();
     BiMap<Integer, String> gMap = HashBiMap.create();
 
-    try {
-      IdUserGroup.updateMapInternal(uMap, "user", GET_ALL_USERS_CMD, ":");
-      fail("didn't detect the duplicate name");
-    } catch (DuplicateNameOrIdException e) {
-    }
+    IdUserGroup.updateMapInternal(uMap, "user", GET_ALL_USERS_CMD, ":");
+    assertTrue(uMap.size() == 5);
+    assertEquals(uMap.get(0), "root");
+    assertEquals(uMap.get(11501), "hdfs");
+    assertEquals(uMap.get(11502), "hdfs2");
+    assertEquals(uMap.get(2), "bin");
+    assertEquals(uMap.get(1), "daemon");
     
-    try {
-      IdUserGroup.updateMapInternal(gMap, "group", GET_ALL_GROUPS_CMD, ":");
-      fail("didn't detect the duplicate id");
-    } catch (DuplicateNameOrIdException e) {
-    }
+    IdUserGroup.updateMapInternal(gMap, "group", GET_ALL_GROUPS_CMD, ":");
+    assertTrue(gMap.size() == 3);
+    assertEquals(gMap.get(11501), "hdfs");
+    assertEquals(gMap.get(497), "mapred");
+    assertEquals(gMap.get(498), "mapred3");    
   }
 }

Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java?rev=1565519&r1=1565518&r2=1565519&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java Fri Feb  7 02:43:04 2014
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertFal
 import static org.junit.Assert.assertTrue;
 
 import java.nio.ByteBuffer;
+import java.util.Random;
 
 import org.apache.hadoop.oncrpc.RpcUtil.RpcFrameDecoder;
 import org.apache.hadoop.oncrpc.security.CredentialsNone;
@@ -31,17 +32,17 @@ import org.jboss.netty.buffer.ByteBuffer
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.buffer.ChannelBuffers;
 import org.jboss.netty.channel.Channel;
+import org.jboss.netty.channel.ChannelException;
 import org.jboss.netty.channel.ChannelHandlerContext;
 import org.junit.Test;
 import org.mockito.Mockito;
 
 public class TestFrameDecoder {
 
-  private static int port = 12345; // some random server port
   private static int resultSize;
 
-  static void testRequest(XDR request) {
-    SimpleTcpClient tcpClient = new SimpleTcpClient("localhost", port, request,
+  static void testRequest(XDR request, int serverPort) {
+    SimpleTcpClient tcpClient = new SimpleTcpClient("localhost", serverPort, request,
         true);
     tcpClient.run();
   }
@@ -148,10 +149,25 @@ public class TestFrameDecoder {
   @Test
   public void testFrames() {
 
-    RpcProgram program = new TestFrameDecoder.TestRpcProgram("TestRpcProgram",
-        "localhost", port, 100000, 1, 2);
-    SimpleTcpServer tcpServer = new SimpleTcpServer(port, program, 1);
-    tcpServer.run();
+    Random rand = new Random();
+    int serverPort = 30000 + rand.nextInt(10000);
+    int retries = 10;    // A few retries in case initial choice is in use.
+
+    while (true) {
+      try {
+        RpcProgram program = new TestFrameDecoder.TestRpcProgram("TestRpcProgram",
+            "localhost", serverPort, 100000, 1, 2);
+        SimpleTcpServer tcpServer = new SimpleTcpServer(serverPort, program, 1);
+        tcpServer.run();
+        break;          // Successfully bound a port, break out.
+      } catch (ChannelException ce) {
+        if (retries-- > 0) {
+          serverPort += rand.nextInt(20); // Port in use? Try another.
+        } else {
+          throw ce;     // Out of retries.
+        }
+      }
+    }
 
     XDR xdrOut = createGetportMount();
     int headerSize = xdrOut.size();
@@ -161,7 +177,7 @@ public class TestFrameDecoder {
     int requestSize = xdrOut.size() - headerSize;
 
     // Send the request to the server
-    testRequest(xdrOut);
+    testRequest(xdrOut, serverPort);
 
     // Verify the server got the request with right size
     assertEquals(requestSize, resultSize);

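The loop above probes random ports in [30000, 40000), nudging the port forward on ChannelException. An alternative the commit does not use is to let the OS pick an ephemeral port first; a sketch, noting the small race between close() and the server's later bind():

    import java.io.IOException;
    import java.net.ServerSocket;

    // Alternative sketch only; not what this commit does.
    static int findFreePort() throws IOException {
      ServerSocket s = new ServerSocket(0);  // port 0 = OS-assigned
      try {
        return s.getLocalPort();
      } finally {
        s.close();                           // racy: port could be re-taken
      }
    }
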

