hadoop-common-commits mailing list archives

From: ji...@apache.org
Subject: svn commit: r1568437 [3/3] - in /hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common: ./ src/main/docs/ src/main/java/ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/fs/s3/ src/main/java/org/apache/hadoop/fs/s3nati...
Date: Fri, 14 Feb 2014 18:32:41 GMT
Propchange: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1565517-1568420

Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java?rev=1568437&r1=1568436&r2=1568437&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java Fri Feb 14 18:32:37 2014
@@ -114,7 +114,8 @@ class Globber {
       if (fs != null) {
         scheme = fs.getUri().getScheme();
       } else {
-        scheme = fc.getDefaultFileSystem().getUri().getScheme();
+        scheme = fc.getFSofPath(fc.fixRelativePart(path)).
+                    getUri().getScheme();
       }
     }
     return scheme;
@@ -126,7 +127,8 @@ class Globber {
       if (fs != null) {
         authority = fs.getUri().getAuthority();
       } else {
-        authority = fc.getDefaultFileSystem().getUri().getAuthority();
+        authority = fc.getFSofPath(fc.fixRelativePart(path)).
+                      getUri().getAuthority();
       }
     }
     return authority ;
@@ -162,18 +164,26 @@ class Globber {
       // Starting out at the root of the filesystem, we try to match
       // filesystem entries against pattern components.
       ArrayList<FileStatus> candidates = new ArrayList<FileStatus>(1);
+      // To get the "real" FileStatus of root, we'd have to do an expensive
+      // RPC to the NameNode.  So we create a placeholder FileStatus which has
+      // the correct path, but defaults for the rest of the information.
+      // Later, if it turns out we actually want the FileStatus of root, we'll
+      // replace the placeholder with a real FileStatus obtained from the
+      // NameNode.
+      FileStatus rootPlaceholder;
       if (Path.WINDOWS && !components.isEmpty()
           && Path.isWindowsAbsolutePath(absPattern.toUri().getPath(), true)) {
         // On Windows the path could begin with a drive letter, e.g. /E:/foo.
         // We will skip matching the drive letter and start from listing the
         // root of the filesystem on that drive.
         String driveLetter = components.remove(0);
-        candidates.add(new FileStatus(0, true, 0, 0, 0, new Path(scheme,
-            authority, Path.SEPARATOR + driveLetter + Path.SEPARATOR)));
+        rootPlaceholder = new FileStatus(0, true, 0, 0, 0, new Path(scheme,
+            authority, Path.SEPARATOR + driveLetter + Path.SEPARATOR));
       } else {
-        candidates.add(new FileStatus(0, true, 0, 0, 0,
-            new Path(scheme, authority, Path.SEPARATOR)));
+        rootPlaceholder = new FileStatus(0, true, 0, 0, 0,
+            new Path(scheme, authority, Path.SEPARATOR));
       }
+      candidates.add(rootPlaceholder);
       
       for (int componentIdx = 0; componentIdx < components.size();
           componentIdx++) {
@@ -245,6 +255,12 @@ class Globber {
         candidates = newCandidates;
       }
       for (FileStatus status : candidates) {
+        // Use object equality to see if this status is the root placeholder.
+        // See the explanation for rootPlaceholder above for more information.
+        if (status == rootPlaceholder) {
+          status = getFileStatus(rootPlaceholder.getPath());
+          if (status == null) continue;
+        }
         // HADOOP-3497 semantics: the user-defined filter is applied at the
         // end, once the full path is built up.
         if (filter.accept(status.getPath())) {
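
The hunk above replaces an eager root lookup with a cheap placeholder that is only resolved, via one getFileStatus() call, if it survives matching. Below is a minimal, stand-alone sketch of that pattern; it is not part of the commit, and the Record class and expensiveLookup() helper are made-up stand-ins for FileStatus and the NameNode RPC.

    import java.util.ArrayList;
    import java.util.List;

    public class PlaceholderSketch {
      static class Record {
        final String path;
        Record(String path) { this.path = path; }
      }

      // Stand-in for the expensive remote lookup that getFileStatus() performs.
      static Record expensiveLookup(String path) {
        return new Record(path);
      }

      public static void main(String[] args) {
        // Cheap placeholder with the right path but default metadata.
        Record rootPlaceholder = new Record("/");
        List<Record> candidates = new ArrayList<Record>(1);
        candidates.add(rootPlaceholder);

        // ... pattern matching would run here and may drop the placeholder ...

        List<Record> results = new ArrayList<Record>();
        for (Record candidate : candidates) {
          // Object identity (==) is deliberate: only that one placeholder
          // instance is swapped for a real record, and only if it survived.
          if (candidate == rootPlaceholder) {
            candidate = expensiveLookup(rootPlaceholder.path);
            if (candidate == null) {
              continue;
            }
          }
          results.add(candidate);
        }
        System.out.println("kept " + results.size() + " result(s)");
      }
    }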

Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java?rev=1568437&r1=1568436&r2=1568437&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java Fri Feb 14 18:32:37 2014
@@ -443,6 +443,12 @@ public class S3FileSystem extends FileSy
     return getConf().getLong("fs.s3.block.size", 64 * 1024 * 1024);
   }
 
+  @Override
+  public String getCanonicalServiceName() {
+    // Does not support Token
+    return null;
+  }
+
   // diagnostic methods
 
   void dump() throws IOException {

Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java?rev=1568437&r1=1568436&r2=1568437&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java Fri Feb 14 18:32:37 2014
@@ -733,4 +733,10 @@ public class NativeS3FileSystem extends 
   public Path getWorkingDirectory() {
     return workingDir;
   }
+
+  @Override
+  public String getCanonicalServiceName() {
+    // Does not support Token
+    return null;
+  }
 }
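
Returning null from getCanonicalServiceName() is the FileSystem convention for "this filesystem issues no delegation tokens", which the two hunks above now adopt for S3 and S3N. The following is only a usage sketch of how a caller might honor that contract; the s3n URI, bucket name, and "yarn" renewer are hypothetical and assume a configured connector.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.security.token.Token;

    public class CanonicalNameSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = new Path("s3n://example-bucket/").getFileSystem(conf); // hypothetical bucket
        String service = fs.getCanonicalServiceName();
        if (service == null) {
          // S3 and S3N now take this branch: there is no token to fetch or renew.
          System.out.println(fs.getUri() + " does not issue delegation tokens");
        } else {
          Token<?> token = fs.getDelegationToken("yarn"); // "yarn" is a placeholder renewer
          System.out.println("got token " + token + " for service " + service);
        }
      }
    }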

Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/LossyRetryInvocationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/LossyRetryInvocationHandler.java?rev=1568437&r1=1568436&r2=1568437&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/LossyRetryInvocationHandler.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/LossyRetryInvocationHandler.java Fri Feb 14 18:32:37 2014
@@ -51,11 +51,15 @@ public class LossyRetryInvocationHandler
     int retryCount = RetryCount.get();
     if (retryCount < this.numToDrop) {
       RetryCount.set(++retryCount);
-      LOG.info("Drop the response. Current retryCount == " + retryCount);
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Drop the response. Current retryCount == " + retryCount);
+      }
       throw new RetriableException("Fake Exception");
     } else {
-      LOG.info("retryCount == " + retryCount
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("retryCount == " + retryCount
           + ". It's time to normally process the response");
+      }
       return result;
     }
   }
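
The change above demotes the messages to debug level and wraps the string concatenation in an isDebugEnabled() guard, so the message is only built when debug logging is actually on. A tiny sketch of the same commons-logging idiom, with the class name and retryCount value as illustrative stand-ins:

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class LogGuardSketch {
      private static final Log LOG = LogFactory.getLog(LogGuardSketch.class);

      public static void main(String[] args) {
        int retryCount = 3;  // example value
        // The guard skips both the concatenation and the call when DEBUG is off.
        if (LOG.isDebugEnabled()) {
          LOG.debug("Drop the response. Current retryCount == " + retryCount);
        }
      }
    }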

Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1568437&r1=1568436&r2=1568437&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Fri Feb 14 18:32:37 2014
@@ -220,7 +220,7 @@ public class Client {
    * @param conf Configuration
    * @return the ping interval
    */
-  final static int getPingInterval(Configuration conf) {
+  final public static int getPingInterval(Configuration conf) {
     return conf.getInt(CommonConfigurationKeys.IPC_PING_INTERVAL_KEY,
         CommonConfigurationKeys.IPC_PING_INTERVAL_DEFAULT);
   }
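
Making getPingInterval public (it was previously package-private) lets code outside org.apache.hadoop.ipc read the effective ping interval. A small usage sketch, assuming hadoop-common is on the classpath:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.ipc.Client;

    public class PingIntervalSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Reads ipc.ping.interval, falling back to the built-in default.
        int pingIntervalMs = Client.getPingInterval(conf);
        System.out.println("IPC ping interval: " + pingIntervalMs + " ms");
      }
    }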

Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1568437&r1=1568436&r2=1568437&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Fri Feb 14 18:32:37 2014
@@ -66,6 +66,7 @@ import javax.security.sasl.SaslServer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configuration.IntegerRanges;
@@ -454,9 +455,10 @@ public abstract class Server {
    * Refresh the service authorization ACL for the service handled by this server
    * using the specified Configuration.
    */
-  public void refreshServiceAclWithConfigration(Configuration conf,
+  @Private
+  public void refreshServiceAclWithLoadedConfiguration(Configuration conf,
       PolicyProvider provider) {
-    serviceAuthorizationManager.refreshWithConfiguration(conf, provider);
+    serviceAuthorizationManager.refreshWithLoadedConfiguration(conf, provider);
   }
   /**
    * Returns a handle to the serviceAuthorizationManager (required in tests)

Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocketWatcher.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocketWatcher.java?rev=1568437&r1=1568436&r2=1568437&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocketWatcher.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocketWatcher.java Fri Feb 14 18:32:37 2014
@@ -37,6 +37,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.NativeCodeLoader;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.Uninterruptibles;
 
@@ -48,7 +49,7 @@ import com.google.common.util.concurrent
  * See {@link DomainSocket} for more information about UNIX domain sockets.
  */
 @InterfaceAudience.LimitedPrivate("HDFS")
-public final class DomainSocketWatcher extends Thread implements Closeable {
+public final class DomainSocketWatcher implements Closeable {
   static {
     if (SystemUtils.IS_OS_WINDOWS) {
       loadingFailureReason = "UNIX Domain sockets are not available on Windows.";
@@ -281,7 +282,7 @@ public final class DomainSocketWatcher e
         try {
           processedCond.await();
         } catch (InterruptedException e) {
-          this.interrupt();
+          Thread.currentThread().interrupt();
         }
         if (!toAdd.contains(entry)) {
           break;
@@ -308,7 +309,7 @@ public final class DomainSocketWatcher e
         try {
           processedCond.await();
         } catch (InterruptedException e) {
-          this.interrupt();
+          Thread.currentThread().interrupt();
         }
         if (!toRemove.containsKey(sock.fd)) {
           break;
@@ -381,7 +382,8 @@ public final class DomainSocketWatcher e
     }
   }
 
-  private final Thread watcherThread = new Thread(new Runnable() {
+  @VisibleForTesting
+  final Thread watcherThread = new Thread(new Runnable() {
     @Override
     public void run() {
       LOG.info(this + ": starting with interruptCheckPeriodMs = " +
@@ -443,6 +445,7 @@ public final class DomainSocketWatcher e
       } catch (IOException e) {
         LOG.error(toString() + " terminating on IOException", e);
       } finally {
+        kick(); // allow the handler for notificationSockets[0] to read a byte
         for (Entry entry : entries.values()) {
           sendCallback("close", entries, fdSet, entry.getDomainSocket().fd);
         }
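
With this change the class owns its worker in a watcherThread field instead of extending Thread, and restores the caller's interrupt status rather than calling this.interrupt(), which no longer exists on the class. Below is a stripped-down sketch of that structure only; WatcherSketch, its sleep-based loop, and awaitSomething() are invented stand-ins, not the real implementation.

    import java.io.Closeable;
    import java.io.IOException;

    public class WatcherSketch implements Closeable {
      // Owning the thread as a field keeps the public type small and lets
      // tests interrupt or join the worker directly.
      final Thread watcherThread = new Thread(new Runnable() {
        @Override
        public void run() {
          // Placeholder event loop; the real class polls domain sockets here.
          while (!Thread.currentThread().isInterrupted()) {
            try {
              Thread.sleep(10);
            } catch (InterruptedException e) {
              return;  // exit the loop when interrupted
            }
          }
        }
      });

      public WatcherSketch() {
        watcherThread.start();
      }

      public void awaitSomething() {
        try {
          Thread.sleep(1);  // stand-in for processedCond.await()
        } catch (InterruptedException e) {
          // The class no longer extends Thread, so this.interrupt() is not
          // available; restoring the caller's interrupt flag replaces it.
          Thread.currentThread().interrupt();
        }
      }

      @Override
      public void close() throws IOException {
        watcherThread.interrupt();
      }
    }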

Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java?rev=1568437&r1=1568436&r2=1568437&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java Fri Feb 14 18:32:37 2014
@@ -27,6 +27,7 @@ import java.util.concurrent.ConcurrentHa
 
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -240,4 +241,18 @@ public class Groups {
     }
     return GROUPS;
   }
+
+  /**
+   * Create new groups used to map user-to-groups with loaded configuration.
+   * @param conf
+   * @return the groups being used to map user-to-groups.
+   */
+  @Private
+  public static synchronized Groups
+      getUserToGroupsMappingServiceWithLoadedConfiguration(
+          Configuration conf) {
+
+    GROUPS = new Groups(conf);
+    return GROUPS;
+  }
 }
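
Unlike getUserToGroupsMappingService(), the new factory unconditionally rebuilds the shared Groups instance from the Configuration it is handed rather than reusing a cached one. A hedged usage sketch; the user name "alice" is hypothetical.

    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.Groups;

    public class GroupsSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Callers that have already merged extra resources into conf get a
        // user-to-groups mapping that reflects them, instead of a stale cache.
        Groups groups = Groups.getUserToGroupsMappingServiceWithLoadedConfiguration(conf);
        List<String> userGroups = groups.getGroups("alice"); // hypothetical user
        System.out.println("alice belongs to " + userGroups);
      }
    }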

Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java?rev=1568437&r1=1568436&r2=1568437&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java Fri Feb 14 18:32:37 2014
@@ -356,7 +356,7 @@ public class LdapGroupsMapping
         c = reader.read();
       }
       reader.close();
-      return password.toString();
+      return password.toString().trim();
     } catch (IOException ioe) {
       throw new RuntimeException("Could not read password file: " + pwFile, ioe);
     }
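
The added trim() strips the trailing newline that editors typically leave at the end of a password file, which would otherwise become part of the credential sent to the LDAP server. A self-contained sketch of the same read-then-trim logic; readPassword is an illustrative helper name, not the class's actual method.

    import java.io.FileReader;
    import java.io.IOException;
    import java.io.Reader;

    public class PasswordFileSketch {
      // Reads the file character by character, as the method above does,
      // then trims surrounding whitespace including the trailing newline.
      static String readPassword(String pwFile) throws IOException {
        StringBuilder password = new StringBuilder();
        Reader reader = new FileReader(pwFile);
        try {
          int c = reader.read();
          while (c > -1) {
            password.append((char) c);
            c = reader.read();
          }
        } finally {
          reader.close();
        }
        return password.toString().trim();
      }
    }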

Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java?rev=1568437&r1=1568436&r2=1568437&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java Fri Feb 14 18:32:37 2014
@@ -26,6 +26,7 @@ import java.util.Set;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -122,10 +123,11 @@ public class ServiceAuthorizationManager
     // Make a copy of the original config, and load the policy file
     Configuration policyConf = new Configuration(conf);
     policyConf.addResource(policyFile);
-    refreshWithConfiguration(policyConf, provider);
+    refreshWithLoadedConfiguration(policyConf, provider);
   }
 
-  public synchronized void refreshWithConfiguration(Configuration conf,
+  @Private
+  public synchronized void refreshWithLoadedConfiguration(Configuration conf,
       PolicyProvider provider) {
     final Map<Class<?>, AccessControlList> newAcls =
         new IdentityHashMap<Class<?>, AccessControlList>();
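
The rename makes the contract explicit: refreshWithLoadedConfiguration() trusts the Configuration it is given, while refresh() first copies the configuration and loads the policy file into it. A usage sketch under the assumption that the default constructor and the built-in PolicyProvider.DEFAULT_POLICY_PROVIDER are acceptable for illustration:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.authorize.PolicyProvider;
    import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;

    public class AclRefreshSketch {
      public static void main(String[] args) {
        ServiceAuthorizationManager manager = new ServiceAuthorizationManager();
        Configuration conf = new Configuration();
        // The "Loaded" variant skips re-reading the policy file and applies
        // the ACLs already present in conf through the supplied provider.
        manager.refreshWithLoadedConfiguration(conf, PolicyProvider.DEFAULT_POLICY_PROVIDER);
      }
    }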

Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/overview.html
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/overview.html?rev=1568437&r1=1568436&r2=1568437&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/overview.html (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/main/java/overview.html Fri Feb 14 18:32:37 2014
@@ -57,7 +57,7 @@ that process vast amounts of data. Here'
 
 <ul>
   <li>
-    Hadoop was been demonstrated on GNU/Linux clusters with 2000 nodes.
+    Hadoop has been demonstrated on GNU/Linux clusters with more than 4000 nodes.
   </li>
   <li>
     Windows is also a supported platform.

Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3/S3FileSystemContractBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3/S3FileSystemContractBaseTest.java?rev=1568437&r1=1568436&r2=1568437&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3/S3FileSystemContractBaseTest.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3/S3FileSystemContractBaseTest.java Fri Feb 14 18:32:37 2014
@@ -54,5 +54,10 @@ public abstract class S3FileSystemContra
     assertEquals("Double default block size", newBlockSize,
 	fs.getFileStatus(file).getBlockSize());
   }
-  
+
+  public void testCanonicalName() throws Exception {
+    assertNull("s3 doesn't support security token and shouldn't have canonical name",
+               fs.getCanonicalServiceName());
+  }
+
 }

Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/NativeS3FileSystemContractBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/NativeS3FileSystemContractBaseTest.java?rev=1568437&r1=1568436&r2=1568437&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/NativeS3FileSystemContractBaseTest.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/NativeS3FileSystemContractBaseTest.java Fri Feb 14 18:32:37 2014
@@ -48,7 +48,12 @@ public abstract class NativeS3FileSystem
     store.purge("test");
     super.tearDown();
   }
-  
+
+  public void testCanonicalName() throws Exception {
+    assertNull("s3n doesn't support security token and shouldn't have canonical name",
+               fs.getCanonicalServiceName());
+  }
+
   public void testListStatusForRoot() throws Exception {
     FileStatus[] paths = fs.listStatus(path("/"));
     assertEquals("Root directory is not empty; ", 0, paths.length);
@@ -60,7 +65,7 @@ public abstract class NativeS3FileSystem
     assertEquals(1, paths.length);
     assertEquals(path("/test"), paths[0].getPath());
   }
-  
+
   public void testNoTrailingBackslashOnBucket() throws Exception {
     assertTrue(fs.getFileStatus(new Path(fs.getUri().toString())).isDirectory());
   }

Modified: hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocketWatcher.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocketWatcher.java?rev=1568437&r1=1568436&r2=1568437&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocketWatcher.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocketWatcher.java Fri Feb 14 18:32:37 2014
@@ -73,9 +73,10 @@ public class TestDomainSocketWatcher {
    */
   @Test(timeout=60000)
   public void testInterruption() throws Exception {
-    DomainSocketWatcher watcher = new DomainSocketWatcher(10);
-    watcher.interrupt();
-    Uninterruptibles.joinUninterruptibly(watcher);
+    final DomainSocketWatcher watcher = new DomainSocketWatcher(10);
+    watcher.watcherThread.interrupt();
+    Uninterruptibles.joinUninterruptibly(watcher.watcherThread);
+    watcher.close();
   }
   
   @Test(timeout=300000)


