hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From w...@apache.org
Subject hadoop git commit: HADOOP-14386. Rewind trunk from Guava 21.0 back to Guava 11.0.2.
Date Tue, 09 May 2017 16:23:00 GMT
Repository: hadoop
Updated Branches:
  refs/heads/trunk c60164fb6 -> 543aac9f2


HADOOP-14386. Rewind trunk from Guava 21.0 back to Guava 11.0.2.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/543aac9f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/543aac9f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/543aac9f

Branch: refs/heads/trunk
Commit: 543aac9f281871a40473e83061f6deadc0bbdab7
Parents: c60164f
Author: Andrew Wang <wang@apache.org>
Authored: Tue May 9 09:22:53 2017 -0700
Committer: Andrew Wang <wang@apache.org>
Committed: Tue May 9 09:22:53 2017 -0700

----------------------------------------------------------------------
 .../apache/hadoop/fs/shell/XAttrCommands.java   |   7 +-
 .../hdfs/server/datanode/DataStorage.java       |  10 +-
 .../hdfs/server/namenode/AclTransformation.java |   7 +-
 .../hadoop/hdfs/server/namenode/JournalSet.java |  33 ++--
 .../qjournal/client/DirectExecutorService.java  | 154 +++++++++++++++++++
 .../hdfs/qjournal/client/TestQJMWithFaults.java |   3 +-
 .../client/TestQuorumJournalManager.java        |   3 +-
 hadoop-project/pom.xml                          |   2 +-
 .../pb/ApplicationSubmissionContextPBImpl.java  |   3 +-
 .../org/apache/hadoop/yarn/webapp/WebApp.java   |   6 +-
 .../server/resourcemanager/RMAppManager.java    |  20 ++-
 .../fair/AllocationFileLoaderService.java       |   3 +-
 .../scheduler/fair/FairSchedulerUtilities.java  |  69 +++++++++
 .../scheduler/fair/QueueManager.java            |   6 +-
 .../fair/TestFairSchedulerUtilities.java        |  67 ++++++++
 15 files changed, 341 insertions(+), 52 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/543aac9f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
index 4505aa9..6301776 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
@@ -20,12 +20,9 @@ package org.apache.hadoop.fs.shell;
 import java.io.IOException;
 import java.util.Iterator;
 import java.util.LinkedList;
-import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import com.google.common.base.Enums;
-import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
 
 import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -66,8 +63,6 @@ class XAttrCommands extends FsCommand {
       " and values encoded as hexadecimal and base64 are prefixed with " +
       "0x and 0s, respectively.\n" +
       "<path>: The file or directory.\n";
-    private final static Function<String, XAttrCodec> enValueOfFunc =
-        Enums.stringConverter(XAttrCodec.class);
 
     private String name = null;
     private boolean dump = false;
@@ -79,7 +74,7 @@ class XAttrCommands extends FsCommand {
       String en = StringUtils.popOptionWithArgument("-e", args);
       if (en != null) {
         try {
-          encoding = enValueOfFunc.apply(StringUtils.toUpperCase(en));
+          encoding = XAttrCodec.valueOf(StringUtils.toUpperCase(en));
         } catch (IllegalArgumentException e) {
           throw new IllegalArgumentException(
               "Invalid/unsupported encoding option specified: " + en);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/543aac9f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
index 835643b..9a71081 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
@@ -65,7 +65,6 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ComparisonChain;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import com.google.common.util.concurrent.Futures;
 
 /** 
  * Data storage information file.
@@ -1109,7 +1108,14 @@ public class DataStorage extends Storage {
     }
     linkWorkers.shutdown();
     for (Future<Void> f : futures) {
-      Futures.getChecked(f, IOException.class);
+      try {
+        f.get();
+      } catch (InterruptedException e) {
+        Thread.currentThread().interrupt();
+        throw new IOException(e);
+      } catch (ExecutionException e) {
+        throw new IOException(e);
+      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/543aac9f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/AclTransformation.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/AclTransformation.java
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/AclTransformation.java
index 3e4a319..4402e26 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/AclTransformation.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/AclTransformation.java
@@ -28,7 +28,6 @@ import java.util.EnumSet;
 import java.util.Iterator;
 import java.util.List;
 
-import com.google.common.base.MoreObjects;
 import com.google.common.collect.ComparisonChain;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -366,8 +365,10 @@ final class AclTransformation {
     for (AclEntry entry: aclBuilder) {
       scopeFound.add(entry.getScope());
       if (entry.getType() == GROUP || entry.getName() != null) {
-        FsAction scopeUnionPerms = MoreObjects.firstNonNull(
-          unionPerms.get(entry.getScope()), FsAction.NONE);
+        FsAction scopeUnionPerms = unionPerms.get(entry.getScope());
+        if (scopeUnionPerms == null) {
+          scopeUnionPerms = FsAction.NONE;
+        }
         unionPerms.put(entry.getScope(),
           scopeUnionPerms.or(entry.getPermission()));
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/543aac9f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
index db77d31..e7f2adb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
@@ -40,7 +40,6 @@ import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
 
 import com.google.common.base.Preconditions;
-import com.google.common.collect.ComparisonChain;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableListMultimap;
 import com.google.common.collect.Lists;
@@ -57,28 +56,18 @@ public class JournalSet implements JournalManager {
 
   static final Log LOG = LogFactory.getLog(FSEditLog.class);
 
+  // we want local logs to be ordered earlier in the collection, and true
+  // is considered larger than false, so reverse the comparator
   private static final Comparator<EditLogInputStream>
-    LOCAL_LOG_PREFERENCE_COMPARATOR = new Comparator<EditLogInputStream>() {
-    @Override
-    public int compare(EditLogInputStream elis1, EditLogInputStream elis2) {
-      // we want local logs to be ordered earlier in the collection, and true
-      // is considered larger than false, so we want to invert the booleans here
-      return ComparisonChain.start().compareFalseFirst(!elis1.isLocalLog(),
-          !elis2.isLocalLog()).result();
-    }
-  };
-  
-  static final public Comparator<EditLogInputStream>
-    EDIT_LOG_INPUT_STREAM_COMPARATOR = new Comparator<EditLogInputStream>() {
-      @Override
-      public int compare(EditLogInputStream a, EditLogInputStream b) {
-        return ComparisonChain.start().
-          compare(a.getFirstTxId(), b.getFirstTxId()).
-          compare(b.getLastTxId(), a.getLastTxId()).
-          result();
-      }
-    };
-  
+      LOCAL_LOG_PREFERENCE_COMPARATOR = Comparator
+      .comparing(EditLogInputStream::isLocalLog)
+      .reversed();
+
+  public static final Comparator<EditLogInputStream>
+      EDIT_LOG_INPUT_STREAM_COMPARATOR = Comparator
+      .comparing(EditLogInputStream::getFirstTxId)
+      .thenComparing(EditLogInputStream::getLastTxId);
+
   /**
    * Container for a JournalManager paired with its currently
    * active stream.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/543aac9f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/DirectExecutorService.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/DirectExecutorService.java
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/DirectExecutorService.java
new file mode 100644
index 0000000..15d2a13
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/DirectExecutorService.java
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.qjournal.client;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.RejectedExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * A very basic ExecutorService for running submitted Callables serially.
+ * Many bits of functionality are not implemented.
+ */
+public class DirectExecutorService implements ExecutorService {
+
+  private static class DirectFuture<V> implements Future<V> {
+    private V result = null;
+    private Exception ex = null;
+
+    DirectFuture(Callable<V> c) {
+      try {
+        result = c.call();
+      } catch (Exception e) {
+        ex = e;
+      }
+    }
+
+    @Override
+    public boolean cancel(boolean mayInterruptIfRunning) {
+      return false;
+    }
+
+    @Override
+    public boolean isCancelled() {
+      return false;
+    }
+
+    @Override
+    public boolean isDone() {
+      return true;
+    }
+
+    @Override
+    public V get() throws InterruptedException, ExecutionException {
+      if (ex != null) {
+        throw new ExecutionException(ex);
+      }
+      return result;
+    }
+
+    @Override
+    public V get(long timeout, TimeUnit unit)
+        throws InterruptedException, ExecutionException, TimeoutException {
+      return get();
+    }
+  }
+
+  private boolean isShutdown = false;
+
+  @Override
+  synchronized public void shutdown() {
+    isShutdown = true;
+  }
+
+  @Override
+  public List<Runnable> shutdownNow() {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public boolean isShutdown() {
+    return isShutdown;
+  }
+
+  @Override
+  synchronized public boolean isTerminated() {
+    return isShutdown;
+  }
+
+  @Override
+  public boolean awaitTermination(long timeout, TimeUnit unit)
+      throws InterruptedException {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  synchronized public <T> Future<T> submit(Callable<T> task) {
+    if (isShutdown) {
+      throw new RejectedExecutionException("ExecutorService was shutdown");
+    }
+    return new DirectFuture<>(task);
+  }
+
+  @Override
+  public <T> Future<T> submit(Runnable task, T result) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public Future<?> submit(Runnable task) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks)
+      throws InterruptedException {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks,
+      long timeout, TimeUnit unit) throws InterruptedException {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public <T> T invokeAny(Collection<? extends Callable<T>> tasks)
+      throws InterruptedException, ExecutionException {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public <T> T invokeAny(Collection<? extends Callable<T>> tasks, long timeout,
+      TimeUnit unit)
+      throws InterruptedException, ExecutionException, TimeoutException {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  synchronized public void execute(Runnable command) {
+    command.run();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/543aac9f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java
index c752f23..9ada40f6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java
@@ -65,7 +65,6 @@ import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
-import com.google.common.util.concurrent.MoreExecutors;
 
 
 public class TestQJMWithFaults {
@@ -402,7 +401,7 @@ public class TestQJMWithFaults {
 
     @Override
     protected ExecutorService createSingleThreadExecutor() {
-      return MoreExecutors.newDirectExecutorService();
+      return new DirectExecutorService();
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/543aac9f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java
index 9aada1d..8d92666 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java
@@ -66,7 +66,6 @@ import org.mockito.Mockito;
 import org.mockito.stubbing.Stubber;
 
 import com.google.common.collect.Lists;
-import com.google.common.util.concurrent.MoreExecutors;
 
 /**
  * Functional tests for QuorumJournalManager.
@@ -946,7 +945,7 @@ public class TestQuorumJournalManager {
           protected ExecutorService createSingleThreadExecutor() {
             // Don't parallelize calls to the quorum in the tests.
             // This makes the tests more deterministic.
-            return MoreExecutors.newDirectExecutorService();
+            return new DirectExecutorService();
           }
         };
         

http://git-wip-us.apache.org/repos/asf/hadoop/blob/543aac9f/hadoop-project/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index c9b6522..4097a0a 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -90,7 +90,7 @@
     <findbugs.version>3.0.0</findbugs.version>
     <spotbugs.version>3.1.0-RC1</spotbugs.version>
 
-    <guava.version>21.0</guava.version>
+    <guava.version>11.0.2</guava.version>
     <guice.version>4.0</guice.version>
     <joda-time.version>2.9.4</joda-time.version>
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/543aac9f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
index 0148d0e..6e46eb6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
@@ -51,7 +51,6 @@ import org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto;
 import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto;
 import org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto;
 
-import com.google.common.base.CharMatcher;
 import com.google.protobuf.TextFormat;
 
 @Private
@@ -286,7 +285,7 @@ extends ApplicationSubmissionContext {
             "maximum allowed length of a tag is " +
             YarnConfiguration.APPLICATION_MAX_TAG_LENGTH);
       }
-      if (!CharMatcher.ascii().matchesAllOf(tag)) {
+      if (!org.apache.commons.lang3.StringUtils.isAsciiPrintable(tag)) {
         throw new IllegalArgumentException("A tag can only have ASCII " +
             "characters! Invalid tag - " + tag);
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/543aac9f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
index 300bf3e..fad6fe2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
@@ -26,6 +26,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.HttpServer2;
@@ -33,7 +34,6 @@ import org.apache.hadoop.yarn.webapp.view.RobotsTextPage;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.CharMatcher;
 import com.google.common.base.Splitter;
 import com.google.common.collect.Lists;
 import com.google.inject.Provides;
@@ -275,7 +275,7 @@ public abstract class WebApp extends ServletModule {
 
   static String getPrefix(String pathSpec) {
     int start = 0;
-    while (CharMatcher.whitespace().matches(pathSpec.charAt(start))) {
+    while (StringUtils.isAnyBlank(Character.toString(pathSpec.charAt(start)))) {
       ++start;
     }
     if (pathSpec.charAt(start) != '/') {
@@ -291,7 +291,7 @@ public abstract class WebApp extends ServletModule {
     char c;
     do {
       c = pathSpec.charAt(--ci);
-    } while (c == '/' || CharMatcher.whitespace().matches(c));
+    } while (c == '/' || StringUtils.isAnyBlank(Character.toString(c)));
     return pathSpec.substring(start, ci + 1);
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/543aac9f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java
index e49c3ed..368832a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java
@@ -21,6 +21,8 @@ import java.util.Collections;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -68,7 +70,6 @@ import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
 import org.apache.hadoop.yarn.server.utils.BuilderUtils;
 
 import com.google.common.annotations.VisibleForTesting;
-import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.SettableFuture;
 
 /**
@@ -210,6 +211,17 @@ public class RMAppManager implements EventHandler<RMAppManagerEvent>,
     ApplicationSummary.logAppSummary(rmContext.getRMApps().get(appId));
   }
 
+  private static <V> V getChecked(Future<V> future) throws YarnException {
+    try {
+      return future.get();
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
+      throw new YarnException(e);
+    } catch (ExecutionException e) {
+      throw new YarnException(e);
+    }
+  }
+
   protected synchronized int getCompletedAppsListSize() {
     return this.completedApps.size(); 
   }
@@ -641,7 +653,7 @@ public class RMAppManager implements EventHandler<RMAppManagerEvent>,
       this.rmContext.getStateStore()
           .updateApplicationStateSynchronously(appState, false, future);
 
-      Futures.getChecked(future, YarnException.class);
+      getChecked(future);
 
       // update in-memory
       ((RMAppImpl) app).updateApplicationTimeout(newExpireTime);
@@ -678,7 +690,7 @@ public class RMAppManager implements EventHandler<RMAppManagerEvent>,
         return;
       }
 
-      Futures.getChecked(future, YarnException.class);
+      getChecked(future);
 
       // update in-memory
       ((RMAppImpl) app).setApplicationPriority(appPriority);
@@ -761,7 +773,7 @@ public class RMAppManager implements EventHandler<RMAppManagerEvent>,
         false, future);
 
     try {
-      Futures.getChecked(future, YarnException.class);
+      getChecked(future);
     } catch (YarnException ex) {
       if (!toSuppressException) {
         throw ex;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/543aac9f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java
index d29d34e..bc204cb 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java
@@ -58,7 +58,6 @@ import org.w3c.dom.NodeList;
 import org.w3c.dom.Text;
 import org.xml.sax.SAXException;
 
-import com.google.common.base.CharMatcher;
 import com.google.common.annotations.VisibleForTesting;
 
 @Public
@@ -465,7 +464,7 @@ public class AllocationFileLoaderService extends AbstractService {
       Set<String> reservableQueues,
       Set<String> nonPreemptableQueues)
       throws AllocationConfigurationException {
-    String queueName = CharMatcher.whitespace().trimFrom(
+    String queueName = FairSchedulerUtilities.trimQueueName(
         element.getAttribute("name"));
 
     if (queueName.contains(".")) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/543aac9f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerUtilities.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerUtilities.java
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerUtilities.java
new file mode 100644
index 0000000..f394a93
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerUtilities.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;
+
+/**
+ * Utility class for the Fair Scheduler.
+ */
+public final class FairSchedulerUtilities {
+
+  /**
+   * Table copied from Google Guava v19:
+   * com/google/common/base/CharMatcher.java
+   * <p>
+   * Licensed under the Apache License Version 2.0.
+   */
+  static final String WHITESPACE_TABLE =
+      "\u2002\u3000\r\u0085\u200A\u2005\u2000\u3000"
+          + "\u2029\u000B\u3000\u2008\u2003\u205F\u3000\u1680"
+          + "\u0009\u0020\u2006\u2001\u202F\u00A0\u000C\u2009"
+          + "\u3000\u2004\u3000\u3000\u2028\n\u2007\u3000";
+
+  private FairSchedulerUtilities() {
+    // private constructor because this is a utility class.
+  }
+
+  private static boolean isWhitespace(char c) {
+    for (int i = 0; i < WHITESPACE_TABLE.length(); i++) {
+      if (WHITESPACE_TABLE.charAt(i) == c) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  public static String trimQueueName(String name) {
+    if (name == null) {
+      return null;
+    }
+    int start = 0;
+    while (start < name.length()
+        && isWhitespace(name.charAt(start))
+        && start < name.length()) {
+      start++;
+    }
+    int end = name.length() - 1;
+    while (end >= 0
+        && isWhitespace(name.charAt(end))
+        && end > start) {
+      end--;
+    }
+    return name.substring(start, end+1);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/543aac9f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueueManager.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueueManager.java
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueueManager.java
index 5b006df..c08d13e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueueManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueueManager.java
@@ -38,7 +38,6 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.policies.FifoPolicy;
 import org.xml.sax.SAXException;
 
-import com.google.common.base.CharMatcher;
 import com.google.common.annotations.VisibleForTesting;
 import java.util.Iterator;
 import java.util.Set;
@@ -533,8 +532,9 @@ public class QueueManager {
   @VisibleForTesting
   boolean isQueueNameValid(String node) {
     // use the same white space trim as in QueueMetrics() otherwise things fail
-    // guava uses a different definition for whitespace than java.
+    // This needs to trim additional Unicode whitespace characters beyond what
+    // the built-in JDK methods consider whitespace. See YARN-5272.
     return !node.isEmpty() &&
-        node.equals(CharMatcher.whitespace().trimFrom(node));
+        node.equals(FairSchedulerUtilities.trimQueueName(node));
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/543aac9f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairSchedulerUtilities.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairSchedulerUtilities.java
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairSchedulerUtilities.java
new file mode 100644
index 0000000..37f686e
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairSchedulerUtilities.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;
+
+import org.junit.Test;
+
+import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairSchedulerUtilities.trimQueueName;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Tests for {@link FairSchedulerUtilities}.
+ */
+public class TestFairSchedulerUtilities {
+
+  @Test
+  public void testTrimQueueNameEquals() throws Exception {
+    final String[] equalsStrings = {
+        // no spaces
+        "a",
+        // leading spaces
+        " a",
+        " \u3000a",
+        "\u2002\u3000\r\u0085\u200A\u2005\u2000\u3000a",
+        "\u2029\u000B\u3000\u2008\u2003\u205F\u3000\u1680a",
+        "\u0009\u0020\u2006\u2001\u202F\u00A0\u000C\u2009a",
+        "\u3000\u2004\u3000\u3000\u2028\n\u2007\u3000a",
+        // trailing spaces
+        "a\u200A",
+        "a  \u0085 ",
+        // spaces on both sides
+        " a ",
+        "  a\u00A0",
+        "\u0009\u0020\u2006\u2001\u202F\u00A0\u000C\u2009a" +
+            "\u3000\u2004\u3000\u3000\u2028\n\u2007\u3000",
+    };
+    for (String s : equalsStrings) {
+      assertEquals("a", trimQueueName(s));
+    }
+  }
+
+  @Test
+  public void testTrimQueueNamesEmpty() throws Exception {
+    assertNull(trimQueueName(null));
+    final String spaces = "\u2002\u3000\r\u0085\u200A\u2005\u2000\u3000"
+        + "\u2029\u000B\u3000\u2008\u2003\u205F\u3000\u1680"
+        + "\u0009\u0020\u2006\u2001\u202F\u00A0\u000C\u2009"
+        + "\u3000\u2004\u3000\u3000\u2028\n\u2007\u3000";
+    assertTrue(trimQueueName(spaces).isEmpty());
+  }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org


Mime
View raw message