Posted to common-commits@hadoop.apache.org by ro...@apache.org on 2017/08/17 09:52:08 UTC

hadoop git commit: HADOOP-14386. Rewind trunk from Guava 21.0 back to Guava 11.0.2. Contributed by Vrushali C.

Repository: hadoop
Updated Branches:
  refs/heads/YARN-5355 315ff9bdc -> dad4163da


HADOOP-14386. Rewind trunk from Guava 21.0 back to Guava 11.0.2. Contributed by Vrushali C.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/dad4163d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/dad4163d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/dad4163d

Branch: refs/heads/YARN-5355
Commit: dad4163dadc7901e068dfb5856d1023c49588063
Parents: 315ff9b
Author: Rohith Sharma K S <ro...@apache.org>
Authored: Thu Aug 17 15:19:36 2017 +0530
Committer: Rohith Sharma K S <ro...@apache.org>
Committed: Thu Aug 17 15:19:36 2017 +0530

----------------------------------------------------------------------
 .../apache/hadoop/fs/shell/XAttrCommands.java   |   7 +-
 .../apache/hadoop/metrics2/AbstractMetric.java  |  16 +-
 .../org/apache/hadoop/metrics2/MetricsTag.java  |  13 +-
 .../metrics2/impl/AbstractMetricsRecord.java    |  16 +-
 .../org/apache/hadoop/metrics2/impl/MsInfo.java |   9 +-
 .../hadoop/metrics2/lib/MetricsInfoImpl.java    |  11 +-
 .../hadoop/metrics2/lib/MetricsRegistry.java    |  20 +--
 .../hadoop/metrics2/source/JvmMetricsInfo.java  |   9 +-
 .../hadoop/metrics2/util/MetricsCache.java      |  18 +--
 .../hdfs/server/datanode/DataStorage.java       |  10 +-
 .../hdfs/server/namenode/AclTransformation.java |   7 +-
 .../hadoop/hdfs/server/namenode/JournalSet.java |  33 ++--
 .../qjournal/client/DirectExecutorService.java  | 154 +++++++++++++++++++
 .../hdfs/qjournal/client/TestQJMWithFaults.java |   3 +-
 .../client/TestQuorumJournalManager.java        |   3 +-
 hadoop-project/pom.xml                          |   2 +-
 .../hadoop/metrics2/impl/TestKafkaMetrics.java  |   8 +-
 .../pb/ApplicationSubmissionContextPBImpl.java  |   3 +-
 .../org/apache/hadoop/yarn/webapp/WebApp.java   |   6 +-
 .../server/resourcemanager/RMAppManager.java    |  20 ++-
 .../fair/AllocationFileLoaderService.java       |   3 +-
 .../scheduler/fair/FairSchedulerUtilities.java  |  69 +++++++++
 .../scheduler/fair/QueueManager.java            |   6 +-
 .../fair/TestFairSchedulerUtilities.java        |  67 ++++++++
 24 files changed, 407 insertions(+), 106 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
index 4505aa9..6301776 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
@@ -20,12 +20,9 @@ package org.apache.hadoop.fs.shell;
 import java.io.IOException;
 import java.util.Iterator;
 import java.util.LinkedList;
-import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import com.google.common.base.Enums;
-import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
 
 import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -66,8 +63,6 @@ class XAttrCommands extends FsCommand {
       " and values encoded as hexadecimal and base64 are prefixed with " +
       "0x and 0s, respectively.\n" +
       "<path>: The file or directory.\n";
-    private final static Function<String, XAttrCodec> enValueOfFunc =
-        Enums.stringConverter(XAttrCodec.class);
 
     private String name = null;
     private boolean dump = false;
@@ -79,7 +74,7 @@ class XAttrCommands extends FsCommand {
       String en = StringUtils.popOptionWithArgument("-e", args);
       if (en != null) {
         try {
-          encoding = enValueOfFunc.apply(StringUtils.toUpperCase(en));
+          encoding = XAttrCodec.valueOf(StringUtils.toUpperCase(en));
         } catch (IllegalArgumentException e) {
           throw new IllegalArgumentException(
               "Invalid/unsupported encoding option specified: " + en);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/AbstractMetric.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/AbstractMetric.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/AbstractMetric.java
index 0605156..e2574f6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/AbstractMetric.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/AbstractMetric.java
@@ -18,13 +18,14 @@
 
 package org.apache.hadoop.metrics2;
 
-import com.google.common.base.MoreObjects;
 import com.google.common.base.Objects;
-import static com.google.common.base.Preconditions.*;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
+import java.util.StringJoiner;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
 /**
  * The immutable metric
  */
@@ -84,10 +85,11 @@ public abstract class AbstractMetric implements MetricsInfo {
     return Objects.hashCode(info, value());
   }
 
-  @Override public String toString() {
-    return MoreObjects.toStringHelper(this)
-        .add("info", info)
-        .add("value", value())
+  @Override
+  public String toString() {
+    return new StringJoiner(", ", this.getClass().getSimpleName() + "{", "}")
+        .add("info=" + info)
+        .add("value=" + value())
         .toString();
   }
 }
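
The same toString() rewrite, from Guava's MoreObjects.toStringHelper to java.util.StringJoiner, recurs in the metrics classes below. A minimal self-contained sketch of the pattern, using a made-up Point class:

    import java.util.StringJoiner;

    public class ToStringSketch {
      static class Point {
        private final int x, y;
        Point(int x, int y) { this.x = x; this.y = y; }

        @Override
        public String toString() {
          // Produces the same ClassName{field=value, ...} shape as the diff above.
          return new StringJoiner(", ", getClass().getSimpleName() + "{", "}")
              .add("x=" + x)
              .add("y=" + y)
              .toString();
        }
      }

      public static void main(String[] args) {
        System.out.println(new Point(1, 2));  // Point{x=1, y=2}
      }
    }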

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsTag.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsTag.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsTag.java
index 68b0737..db8a5d9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsTag.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsTag.java
@@ -18,13 +18,14 @@
 
 package org.apache.hadoop.metrics2;
 
-import com.google.common.base.MoreObjects;
 import com.google.common.base.Objects;
-import static com.google.common.base.Preconditions.*;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
+import java.util.StringJoiner;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
 /**
  * Immutable tag for metrics (for grouping on host/queue/username etc.)
  */
@@ -81,9 +82,9 @@ public class MetricsTag implements MetricsInfo {
   }
 
   @Override public String toString() {
-    return MoreObjects.toStringHelper(this)
-        .add("info", info)
-        .add("value", value())
+    return new StringJoiner(", ", this.getClass().getSimpleName() + "{", "}")
+        .add("info=" + info)
+        .add("value=" + value())
         .toString();
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/AbstractMetricsRecord.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/AbstractMetricsRecord.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/AbstractMetricsRecord.java
index fec29c2..a4632c6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/AbstractMetricsRecord.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/AbstractMetricsRecord.java
@@ -18,12 +18,12 @@
 
 package org.apache.hadoop.metrics2.impl;
 
-import com.google.common.base.MoreObjects;
 import com.google.common.base.Objects;
 import com.google.common.collect.Iterables;
-
 import org.apache.hadoop.metrics2.MetricsRecord;
 
+import java.util.StringJoiner;
+
 abstract class AbstractMetricsRecord implements MetricsRecord {
 
   @Override public boolean equals(Object obj) {
@@ -44,12 +44,12 @@ abstract class AbstractMetricsRecord implements MetricsRecord {
   }
 
   @Override public String toString() {
-    return MoreObjects.toStringHelper(this)
-        .add("timestamp", timestamp())
-        .add("name", name())
-        .add("description", description())
-        .add("tags", tags())
-        .add("metrics", Iterables.toString(metrics()))
+    return new StringJoiner(", ", this.getClass().getSimpleName() + "{", "}")
+        .add("timestamp=" + timestamp())
+        .add("name=" + name())
+        .add("description=" + description())
+        .add("tags=" + tags())
+        .add("metrics=" + Iterables.toString(metrics()))
         .toString();
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MsInfo.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MsInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MsInfo.java
index 5de7edc..0bf5c78 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MsInfo.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MsInfo.java
@@ -18,11 +18,11 @@
 
 package org.apache.hadoop.metrics2.impl;
 
-import com.google.common.base.MoreObjects;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.MetricsInfo;
 
+import java.util.StringJoiner;
+
 /**
  * Metrics system related metrics info instances
  */
@@ -48,8 +48,9 @@ public enum MsInfo implements MetricsInfo {
   }
 
   @Override public String toString() {
-    return MoreObjects.toStringHelper(this)
-        .add("name", name()).add("description", desc)
+    return new StringJoiner(", ", this.getClass().getSimpleName() + "{", "}")
+        .add("name=" + name())
+        .add("description=" + desc)
         .toString();
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsInfoImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsInfoImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsInfoImpl.java
index 054f211..e3adc82 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsInfoImpl.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsInfoImpl.java
@@ -18,11 +18,13 @@
 
 package org.apache.hadoop.metrics2.lib;
 
-import com.google.common.base.MoreObjects;
 import com.google.common.base.Objects;
-import static com.google.common.base.Preconditions.*;
 import org.apache.hadoop.metrics2.MetricsInfo;
 
+import java.util.StringJoiner;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
 /**
  * Making implementing metric info a little easier
  */
@@ -56,8 +58,9 @@ class MetricsInfoImpl implements MetricsInfo {
   }
 
   @Override public String toString() {
-    return MoreObjects.toStringHelper(this)
-        .add("name", name).add("description", description)
+    return new StringJoiner(", ", this.getClass().getSimpleName() + "{", "}")
+        .add("name=" + name)
+        .add("description=" + description)
         .toString();
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsRegistry.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsRegistry.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsRegistry.java
index 7070869..9727954 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsRegistry.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsRegistry.java
@@ -18,20 +18,19 @@
 
 package org.apache.hadoop.metrics2.lib;
 
-import java.util.Collection;
-import java.util.Map;
-
 import com.google.common.collect.Maps;
-import com.google.common.base.MoreObjects;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.MetricsException;
+import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.impl.MsInfo;
 
+import java.util.Collection;
+import java.util.Map;
+import java.util.StringJoiner;
+
 /**
  * An optional metrics registry class for creating and maintaining a
  * collection of MetricsMutables, making writing metrics source easier.
@@ -440,9 +439,12 @@ public class MetricsRegistry {
     }
   }
 
-  @Override public String toString() {
-    return MoreObjects.toStringHelper(this)
-        .add("info", metricsInfo).add("tags", tags()).add("metrics", metrics())
+  @Override
+  public String toString() {
+    return new StringJoiner(", ", this.getClass().getSimpleName() + "{", "}")
+        .add("info=" + metricsInfo.toString())
+        .add("tags=" + tags())
+        .add("metrics=" + metrics())
         .toString();
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java
index 59a79fd..8da6785 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.metrics2.source;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.MetricsInfo;
 
-import com.google.common.base.MoreObjects;
+import java.util.StringJoiner;
 
 /**
  * JVM and logging related metrics info instances
@@ -60,8 +60,9 @@ public enum JvmMetricsInfo implements MetricsInfo {
   @Override public String description() { return desc; }
 
   @Override public String toString() {
-    return MoreObjects.toStringHelper(this)
-      .add("name", name()).add("description", desc)
-      .toString();
+    return new StringJoiner(", ", this.getClass().getSimpleName() + "{", "}")
+        .add("name=" + name())
+        .add("description=" + desc)
+        .toString();
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java
index 753e307..cfd126c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java
@@ -18,11 +18,7 @@
 
 package org.apache.hadoop.metrics2.util;
 
-import java.util.Collection;
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.Set;
-
+import com.google.common.collect.Maps;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -31,8 +27,11 @@ import org.apache.hadoop.metrics2.AbstractMetric;
 import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
 
-import com.google.common.base.MoreObjects;
-import com.google.common.collect.Maps;
+import java.util.Collection;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Set;
+import java.util.StringJoiner;
 
 /**
  * A metrics cache for sinks that don't support sparse updates.
@@ -127,8 +126,9 @@ public class MetricsCache {
     }
 
     @Override public String toString() {
-      return MoreObjects.toStringHelper(this)
-          .add("tags", tags).add("metrics", metrics)
+      return new StringJoiner(", ", this.getClass().getSimpleName() + "{", "}")
+          .add("tags=" + tags)
+          .add("metrics=" + metrics)
           .toString();
     }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
index 835643b..9a71081 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
@@ -65,7 +65,6 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ComparisonChain;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import com.google.common.util.concurrent.Futures;
 
 /** 
  * Data storage information file.
@@ -1109,7 +1108,14 @@ public class DataStorage extends Storage {
     }
     linkWorkers.shutdown();
     for (Future<Void> f : futures) {
-      Futures.getChecked(f, IOException.class);
+      try {
+        f.get();
+      } catch (InterruptedException e) {
+        Thread.currentThread().interrupt();
+        throw new IOException(e);
+      } catch (ExecutionException e) {
+        throw new IOException(e);
+      }
     }
   }
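
Futures.getChecked is not available in Guava 11.0.2, so the call is replaced with a plain try/catch around Future.get(). The same pattern appears again in RMAppManager further down. A rough standalone sketch of the idea (the helper name and sample task are illustrative):

    import java.io.IOException;
    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;

    public class GetCheckedSketch {
      // Waits for the future and rethrows any failure as an IOException.
      static <V> V getChecked(Future<V> future) throws IOException {
        try {
          return future.get();
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();  // preserve the interrupt flag
          throw new IOException(e);
        } catch (ExecutionException e) {
          throw new IOException(e);
        }
      }

      public static void main(String[] args) throws IOException {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        Future<String> f = pool.submit(() -> "hardlinked");
        System.out.println(getChecked(f));
        pool.shutdown();
      }
    }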
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/AclTransformation.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/AclTransformation.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/AclTransformation.java
index 3e4a319..4402e26 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/AclTransformation.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/AclTransformation.java
@@ -28,7 +28,6 @@ import java.util.EnumSet;
 import java.util.Iterator;
 import java.util.List;
 
-import com.google.common.base.MoreObjects;
 import com.google.common.collect.ComparisonChain;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -366,8 +365,10 @@ final class AclTransformation {
     for (AclEntry entry: aclBuilder) {
       scopeFound.add(entry.getScope());
       if (entry.getType() == GROUP || entry.getName() != null) {
-        FsAction scopeUnionPerms = MoreObjects.firstNonNull(
-          unionPerms.get(entry.getScope()), FsAction.NONE);
+        FsAction scopeUnionPerms = unionPerms.get(entry.getScope());
+        if (scopeUnionPerms == null) {
+          scopeUnionPerms = FsAction.NONE;
+        }
         unionPerms.put(entry.getScope(),
           scopeUnionPerms.or(entry.getPermission()));
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
index db77d31..e7f2adb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
@@ -40,7 +40,6 @@ import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
 
 import com.google.common.base.Preconditions;
-import com.google.common.collect.ComparisonChain;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableListMultimap;
 import com.google.common.collect.Lists;
@@ -57,28 +56,18 @@ public class JournalSet implements JournalManager {
 
   static final Log LOG = LogFactory.getLog(FSEditLog.class);
 
+  // we want local logs to be ordered earlier in the collection, and true
+  // is considered larger than false, so reverse the comparator
   private static final Comparator<EditLogInputStream>
-    LOCAL_LOG_PREFERENCE_COMPARATOR = new Comparator<EditLogInputStream>() {
-    @Override
-    public int compare(EditLogInputStream elis1, EditLogInputStream elis2) {
-      // we want local logs to be ordered earlier in the collection, and true
-      // is considered larger than false, so we want to invert the booleans here
-      return ComparisonChain.start().compareFalseFirst(!elis1.isLocalLog(),
-          !elis2.isLocalLog()).result();
-    }
-  };
-  
-  static final public Comparator<EditLogInputStream>
-    EDIT_LOG_INPUT_STREAM_COMPARATOR = new Comparator<EditLogInputStream>() {
-      @Override
-      public int compare(EditLogInputStream a, EditLogInputStream b) {
-        return ComparisonChain.start().
-          compare(a.getFirstTxId(), b.getFirstTxId()).
-          compare(b.getLastTxId(), a.getLastTxId()).
-          result();
-      }
-    };
-  
+      LOCAL_LOG_PREFERENCE_COMPARATOR = Comparator
+      .comparing(EditLogInputStream::isLocalLog)
+      .reversed();
+
+  public static final Comparator<EditLogInputStream>
+      EDIT_LOG_INPUT_STREAM_COMPARATOR = Comparator
+      .comparing(EditLogInputStream::getFirstTxId)
+      .thenComparing(EditLogInputStream::getLastTxId);
+
   /**
    * Container for a JournalManager paired with its currently
    * active stream.
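
A standalone sketch of the ComparisonChain-to-Comparator rewrite above, with a toy Log class standing in for EditLogInputStream (the comparator chaining is the point here, not the exact key set):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Comparator;
    import java.util.List;

    public class ComparatorSketch {
      // Toy stand-in for EditLogInputStream.
      static class Log {
        private final long firstTxId;
        private final boolean local;
        Log(long firstTxId, boolean local) { this.firstTxId = firstTxId; this.local = local; }
        long getFirstTxId() { return firstTxId; }
        boolean isLocal() { return local; }
        @Override public String toString() { return firstTxId + (local ? "(local)" : "(remote)"); }
      }

      public static void main(String[] args) {
        // Boolean's natural order is false < true, so reversed() sorts local logs first.
        Comparator<Log> localFirst = Comparator.comparing(Log::isLocal).reversed();
        // thenComparing replaces the second compare(...) of a ComparisonChain.
        Comparator<Log> byTxId = Comparator.comparing(Log::getFirstTxId)
            .thenComparing(Log::isLocal);

        List<Log> logs = new ArrayList<>(Arrays.asList(new Log(7, false), new Log(3, true)));
        logs.sort(localFirst.thenComparing(byTxId));
        System.out.println(logs);  // [3(local), 7(remote)]
      }
    }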

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/DirectExecutorService.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/DirectExecutorService.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/DirectExecutorService.java
new file mode 100644
index 0000000..15d2a13
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/DirectExecutorService.java
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.qjournal.client;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.RejectedExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * A very basic ExecutorService for running submitted Callables serially.
+ * Many bits of functionality are not implemented.
+ */
+public class DirectExecutorService implements ExecutorService {
+
+  private static class DirectFuture<V> implements Future<V> {
+    private V result = null;
+    private Exception ex = null;
+
+    DirectFuture(Callable<V> c) {
+      try {
+        result = c.call();
+      } catch (Exception e) {
+        ex = e;
+      }
+    }
+
+    @Override
+    public boolean cancel(boolean mayInterruptIfRunning) {
+      return false;
+    }
+
+    @Override
+    public boolean isCancelled() {
+      return false;
+    }
+
+    @Override
+    public boolean isDone() {
+      return true;
+    }
+
+    @Override
+    public V get() throws InterruptedException, ExecutionException {
+      if (ex != null) {
+        throw new ExecutionException(ex);
+      }
+      return result;
+    }
+
+    @Override
+    public V get(long timeout, TimeUnit unit)
+        throws InterruptedException, ExecutionException, TimeoutException {
+      return get();
+    }
+  }
+
+  private boolean isShutdown = false;
+
+  @Override
+  synchronized public void shutdown() {
+    isShutdown = true;
+  }
+
+  @Override
+  public List<Runnable> shutdownNow() {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public boolean isShutdown() {
+    return isShutdown;
+  }
+
+  @Override
+  synchronized public boolean isTerminated() {
+    return isShutdown;
+  }
+
+  @Override
+  public boolean awaitTermination(long timeout, TimeUnit unit)
+      throws InterruptedException {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  synchronized public <T> Future<T> submit(Callable<T> task) {
+    if (isShutdown) {
+      throw new RejectedExecutionException("ExecutorService was shutdown");
+    }
+    return new DirectFuture<>(task);
+  }
+
+  @Override
+  public <T> Future<T> submit(Runnable task, T result) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public Future<?> submit(Runnable task) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks)
+      throws InterruptedException {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks,
+      long timeout, TimeUnit unit) throws InterruptedException {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public <T> T invokeAny(Collection<? extends Callable<T>> tasks)
+      throws InterruptedException, ExecutionException {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public <T> T invokeAny(Collection<? extends Callable<T>> tasks, long timeout,
+      TimeUnit unit)
+      throws InterruptedException, ExecutionException, TimeoutException {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  synchronized public void execute(Runnable command) {
+    command.run();
+  }
+}
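
This test-only DirectExecutorService runs each submitted Callable synchronously on the calling thread, which is what the removed MoreExecutors.newDirectExecutorService() provided. A rough usage sketch, assuming the class above is visible on the classpath:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Future;

    import org.apache.hadoop.hdfs.qjournal.client.DirectExecutorService;

    public class DirectExecutorSketch {
      public static void main(String[] args) throws Exception {
        ExecutorService executor = new DirectExecutorService();

        // The Callable runs inside submit(), so the Future is already
        // complete when it is returned.
        Future<Integer> f = executor.submit(() -> 21 * 2);
        System.out.println(f.isDone());  // true
        System.out.println(f.get());     // 42

        executor.shutdown();
        System.out.println(executor.isShutdown());  // true
      }
    }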

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java
index c752f23..9ada40f6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java
@@ -65,7 +65,6 @@ import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
-import com.google.common.util.concurrent.MoreExecutors;
 
 
 public class TestQJMWithFaults {
@@ -402,7 +401,7 @@ public class TestQJMWithFaults {
 
     @Override
     protected ExecutorService createSingleThreadExecutor() {
-      return MoreExecutors.newDirectExecutorService();
+      return new DirectExecutorService();
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java
index 9aada1d..8d92666 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java
@@ -66,7 +66,6 @@ import org.mockito.Mockito;
 import org.mockito.stubbing.Stubber;
 
 import com.google.common.collect.Lists;
-import com.google.common.util.concurrent.MoreExecutors;
 
 /**
  * Functional tests for QuorumJournalManager.
@@ -946,7 +945,7 @@ public class TestQuorumJournalManager {
           protected ExecutorService createSingleThreadExecutor() {
             // Don't parallelize calls to the quorum in the tests.
             // This makes the tests more deterministic.
-            return MoreExecutors.newDirectExecutorService();
+            return new DirectExecutorService();
           }
         };
         

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-project/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index af3b1d5..5ce7964 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -520,7 +520,7 @@
       <dependency>
         <groupId>com.google.guava</groupId>
         <artifactId>guava</artifactId>
-        <version>21.0</version>
+        <version>11.0.2</version>
       </dependency>
       <dependency>
         <groupId>com.google.code.gson</groupId>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-tools/hadoop-kafka/src/test/java/org/apache/hadoop/metrics2/impl/TestKafkaMetrics.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-kafka/src/test/java/org/apache/hadoop/metrics2/impl/TestKafkaMetrics.java b/hadoop-tools/hadoop-kafka/src/test/java/org/apache/hadoop/metrics2/impl/TestKafkaMetrics.java
index 8479298..cf2d783 100644
--- a/hadoop-tools/hadoop-kafka/src/test/java/org/apache/hadoop/metrics2/impl/TestKafkaMetrics.java
+++ b/hadoop-tools/hadoop-kafka/src/test/java/org/apache/hadoop/metrics2/impl/TestKafkaMetrics.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.metrics2.impl;
 
-import com.google.common.base.MoreObjects;
 import com.google.common.collect.Lists;
 import org.apache.commons.configuration2.SubsetConfiguration;
 import org.apache.hadoop.metrics2.AbstractMetric;
@@ -40,6 +39,7 @@ import org.slf4j.LoggerFactory;
 import java.net.InetAddress;
 import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.StringJoiner;
 import java.util.concurrent.Future;
 
 import static org.junit.Assert.assertEquals;
@@ -74,8 +74,10 @@ public class TestKafkaMetrics {
 
     @Override
     public String toString() {
-      return MoreObjects.toStringHelper(this).add("name", name())
-          .add("description", desc).toString();
+      return new StringJoiner(", ", this.getClass().getSimpleName() + "{", "}")
+          .add("name=" + name())
+          .add("description=" + desc)
+          .toString();
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
index 0148d0e..6e46eb6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
@@ -51,7 +51,6 @@ import org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto;
 import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto;
 import org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto;
 
-import com.google.common.base.CharMatcher;
 import com.google.protobuf.TextFormat;
 
 @Private
@@ -286,7 +285,7 @@ extends ApplicationSubmissionContext {
             "maximum allowed length of a tag is " +
             YarnConfiguration.APPLICATION_MAX_TAG_LENGTH);
       }
-      if (!CharMatcher.ascii().matchesAllOf(tag)) {
+      if (!org.apache.commons.lang3.StringUtils.isAsciiPrintable(tag)) {
         throw new IllegalArgumentException("A tag can only have ASCII " +
             "characters! Invalid tag - " + tag);
       }
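
A short sketch of the replacement tag check. StringUtils.isAsciiPrintable is slightly stricter than Guava's CharMatcher.ascii(), since it also rejects ASCII control characters. This assumes commons-lang3 on the classpath, as the diff itself does:

    import org.apache.commons.lang3.StringUtils;

    public class AsciiTagSketch {
      static void validateTag(String tag) {
        // Rejects anything outside printable ASCII (0x20 to 0x7E).
        if (!StringUtils.isAsciiPrintable(tag)) {
          throw new IllegalArgumentException("A tag can only have ASCII " +
              "characters! Invalid tag - " + tag);
        }
      }

      public static void main(String[] args) {
        validateTag("mapreduce");               // accepted
        try {
          validateTag("t\u00e9st");             // non-ASCII, rejected
        } catch (IllegalArgumentException expected) {
          System.out.println(expected.getMessage());
        }
      }
    }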

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
index 300bf3e..fad6fe2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
@@ -26,6 +26,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.HttpServer2;
@@ -33,7 +34,6 @@ import org.apache.hadoop.yarn.webapp.view.RobotsTextPage;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.CharMatcher;
 import com.google.common.base.Splitter;
 import com.google.common.collect.Lists;
 import com.google.inject.Provides;
@@ -275,7 +275,7 @@ public abstract class WebApp extends ServletModule {
 
   static String getPrefix(String pathSpec) {
     int start = 0;
-    while (CharMatcher.whitespace().matches(pathSpec.charAt(start))) {
+    while (StringUtils.isAnyBlank(Character.toString(pathSpec.charAt(start)))) {
       ++start;
     }
     if (pathSpec.charAt(start) != '/') {
@@ -291,7 +291,7 @@ public abstract class WebApp extends ServletModule {
     char c;
     do {
       c = pathSpec.charAt(--ci);
-    } while (c == '/' || CharMatcher.whitespace().matches(c));
+    } while (c == '/' || StringUtils.isAnyBlank(Character.toString(c)));
     return pathSpec.substring(start, ci + 1);
   }
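
For reference, a minimal sketch of the single-character whitespace test used above in place of CharMatcher.whitespace().matches(c). StringUtils.isAnyBlank is backed by Character.isWhitespace, so the whitespace definition is the JDK's rather than Guava's:

    import org.apache.commons.lang3.StringUtils;

    public class WhitespaceCheckSketch {
      // True when the single character c is whitespace per Character.isWhitespace.
      static boolean isWhitespaceChar(char c) {
        return StringUtils.isAnyBlank(Character.toString(c));
      }

      public static void main(String[] args) {
        String pathSpec = "  /cluster/*";
        int start = 0;
        while (isWhitespaceChar(pathSpec.charAt(start))) {
          ++start;
        }
        System.out.println(pathSpec.substring(start));  // /cluster/*
      }
    }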
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java
index e0cff7b..6fb7cd8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java
@@ -21,6 +21,8 @@ import java.util.Collections;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -68,7 +70,6 @@ import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
 import org.apache.hadoop.yarn.server.utils.BuilderUtils;
 
 import com.google.common.annotations.VisibleForTesting;
-import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.SettableFuture;
 
 /**
@@ -209,6 +210,17 @@ public class RMAppManager implements EventHandler<RMAppManagerEvent>,
     ApplicationSummary.logAppSummary(rmContext.getRMApps().get(appId));
   }
 
+  private static <V> V getChecked(Future<V> future) throws YarnException {
+    try {
+      return future.get();
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
+      throw new YarnException(e);
+    } catch (ExecutionException e) {
+      throw new YarnException(e);
+    }
+  }
+
   protected synchronized int getCompletedAppsListSize() {
     return this.completedApps.size(); 
   }
@@ -640,7 +652,7 @@ public class RMAppManager implements EventHandler<RMAppManagerEvent>,
       this.rmContext.getStateStore()
           .updateApplicationStateSynchronously(appState, false, future);
 
-      Futures.getChecked(future, YarnException.class);
+      getChecked(future);
 
       // update in-memory
       ((RMAppImpl) app).updateApplicationTimeout(newExpireTime);
@@ -677,7 +689,7 @@ public class RMAppManager implements EventHandler<RMAppManagerEvent>,
         return;
       }
 
-      Futures.getChecked(future, YarnException.class);
+      getChecked(future);
 
       // update in-memory
       ((RMAppImpl) app).setApplicationPriority(appPriority);
@@ -760,7 +772,7 @@ public class RMAppManager implements EventHandler<RMAppManagerEvent>,
         false, future);
 
     try {
-      Futures.getChecked(future, YarnException.class);
+      getChecked(future);
     } catch (YarnException ex) {
       if (!toSuppressException) {
         throw ex;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java
index d29d34e..bc204cb 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java
@@ -58,7 +58,6 @@ import org.w3c.dom.NodeList;
 import org.w3c.dom.Text;
 import org.xml.sax.SAXException;
 
-import com.google.common.base.CharMatcher;
 import com.google.common.annotations.VisibleForTesting;
 
 @Public
@@ -465,7 +464,7 @@ public class AllocationFileLoaderService extends AbstractService {
       Set<String> reservableQueues,
       Set<String> nonPreemptableQueues)
       throws AllocationConfigurationException {
-    String queueName = CharMatcher.whitespace().trimFrom(
+    String queueName = FairSchedulerUtilities.trimQueueName(
         element.getAttribute("name"));
 
     if (queueName.contains(".")) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerUtilities.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerUtilities.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerUtilities.java
new file mode 100644
index 0000000..f394a93
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerUtilities.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;
+
+/**
+ * Utility class for the Fair Scheduler.
+ */
+public final class FairSchedulerUtilities {
+
+  /**
+   * Table copied from Google Guava v19:
+   * com/google/common/base/CharMatcher.java
+   * <p>
+   * Licensed under the Apache License Version 2.0.
+   */
+  static final String WHITESPACE_TABLE =
+      "\u2002\u3000\r\u0085\u200A\u2005\u2000\u3000"
+          + "\u2029\u000B\u3000\u2008\u2003\u205F\u3000\u1680"
+          + "\u0009\u0020\u2006\u2001\u202F\u00A0\u000C\u2009"
+          + "\u3000\u2004\u3000\u3000\u2028\n\u2007\u3000";
+
+  private FairSchedulerUtilities() {
+    // private constructor because this is a utility class.
+  }
+
+  private static boolean isWhitespace(char c) {
+    for (int i = 0; i < WHITESPACE_TABLE.length(); i++) {
+      if (WHITESPACE_TABLE.charAt(i) == c) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  public static String trimQueueName(String name) {
+    if (name == null) {
+      return null;
+    }
+    int start = 0;
+    while (start < name.length()
+        && isWhitespace(name.charAt(start))
+        && start < name.length()) {
+      start++;
+    }
+    int end = name.length() - 1;
+    while (end >= 0
+        && isWhitespace(name.charAt(end))
+        && end > start) {
+      end--;
+    }
+    return name.substring(start, end+1);
+  }
+
+}
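
A short usage sketch for the new helper, assuming the FairSchedulerUtilities class above is on the classpath. trimQueueName strips the wider, Guava-style set of Unicode whitespace characters from both ends of a queue name, which plain String.trim() leaves in place:

    import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairSchedulerUtilities;

    public class TrimQueueNameSketch {
      public static void main(String[] args) {
        // Leading ideographic space (\u3000) and trailing no-break space (\u00A0).
        String raw = "\u3000 root.default \u00A0";
        String trimmed = FairSchedulerUtilities.trimQueueName(raw);
        System.out.println("[" + trimmed + "]");     // [root.default]
        System.out.println("[" + raw.trim() + "]");  // ideographic space still present
      }
    }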

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueueManager.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueueManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueueManager.java
index 5b006df..c08d13e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueueManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueueManager.java
@@ -38,7 +38,6 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.policies.FifoPolicy;
 import org.xml.sax.SAXException;
 
-import com.google.common.base.CharMatcher;
 import com.google.common.annotations.VisibleForTesting;
 import java.util.Iterator;
 import java.util.Set;
@@ -533,8 +532,9 @@ public class QueueManager {
   @VisibleForTesting
   boolean isQueueNameValid(String node) {
     // use the same white space trim as in QueueMetrics() otherwise things fail
-    // guava uses a different definition for whitespace than java.
+    // This needs to trim additional Unicode whitespace characters beyond what
+    // the built-in JDK methods consider whitespace. See YARN-5272.
     return !node.isEmpty() &&
-        node.equals(CharMatcher.whitespace().trimFrom(node));
+        node.equals(FairSchedulerUtilities.trimQueueName(node));
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dad4163d/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairSchedulerUtilities.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairSchedulerUtilities.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairSchedulerUtilities.java
new file mode 100644
index 0000000..37f686e
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairSchedulerUtilities.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;
+
+import org.junit.Test;
+
+import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairSchedulerUtilities.trimQueueName;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Tests for {@link FairSchedulerUtilities}.
+ */
+public class TestFairSchedulerUtilities {
+
+  @Test
+  public void testTrimQueueNameEquals() throws Exception {
+    final String[] equalsStrings = {
+        // no spaces
+        "a",
+        // leading spaces
+        " a",
+        " \u3000a",
+        "\u2002\u3000\r\u0085\u200A\u2005\u2000\u3000a",
+        "\u2029\u000B\u3000\u2008\u2003\u205F\u3000\u1680a",
+        "\u0009\u0020\u2006\u2001\u202F\u00A0\u000C\u2009a",
+        "\u3000\u2004\u3000\u3000\u2028\n\u2007\u3000a",
+        // trailing spaces
+        "a\u200A",
+        "a  \u0085 ",
+        // spaces on both sides
+        " a ",
+        "  a\u00A0",
+        "\u0009\u0020\u2006\u2001\u202F\u00A0\u000C\u2009a" +
+            "\u3000\u2004\u3000\u3000\u2028\n\u2007\u3000",
+    };
+    for (String s : equalsStrings) {
+      assertEquals("a", trimQueueName(s));
+    }
+  }
+
+  @Test
+  public void testTrimQueueNamesEmpty() throws Exception {
+    assertNull(trimQueueName(null));
+    final String spaces = "\u2002\u3000\r\u0085\u200A\u2005\u2000\u3000"
+        + "\u2029\u000B\u3000\u2008\u2003\u205F\u3000\u1680"
+        + "\u0009\u0020\u2006\u2001\u202F\u00A0\u000C\u2009"
+        + "\u3000\u2004\u3000\u3000\u2028\n\u2007\u3000";
+    assertTrue(trimQueueName(spaces).isEmpty());
+  }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org