Posted to common-commits@hadoop.apache.org by ro...@apache.org on 2017/05/17 02:58:52 UTC

hadoop git commit: HADOOP-14412. HostsFileReader#getHostDetails is very expensive on large clusters. Contributed by Jason Lowe.

Repository: hadoop
Updated Branches:
  refs/heads/trunk ec21ce425 -> d87a63a90


HADOOP-14412. HostsFileReader#getHostDetails is very expensive on large clusters. Contributed by Jason Lowe.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d87a63a9
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d87a63a9
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d87a63a9

Branch: refs/heads/trunk
Commit: d87a63a9019d74a1c338c724e050952843a153e5
Parents: ec21ce4
Author: Rohith Sharma K S <ro...@apache.org>
Authored: Wed May 17 08:26:29 2017 +0530
Committer: Rohith Sharma K S <ro...@apache.org>
Committed: Wed May 17 08:27:45 2017 +0530

----------------------------------------------------------------------
 .../org/apache/hadoop/util/HostsFileReader.java | 263 ++++++++++---------
 .../apache/hadoop/util/TestHostsFileReader.java |  19 +-
 .../resourcemanager/NodesListManager.java       |  30 +--
 3 files changed, 161 insertions(+), 151 deletions(-)
----------------------------------------------------------------------
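
The patch replaces HostsFileReader's lock-guarded mutable fields (includes, excludes, includesFile, excludesFile) with a single immutable HostDetails object published through an AtomicReference: refresh builds a complete new snapshot and swaps it in, while readers take the current reference with no locking and no per-call copying. A minimal sketch of that copy-on-write snapshot pattern, using hypothetical names (SnapshotHolder, ConfigSnapshot) rather than the actual Hadoop classes:

  import java.util.Collections;
  import java.util.HashSet;
  import java.util.Set;
  import java.util.concurrent.atomic.AtomicReference;

  class SnapshotHolder {

    // Immutable view: once constructed it never changes, so readers need no lock.
    static final class ConfigSnapshot {
      final Set<String> includes;
      ConfigSnapshot(Set<String> includes) {
        this.includes = Collections.unmodifiableSet(new HashSet<>(includes));
      }
    }

    // Readers and writers share only this reference; a read is one volatile load.
    private final AtomicReference<ConfigSnapshot> current =
        new AtomicReference<>(new ConfigSnapshot(Collections.emptySet()));

    // Writer builds a complete new snapshot, then publishes it atomically.
    void refresh(Set<String> newIncludes) {
      current.set(new ConfigSnapshot(newIncludes));
    }

    // Reader gets a consistent view with no locking and no copying.
    ConfigSnapshot getSnapshot() {
      return current.get();
    }
  }

Because each snapshot is immutable, getHosts() and getExcludedHosts() can hand back the shared sets directly instead of copying every entry under a read lock on each call, which is where the savings on large clusters come from.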


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d87a63a9/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
index 2ef1ead..2913b87 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
@@ -20,13 +20,12 @@ package org.apache.hadoop.util;
 
 import java.io.*;
 import java.nio.charset.StandardCharsets;
+import java.util.Collections;
 import java.util.Set;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
 import java.util.Map;
+import java.util.concurrent.atomic.AtomicReference;
 
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
@@ -48,39 +47,26 @@ import org.xml.sax.SAXException;
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Unstable
 public class HostsFileReader {
-  private Set<String> includes;
-  // exclude host list with optional timeout.
-  // If the value is null, it indicates default timeout.
-  private Map<String, Integer> excludes;
-  private String includesFile;
-  private String excludesFile;
-  private WriteLock writeLock;
-  private ReadLock readLock;
-  
   private static final Log LOG = LogFactory.getLog(HostsFileReader.class);
 
-  public HostsFileReader(String inFile, 
+  private final AtomicReference<HostDetails> current;
+
+  public HostsFileReader(String inFile,
                          String exFile) throws IOException {
-    includes = new HashSet<String>();
-    excludes = new HashMap<String, Integer>();
-    includesFile = inFile;
-    excludesFile = exFile;
-    ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock();
-    this.writeLock = rwLock.writeLock();
-    this.readLock = rwLock.readLock();
-    refresh();
+    HostDetails hostDetails = new HostDetails(
+        inFile, Collections.emptySet(),
+        exFile, Collections.emptyMap());
+    current = new AtomicReference<>(hostDetails);
+    refresh(inFile, exFile);
   }
 
   @Private
   public HostsFileReader(String includesFile, InputStream inFileInputStream,
       String excludesFile, InputStream exFileInputStream) throws IOException {
-    includes = new HashSet<String>();
-    excludes = new HashMap<String, Integer>();
-    this.includesFile = includesFile;
-    this.excludesFile = excludesFile;
-    ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock();
-    this.writeLock = rwLock.writeLock();
-    this.readLock = rwLock.readLock();
+    HostDetails hostDetails = new HostDetails(
+        includesFile, Collections.emptySet(),
+        excludesFile, Collections.emptyMap());
+    current = new AtomicReference<>(hostDetails);
     refresh(inFileInputStream, exFileInputStream);
   }
 
@@ -126,12 +112,8 @@ public class HostsFileReader {
   }
 
   public void refresh() throws IOException {
-    this.writeLock.lock();
-    try {
-      refresh(includesFile, excludesFile);
-    } finally {
-      this.writeLock.unlock();
-    }
+    HostDetails hostDetails = current.get();
+    refresh(hostDetails.includesFile, hostDetails.excludesFile);
   }
 
   public static void readFileToMap(String type,
@@ -201,128 +183,163 @@ public class HostsFileReader {
     return (nodes.getLength() == 0)? null : nodes.item(0).getTextContent();
   }
 
-  public void refresh(String includeFiles, String excludeFiles)
+  public void refresh(String includesFile, String excludesFile)
       throws IOException {
     LOG.info("Refreshing hosts (include/exclude) list");
-    this.writeLock.lock();
-    try {
-      // update instance variables
-      updateFileNames(includeFiles, excludeFiles);
-      Set<String> newIncludes = new HashSet<String>();
-      Map<String, Integer> newExcludes = new HashMap<String, Integer>();
-      boolean switchIncludes = false;
-      boolean switchExcludes = false;
-      if (includeFiles != null && !includeFiles.isEmpty()) {
-        readFileToSet("included", includeFiles, newIncludes);
-        switchIncludes = true;
-      }
-      if (excludeFiles != null && !excludeFiles.isEmpty()) {
-        readFileToMap("excluded", excludeFiles, newExcludes);
-        switchExcludes = true;
-      }
-
-      if (switchIncludes) {
-        // switch the new hosts that are to be included
-        includes = newIncludes;
-      }
-      if (switchExcludes) {
-        // switch the excluded hosts
-        excludes = newExcludes;
-      }
-    } finally {
-      this.writeLock.unlock();
+    HostDetails oldDetails = current.get();
+    Set<String> newIncludes = oldDetails.includes;
+    Map<String, Integer> newExcludes = oldDetails.excludes;
+    if (includesFile != null && !includesFile.isEmpty()) {
+      newIncludes = new HashSet<>();
+      readFileToSet("included", includesFile, newIncludes);
+      newIncludes = Collections.unmodifiableSet(newIncludes);
+    }
+    if (excludesFile != null && !excludesFile.isEmpty()) {
+      newExcludes = new HashMap<>();
+      readFileToMap("excluded", excludesFile, newExcludes);
+      newExcludes = Collections.unmodifiableMap(newExcludes);
     }
+    HostDetails newDetails = new HostDetails(includesFile, newIncludes,
+        excludesFile, newExcludes);
+    current.set(newDetails);
   }
 
   @Private
   public void refresh(InputStream inFileInputStream,
       InputStream exFileInputStream) throws IOException {
     LOG.info("Refreshing hosts (include/exclude) list");
-    this.writeLock.lock();
-    try {
-      Set<String> newIncludes = new HashSet<String>();
-      Map<String, Integer> newExcludes = new HashMap<String, Integer>();
-      boolean switchIncludes = false;
-      boolean switchExcludes = false;
-      if (inFileInputStream != null) {
-        readFileToSetWithFileInputStream("included", includesFile,
-            inFileInputStream, newIncludes);
-        switchIncludes = true;
-      }
-      if (exFileInputStream != null) {
-        readFileToMapWithFileInputStream("excluded", excludesFile,
-            exFileInputStream, newExcludes);
-        switchExcludes = true;
-      }
-      if (switchIncludes) {
-        // switch the new hosts that are to be included
-        includes = newIncludes;
-      }
-      if (switchExcludes) {
-        // switch the excluded hosts
-        excludes = newExcludes;
-      }
-    } finally {
-      this.writeLock.unlock();
+    HostDetails oldDetails = current.get();
+    Set<String> newIncludes = oldDetails.includes;
+    Map<String, Integer> newExcludes = oldDetails.excludes;
+    if (inFileInputStream != null) {
+      newIncludes = new HashSet<>();
+      readFileToSetWithFileInputStream("included", oldDetails.includesFile,
+          inFileInputStream, newIncludes);
+      newIncludes = Collections.unmodifiableSet(newIncludes);
+    }
+    if (exFileInputStream != null) {
+      newExcludes = new HashMap<>();
+      readFileToMapWithFileInputStream("excluded", oldDetails.excludesFile,
+          exFileInputStream, newExcludes);
+      newExcludes = Collections.unmodifiableMap(newExcludes);
     }
+    HostDetails newDetails = new HostDetails(
+        oldDetails.includesFile, newIncludes,
+        oldDetails.excludesFile, newExcludes);
+    current.set(newDetails);
   }
 
   public Set<String> getHosts() {
-    this.readLock.lock();
-    try {
-      return includes;
-    } finally {
-      this.readLock.unlock();
-    }
+    HostDetails hostDetails = current.get();
+    return hostDetails.getIncludedHosts();
   }
 
   public Set<String> getExcludedHosts() {
-    this.readLock.lock();
-    try {
-      return excludes.keySet();
-    } finally {
-      this.readLock.unlock();
-    }
+    HostDetails hostDetails = current.get();
+    return hostDetails.getExcludedHosts();
   }
 
+  /**
+   * Retrieve an atomic view of the included and excluded hosts.
+   *
+   * @param includes set to populate with included hosts
+   * @param excludes set to populate with excluded hosts
+   * @deprecated use {@link #getHostDetails()} instead
+   */
+  @Deprecated
   public void getHostDetails(Set<String> includes, Set<String> excludes) {
-    this.readLock.lock();
-    try {
-      includes.addAll(this.includes);
-      excludes.addAll(this.excludes.keySet());
-    } finally {
-      this.readLock.unlock();
-    }
+    HostDetails hostDetails = current.get();
+    includes.addAll(hostDetails.getIncludedHosts());
+    excludes.addAll(hostDetails.getExcludedHosts());
   }
 
+  /**
+   * Retrieve an atomic view of the included and excluded hosts.
+   *
+   * @param includeHosts set to populate with included hosts
+   * @param excludeHosts map to populate with excluded hosts
+   * @deprecated use {@link #getHostDetails()} instead
+   */
+  @Deprecated
   public void getHostDetails(Set<String> includeHosts,
                              Map<String, Integer> excludeHosts) {
-    this.readLock.lock();
-    try {
-      includeHosts.addAll(this.includes);
-      excludeHosts.putAll(this.excludes);
-    } finally {
-      this.readLock.unlock();
-    }
+    HostDetails hostDetails = current.get();
+    includeHosts.addAll(hostDetails.getIncludedHosts());
+    excludeHosts.putAll(hostDetails.getExcludedMap());
+  }
+
+  /**
+   * Retrieve an atomic view of the included and excluded hosts.
+   *
+   * @return the included and excluded hosts
+   */
+  public HostDetails getHostDetails() {
+    return current.get();
   }
 
   public void setIncludesFile(String includesFile) {
     LOG.info("Setting the includes file to " + includesFile);
-    this.includesFile = includesFile;
+    HostDetails oldDetails = current.get();
+    HostDetails newDetails = new HostDetails(includesFile, oldDetails.includes,
+        oldDetails.excludesFile, oldDetails.excludes);
+    current.set(newDetails);
   }
   
   public void setExcludesFile(String excludesFile) {
     LOG.info("Setting the excludes file to " + excludesFile);
-    this.excludesFile = excludesFile;
+    HostDetails oldDetails = current.get();
+    HostDetails newDetails = new HostDetails(
+        oldDetails.includesFile, oldDetails.includes,
+        excludesFile, oldDetails.excludes);
+    current.set(newDetails);
   }
 
-  public void updateFileNames(String includeFiles, String excludeFiles) {
-    this.writeLock.lock();
-    try {
-      setIncludesFile(includeFiles);
-      setExcludesFile(excludeFiles);
-    } finally {
-      this.writeLock.unlock();
+  public void updateFileNames(String includesFile, String excludesFile) {
+    LOG.info("Setting the includes file to " + includesFile);
+    LOG.info("Setting the excludes file to " + excludesFile);
+    HostDetails oldDetails = current.get();
+    HostDetails newDetails = new HostDetails(includesFile, oldDetails.includes,
+        excludesFile, oldDetails.excludes);
+    current.set(newDetails);
+  }
+
+  /**
+   * An atomic view of the included and excluded hosts
+   */
+  public static class HostDetails {
+    private final String includesFile;
+    private final Set<String> includes;
+    private final String excludesFile;
+    // exclude host list with optional timeout.
+    // If the value is null, it indicates default timeout.
+    private final Map<String, Integer> excludes;
+
+    HostDetails(String includesFile, Set<String> includes,
+        String excludesFile, Map<String, Integer> excludes) {
+      this.includesFile = includesFile;
+      this.includes = includes;
+      this.excludesFile = excludesFile;
+      this.excludes = excludes;
+    }
+
+    public String getIncludesFile() {
+      return includesFile;
+    }
+
+    public Set<String> getIncludedHosts() {
+      return includes;
+    }
+
+    public String getExcludesFile() {
+      return excludesFile;
+    }
+
+    public Set<String> getExcludedHosts() {
+      return excludes.keySet();
+    }
+
+    public Map<String, Integer> getExcludedMap() {
+      return excludes;
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d87a63a9/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestHostsFileReader.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestHostsFileReader.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestHostsFileReader.java
index 5766591..2462114 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestHostsFileReader.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestHostsFileReader.java
@@ -20,12 +20,10 @@ package org.apache.hadoop.util;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.FileWriter;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Set;
 import java.util.Map;
 
 import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.util.HostsFileReader.HostDetails;
 import org.junit.*;
 
 import static org.junit.Assert.*;
@@ -121,11 +119,11 @@ public class TestHostsFileReader {
     assertTrue(hfp.getExcludedHosts().contains("node1"));
     assertTrue(hfp.getHosts().contains("node2"));
 
-    Set<String> hostsList = new HashSet<String>();
-    Set<String> excludeList = new HashSet<String>();
-    hfp.getHostDetails(hostsList, excludeList);
-    assertTrue(excludeList.contains("node1"));
-    assertTrue(hostsList.contains("node2"));
+    HostDetails hostDetails = hfp.getHostDetails();
+    assertTrue(hostDetails.getExcludedHosts().contains("node1"));
+    assertTrue(hostDetails.getIncludedHosts().contains("node2"));
+    assertEquals(newIncludesFile, hostDetails.getIncludesFile());
+    assertEquals(newExcludesFile, hostDetails.getExcludesFile());
   }
 
   /*
@@ -328,9 +326,8 @@ public class TestHostsFileReader {
     assertEquals(4, includesLen);
     assertEquals(9, excludesLen);
 
-    Set<String> includes = new HashSet<String>();
-    Map<String, Integer> excludes = new HashMap<String, Integer>();
-    hfp.getHostDetails(includes, excludes);
+    HostDetails hostDetails = hfp.getHostDetails();
+    Map<String, Integer> excludes = hostDetails.getExcludedMap();
     assertTrue(excludes.containsKey("host1"));
     assertTrue(excludes.containsKey("host2"));
     assertTrue(excludes.containsKey("host3"));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d87a63a9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/NodesListManager.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/NodesListManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/NodesListManager.java
index 7d69f93..edd173b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/NodesListManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/NodesListManager.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.yarn.server.resourcemanager;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
@@ -40,6 +39,7 @@ import org.apache.hadoop.net.Node;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.service.CompositeService;
 import org.apache.hadoop.util.HostsFileReader;
+import org.apache.hadoop.util.HostsFileReader.HostDetails;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.NodeId;
@@ -192,14 +192,11 @@ public class NodesListManager extends CompositeService implements
         conf.get(YarnConfiguration.RM_NODES_EXCLUDE_FILE_PATH,
             YarnConfiguration.DEFAULT_RM_NODES_EXCLUDE_FILE_PATH));
 
-    Set<String> hostsList = new HashSet<String>();
-    Set<String> excludeList = new HashSet<String>();
-    hostsReader.getHostDetails(hostsList, excludeList);
-
-    for (String include : hostsList) {
+    HostDetails hostDetails = hostsReader.getHostDetails();
+    for (String include : hostDetails.getIncludedHosts()) {
       LOG.debug("include: " + include);
     }
-    for (String exclude : excludeList) {
+    for (String exclude : hostDetails.getExcludedHosts()) {
       LOG.debug("exclude: " + exclude);
     }
   }
@@ -262,9 +259,9 @@ public class NodesListManager extends CompositeService implements
     // Nodes need to be decommissioned (graceful or forceful);
     List<RMNode> nodesToDecom = new ArrayList<RMNode>();
 
-    Set<String> includes = new HashSet<String>();
-    Map<String, Integer> excludes = new HashMap<String, Integer>();
-    hostsReader.getHostDetails(includes, excludes);
+    HostDetails hostDetails = hostsReader.getHostDetails();
+    Set<String> includes = hostDetails.getIncludedHosts();
+    Map<String, Integer> excludes = hostDetails.getExcludedMap();
 
     for (RMNode n : this.rmContext.getRMNodes().values()) {
       NodeState s = n.getState();
@@ -453,10 +450,9 @@ public class NodesListManager extends CompositeService implements
   }
 
   public boolean isValidNode(String hostName) {
-    Set<String> hostsList = new HashSet<String>();
-    Set<String> excludeList = new HashSet<String>();
-    hostsReader.getHostDetails(hostsList, excludeList);
-    return isValidNode(hostName, hostsList, excludeList);
+    HostDetails hostDetails = hostsReader.getHostDetails();
+    return isValidNode(hostName, hostDetails.getIncludedHosts(),
+        hostDetails.getExcludedHosts());
   }
 
   private boolean isValidNode(
@@ -563,9 +559,9 @@ public class NodesListManager extends CompositeService implements
   public boolean isUntrackedNode(String hostName) {
     String ip = resolver.resolve(hostName);
 
-    Set<String> hostsList = new HashSet<String>();
-    Set<String> excludeList = new HashSet<String>();
-    hostsReader.getHostDetails(hostsList, excludeList);
+    HostDetails hostDetails = hostsReader.getHostDetails();
+    Set<String> hostsList = hostDetails.getIncludedHosts();
+    Set<String> excludeList = hostDetails.getExcludedHosts();
 
     return !hostsList.isEmpty() && !hostsList.contains(hostName)
         && !hostsList.contains(ip) && !excludeList.contains(hostName)
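
The NodesListManager changes follow the same read pattern: take one HostDetails snapshot and evaluate both the include and exclude lists against it, so the two views cannot come from different refreshes. A short sketch of that call pattern as a standalone check (a simplification; the real isValidNode/isUntrackedNode logic also resolves the host's IP and the exclude map carries optional decommission timeouts):

  import java.util.Set;

  import org.apache.hadoop.util.HostsFileReader;
  import org.apache.hadoop.util.HostsFileReader.HostDetails;

  class HostCheckSketch {
    // Hypothetical helper, not part of the patch: decide whether a host may
    // register, using a single consistent snapshot of the include/exclude lists.
    static boolean isAllowed(HostsFileReader reader, String host) {
      HostDetails details = reader.getHostDetails();
      Set<String> includes = details.getIncludedHosts();
      Set<String> excludes = details.getExcludedHosts();
      // An empty include list means "allow everything not explicitly excluded".
      return (includes.isEmpty() || includes.contains(host))
          && !excludes.contains(host);
    }
  }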

