Posted to commits@ozone.apache.org by el...@apache.org on 2020/06/30 09:23:37 UTC

[hadoop-ozone] branch master updated: HDDS-3770. Improve getPipelines performance (#1066)

This is an automated email from the ASF dual-hosted git repository.

elek pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hadoop-ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new 574763b  HDDS-3770. Improve getPipelines performance (#1066)
574763b is described below

commit 574763b1cca814a90182a5b820e53dbd0c7de1b8
Author: runzhiwang <51...@users.noreply.github.com>
AuthorDate: Tue Jun 30 17:23:30 2020 +0800

    HDDS-3770. Improve getPipelines performance (#1066)
---
 .../scm/container/common/helpers/ExcludeList.java  | 34 +++++++--------
 .../hdds/scm/container/ContainerManager.java       |  3 +-
 .../hdds/scm/container/SCMContainerManager.java    |  7 ++--
 .../hadoop/hdds/scm/pipeline/PipelineStateMap.java | 49 +++++++++-------------
 .../hadoop/hdds/scm/block/TestBlockManager.java    |  6 ++-
 .../TestContainerStateManagerIntegration.java      | 10 +++--
 6 files changed, 50 insertions(+), 59 deletions(-)
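
Why the List-to-Set switch in ExcludeList (below) matters: ArrayList.contains is a
linear scan, so the old add* methods paid O(n) per call and every exclusion check
against the list scaled with its size, while HashSet gives amortized O(1) add and
contains with the same de-duplication semantics. A minimal illustrative sketch, not
part of this commit (the driver class and names are hypothetical):

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class ExcludeLookupSketch {
  public static void main(String[] args) {
    int n = 50_000;
    List<String> list = new ArrayList<>();
    Set<String> set = new HashSet<>();

    long t0 = System.nanoTime();
    for (int i = 0; i < n; i++) {
      String id = "pipeline-" + i;
      if (!list.contains(id)) {    // what the old addPipeline did: O(n) scan
        list.add(id);
      }
    }
    long t1 = System.nanoTime();
    for (int i = 0; i < n; i++) {
      set.add("pipeline-" + i);    // Set.add de-duplicates in amortized O(1)
    }
    long t2 = System.nanoTime();

    System.out.printf("List: %d ms, Set: %d ms%n",
        (t1 - t0) / 1_000_000, (t2 - t1) / 1_000_000);
  }
}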

diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ExcludeList.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ExcludeList.java
index dcc3263..f6b111f 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ExcludeList.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ExcludeList.java
@@ -23,9 +23,9 @@ import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.scm.container.ContainerID;
 import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
 
-import java.util.ArrayList;
-import java.util.List;
 import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
 
 /**
  * This class contains set of dns and containers which ozone client provides
@@ -33,22 +33,22 @@ import java.util.Collection;
  */
 public class ExcludeList {
 
-  private final List<DatanodeDetails> datanodes;
-  private final List<ContainerID> containerIds;
-  private final List<PipelineID> pipelineIds;
+  private final Set<DatanodeDetails> datanodes;
+  private final Set<ContainerID> containerIds;
+  private final Set<PipelineID> pipelineIds;
 
 
   public ExcludeList() {
-    datanodes = new ArrayList<>();
-    containerIds = new ArrayList<>();
-    pipelineIds = new ArrayList<>();
+    datanodes = new HashSet<>();
+    containerIds = new HashSet<>();
+    pipelineIds = new HashSet<>();
   }
 
-  public List<ContainerID> getContainerIds() {
+  public Set<ContainerID> getContainerIds() {
     return containerIds;
   }
 
-  public List<DatanodeDetails> getDatanodes() {
+  public Set<DatanodeDetails> getDatanodes() {
     return datanodes;
   }
 
@@ -57,24 +57,18 @@ public class ExcludeList {
   }
 
   public void addDatanode(DatanodeDetails dn) {
-    if (!datanodes.contains(dn)) {
-      datanodes.add(dn);
-    }
+    datanodes.add(dn);
   }
 
   public void addConatinerId(ContainerID containerId) {
-    if (!containerIds.contains(containerId)) {
-      containerIds.add(containerId);
-    }
+    containerIds.add(containerId);
   }
 
   public void addPipeline(PipelineID pipelineId) {
-    if (!pipelineIds.contains(pipelineId)) {
-      pipelineIds.add(pipelineId);
-    }
+    pipelineIds.add(pipelineId);
   }
 
-  public List<PipelineID> getPipelineIds() {
+  public Set<PipelineID> getPipelineIds() {
     return pipelineIds;
   }
 
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerManager.java
index 43c1ced..0e1c98f 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerManager.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerManager.java
@@ -18,6 +18,7 @@ package org.apache.hadoop.hdds.scm.container;
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -180,7 +181,7 @@ public interface ContainerManager extends Closeable {
    * @return ContainerInfo for the matching container.
    */
   ContainerInfo getMatchingContainer(long size, String owner,
-      Pipeline pipeline, List<ContainerID> excludedContainerIDS);
+      Pipeline pipeline, Collection<ContainerID> excludedContainerIDS);
 
   /**
    * Once after report processor handler completes, call this to notify
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/SCMContainerManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/SCMContainerManager.java
index 7ac90fc..34177f0 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/SCMContainerManager.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/SCMContainerManager.java
@@ -18,6 +18,7 @@ package org.apache.hadoop.hdds.scm.container;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
@@ -416,14 +417,14 @@ public class SCMContainerManager implements ContainerManager {
    */
   public ContainerInfo getMatchingContainer(final long sizeRequired,
       String owner, Pipeline pipeline) {
-    return getMatchingContainer(sizeRequired, owner, pipeline, Collections
-        .emptyList());
+    return getMatchingContainer(sizeRequired, owner, pipeline,
+        Collections.emptySet());
   }
 
   @SuppressWarnings("squid:S2445")
   public ContainerInfo getMatchingContainer(final long sizeRequired,
                                             String owner, Pipeline pipeline,
-                                            List<ContainerID>
+                                            Collection<ContainerID>
                                                       excludedContainers) {
     NavigableSet<ContainerID> containerIDs;
     ContainerInfo containerInfo;
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/PipelineStateMap.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/PipelineStateMap.java
index 69fbc08..5e20eb1 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/PipelineStateMap.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/PipelineStateMap.java
@@ -32,7 +32,6 @@ import java.io.IOException;
 import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.function.Predicate;
 
 /**
  * Holds the data structures which maintain the information about pipeline and
@@ -247,7 +246,7 @@ class PipelineStateMap {
    *
    * @param type - ReplicationType
    * @param state - Required PipelineState
-   * @param excludeDns list of dns to exclude
+   * @param excludeDns dns to exclude
    * @param excludePipelines pipelines to exclude
    * @return List of pipelines with specified replication type,
    * replication factor and pipeline state
@@ -263,43 +262,35 @@ class PipelineStateMap {
     Preconditions
         .checkNotNull(excludeDns, "Pipeline exclude list cannot be null");
 
-    List<Pipeline> pipelines = getPipelines(type, factor, state);
+    List<Pipeline> pipelines = null;
+    if (state == PipelineState.OPEN) {
+      pipelines = new ArrayList<>(query2OpenPipelines.getOrDefault(
+          new PipelineQuery(type, factor), Collections.EMPTY_LIST));
+    } else {
+      pipelines = new ArrayList<>(pipelineMap.values());
+    }
+
     Iterator<Pipeline> iter = pipelines.iterator();
     while (iter.hasNext()) {
       Pipeline pipeline = iter.next();
-      if (discardPipeline(pipeline, excludePipelines) ||
-          discardDatanode(pipeline, excludeDns)) {
+      if (pipeline.getType() != type ||
+          pipeline.getPipelineState() != state ||
+          pipeline.getFactor() != factor ||
+          excludePipelines.contains(pipeline.getId())) {
         iter.remove();
+      } else {
+        for (DatanodeDetails dn : pipeline.getNodes()) {
+          if (excludeDns.contains(dn)) {
+            iter.remove();
+            break;
+          }
+        }
       }
     }
 
     return pipelines;
   }
 
-  private boolean discardPipeline(Pipeline pipeline,
-      Collection<PipelineID> excludePipelines) {
-    if (excludePipelines.isEmpty()) {
-      return false;
-    }
-    Predicate<PipelineID> predicate = p -> p.equals(pipeline.getId());
-    return excludePipelines.parallelStream().anyMatch(predicate);
-  }
-
-  private boolean discardDatanode(Pipeline pipeline,
-      Collection<DatanodeDetails> excludeDns) {
-    if (excludeDns.isEmpty()) {
-      return false;
-    }
-    boolean discard = false;
-    for (DatanodeDetails dn : pipeline.getNodes()) {
-      Predicate<DatanodeDetails> predicate = p -> p.equals(dn);
-      discard = excludeDns.parallelStream().anyMatch(predicate);
-      if (discard) {
-        break;
-      }
-    }
-    return discard;
-  }
   /**
    * Get set of containerIDs corresponding to a pipeline.
    *
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java
index c89065c..e0ba53c 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.hdds.scm.metadata.SCMMetadataStore;
 import org.apache.hadoop.hdds.scm.metadata.SCMMetadataStoreImpl;
 import org.apache.hadoop.hdds.scm.pipeline.MockRatisPipelineProvider;
 import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
+import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
 import org.apache.hadoop.hdds.scm.pipeline.PipelineProvider;
 import org.apache.hadoop.hdds.scm.pipeline.SCMPipelineManager;
 import org.apache.hadoop.hdds.scm.safemode.SCMSafeModeManager;
@@ -191,8 +192,9 @@ public class TestBlockManager {
         .allocateBlock(DEFAULT_BLOCK_SIZE, type, factor, OzoneConsts.OZONE,
             excludeList);
     Assert.assertNotNull(block);
-    Assert.assertNotEquals(block.getPipeline().getId(),
-        excludeList.getPipelineIds().get(0));
+    for (PipelineID id : excludeList.getPipelineIds()) {
+      Assert.assertNotEquals(block.getPipeline().getId(), id);
+    }
 
     for (Pipeline pipeline : pipelineManager.getPipelines(type, factor)) {
       excludeList.addPipeline(pipeline.getId());
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerStateManagerIntegration.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerStateManagerIntegration.java
index 060f883..b779eed 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerStateManagerIntegration.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerStateManagerIntegration.java
@@ -18,6 +18,7 @@ package org.apache.hadoop.hdds.scm.container;
 
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.NavigableSet;
@@ -250,8 +251,8 @@ public class TestContainerStateManagerIntegration {
     // next container should be the same as first container
     ContainerInfo info = containerManager
         .getMatchingContainer(OzoneConsts.GB * 3, OzoneConsts.OZONE,
-            container1.getPipeline(), Collections.singletonList(new
-                ContainerID(1)));
+            container1.getPipeline(),
+            new HashSet<>(Collections.singletonList(new ContainerID(1))));
     Assert.assertNotEquals(container1.getContainerInfo().getContainerID(),
         info.getContainerID());
   }
@@ -275,8 +276,9 @@ public class TestContainerStateManagerIntegration {
 
     ContainerInfo info = containerManager
         .getMatchingContainer(OzoneConsts.GB * 3, OzoneConsts.OZONE,
-            container1.getPipeline(), Arrays.asList(new ContainerID(1), new
-                ContainerID(2), new ContainerID(3)));
+            container1.getPipeline(),
+            new HashSet<>(Arrays.asList(new ContainerID(1), new
+                ContainerID(2), new ContainerID(3))));
     Assert.assertEquals(info.getContainerID(), 4);
   }
 

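The PipelineStateMap change above drops the per-pipeline parallelStream/anyMatch
helpers in favour of plain Set.contains lookups, and for OPEN pipelines it starts
from the precomputed query2OpenPipelines bucket instead of re-filtering the whole
pipelineMap. A minimal sketch of the resulting filtering loop, using hypothetical
String ids in place of PipelineID/DatanodeDetails (an approximation of the intent,
not the patched code itself):

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

class PipelineFilterSketch {
  // Hypothetical model: pipeline id -> ids of the datanodes in that pipeline.
  static List<String> filter(Map<String, List<String>> openPipelines,
      Set<String> excludePipelines, Set<String> excludeDns) {
    List<String> result = new ArrayList<>(openPipelines.keySet());
    Iterator<String> iter = result.iterator();
    while (iter.hasNext()) {
      String id = iter.next();
      if (excludePipelines.contains(id)) {   // O(1) lookup, no parallelStream
        iter.remove();
        continue;
      }
      for (String dn : openPipelines.get(id)) {
        if (excludeDns.contains(dn)) {       // O(1) per datanode
          iter.remove();
          break;
        }
      }
    }
    return result;
  }

  public static void main(String[] args) {
    Map<String, List<String>> open = Map.of(
        "p1", List.of("dn1", "dn2"),
        "p2", List.of("dn3"));
    System.out.println(filter(open, Set.of("p2"), Set.of()));   // prints [p1]
  }
}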

---------------------------------------------------------------------
To unsubscribe, e-mail: ozone-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: ozone-commits-help@hadoop.apache.org