You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by do...@apache.org on 2021/01/06 03:18:01 UTC

[spark] branch branch-3.1 updated: [SPARK-33029][CORE][WEBUI] Fix the UI executor page incorrectly marking the driver as excluded

This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.1 by this push:
     new d945696  [SPARK-33029][CORE][WEBUI] Fix the UI executor page incorrectly marking the driver as excluded
d945696 is described below

commit d94569658735dceb2351a869dcc1909b0ea7e453
Author: Baohe Zhang <ba...@verizonmedia.com>
AuthorDate: Tue Jan 5 19:16:40 2021 -0800

    [SPARK-33029][CORE][WEBUI] Fix the UI executor page incorrectly marking the driver as excluded
    
    ### What changes were proposed in this pull request?
    Filter out the driver entity when updating the exclusion status of live executors (including the driver), so the driver won't be marked as excluded in the UI even if the node that hosts the driver has been marked as excluded.
    
    ### Why are the changes needed?
    Before this change, if we run Spark in standalone mode with spark.blacklist.enabled=true, the driver will be marked as excluded when the host running the driver has been marked as excluded. This is incorrect because the exclude list feature excludes executors only, and the driver is still active.
    ![image](https://user-images.githubusercontent.com/26694233/103238740-35c05180-4911-11eb-99a2-c87c059ba0cf.png)
    After the fix, the driver won't be marked as excluded.
    ![image](https://user-images.githubusercontent.com/26694233/103238806-6f915800-4911-11eb-80d5-3c99266cfd0a.png)
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Manual test. Reopen the UI and see the driver is no longer marked as excluded.
    
    Closes #30954 from baohe-zhang/SPARK-33029.
    
    Authored-by: Baohe Zhang <ba...@verizonmedia.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
    (cherry picked from commit 29510821a0e3b1e09a7710ed02a0fa1caab506af)
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 .../main/scala/org/apache/spark/status/AppStatusListener.scala    | 8 +++++---
 .../executor_memory_usage_expectation.json                        | 4 ++--
 .../executor_node_excludeOnFailure_expectation.json               | 4 ++--
 3 files changed, 9 insertions(+), 7 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/status/AppStatusListener.scala b/core/src/main/scala/org/apache/spark/status/AppStatusListener.scala
index bf19897..6cb013b 100644
--- a/core/src/main/scala/org/apache/spark/status/AppStatusListener.scala
+++ b/core/src/main/scala/org/apache/spark/status/AppStatusListener.scala
@@ -366,10 +366,12 @@ private[spark] class AppStatusListener(
 
     // Implicitly exclude every available executor for the stage associated with this node
     Option(liveStages.get((stageId, stageAttemptId))).foreach { stage =>
-      val executorIds = liveExecutors.values.filter(_.host == hostId).map(_.executorId).toSeq
+      val executorIds = liveExecutors.values.filter(exec => exec.host == hostId
+        && exec.executorId != SparkContext.DRIVER_IDENTIFIER).map(_.executorId).toSeq
       setStageExcludedStatus(stage, now, executorIds: _*)
     }
-    liveExecutors.values.filter(_.hostname == hostId).foreach { exec =>
+    liveExecutors.values.filter(exec => exec.hostname == hostId
+      && exec.executorId != SparkContext.DRIVER_IDENTIFIER).foreach { exec =>
       addExcludedStageTo(exec, stageId, now)
     }
   }
@@ -416,7 +418,7 @@ private[spark] class AppStatusListener(
 
     // Implicitly (un)exclude every executor associated with the node.
     liveExecutors.values.foreach { exec =>
-      if (exec.hostname == host) {
+      if (exec.hostname == host && exec.executorId != SparkContext.DRIVER_IDENTIFIER) {
         updateExecExclusionStatus(exec, excluded, now)
       }
     }
diff --git a/core/src/test/resources/HistoryServerExpectations/executor_memory_usage_expectation.json b/core/src/test/resources/HistoryServerExpectations/executor_memory_usage_expectation.json
index 9adda27..5144934 100644
--- a/core/src/test/resources/HistoryServerExpectations/executor_memory_usage_expectation.json
+++ b/core/src/test/resources/HistoryServerExpectations/executor_memory_usage_expectation.json
@@ -16,7 +16,7 @@
   "totalInputBytes" : 0,
   "totalShuffleRead" : 0,
   "totalShuffleWrite" : 0,
-  "isBlacklisted" : true,
+  "isBlacklisted" : false,
   "maxMemory" : 908381388,
   "addTime" : "2016-11-16T22:33:31.477GMT",
   "executorLogs" : { },
@@ -30,7 +30,7 @@
   "attributes" : { },
   "resources" : { },
   "resourceProfileId" : 0,
-  "isExcluded" : true,
+  "isExcluded" : false,
   "excludedInStages" : [ ]
 }, {
   "id" : "3",
diff --git a/core/src/test/resources/HistoryServerExpectations/executor_node_excludeOnFailure_expectation.json b/core/src/test/resources/HistoryServerExpectations/executor_node_excludeOnFailure_expectation.json
index 65bd309..47a01b2 100644
--- a/core/src/test/resources/HistoryServerExpectations/executor_node_excludeOnFailure_expectation.json
+++ b/core/src/test/resources/HistoryServerExpectations/executor_node_excludeOnFailure_expectation.json
@@ -16,7 +16,7 @@
   "totalInputBytes" : 0,
   "totalShuffleRead" : 0,
   "totalShuffleWrite" : 0,
-  "isBlacklisted" : true,
+  "isBlacklisted" : false,
   "maxMemory" : 908381388,
   "addTime" : "2016-11-16T22:33:31.477GMT",
   "executorLogs" : { },
@@ -30,7 +30,7 @@
   "attributes" : { },
   "resources" : { },
   "resourceProfileId" : 0,
-  "isExcluded" : true,
+  "isExcluded" : false,
   "excludedInStages" : [ ]
 }, {
   "id" : "3",


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org