Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2019/03/12 21:33:11 UTC

[GitHub] [spark] squito commented on a change in pull request #24072: [SPARK-27112] : Spark Scheduler encounters two independent Deadlocks …

URL: https://github.com/apache/spark/pull/24072#discussion_r264893474
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
 ##########
 @@ -258,15 +258,17 @@ class CoarseGrainedSchedulerBackend(scheduler: TaskSchedulerImpl, val rpcEnv: Rp
     // Make fake resource offers on all executors
     private def makeOffers() {
       // Make sure no executor is killed while some task is launching on it
-      val taskDescs = CoarseGrainedSchedulerBackend.this.synchronized {
-        // Filter out executors under killing
-        val activeExecutors = executorDataMap.filterKeys(executorIsAlive)
-        val workOffers = activeExecutors.map {
-          case (id, executorData) =>
-            new WorkerOffer(id, executorData.executorHost, executorData.freeCores,
-              Some(executorData.executorAddress.hostPort))
-        }.toIndexedSeq
-        scheduler.resourceOffers(workOffers)
+      val taskDescs = scheduler.synchronized {
 
 Review comment:
   There should be a comment here explaining why we need both of these locks.
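
For illustration only, below is a minimal, hypothetical Scala sketch of the lock-ordering discipline such a comment would describe. The names (LockOrderingSketch, schedulerLock, backendLock) are stand-ins, not the actual TaskSchedulerImpl or CoarseGrainedSchedulerBackend code; the assumed point is simply that every path needing both locks acquires them in the same fixed order, since two paths taking the same pair of locks in opposite orders is the classic recipe for the kind of deadlock this PR addresses.

    // Hypothetical sketch (illustrative names, not the real Spark classes):
    // a consistent lock ordering prevents the deadlock that arises when two
    // code paths acquire the same pair of monitors in opposite orders.
    object LockOrderingSketch {

      // Stand-ins for the two monitors assumed to be involved
      // (the scheduler and the backend in the real code).
      private val schedulerLock = new Object
      private val backendLock = new Object

      // Path 1: take schedulerLock first, then backendLock.
      def makeOffers(): Unit = {
        schedulerLock.synchronized {
          backendLock.synchronized {
            // build worker offers / launch tasks while both structures are stable
          }
        }
      }

      // Path 2: must use the same order as makeOffers(); if this method took
      // backendLock first, the two paths could each hold one lock and wait
      // forever for the other.
      def statusUpdate(): Unit = {
        schedulerLock.synchronized {
          backendLock.synchronized {
            // update per-executor bookkeeping here
          }
        }
      }

      def main(args: Array[String]): Unit = {
        makeOffers()
        statusUpdate()
        println("both paths acquired the locks in the same order")
      }
    }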
