You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by do...@apache.org on 2020/05/11 20:14:24 UTC

[spark] branch branch-3.0 updated: [SPARK-31456][CORE] Fix shutdown hook priority edge cases

This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new 37c352c  [SPARK-31456][CORE] Fix shutdown hook priority edge cases
37c352c is described below

commit 37c352c592a9b8650dc5d1c5413af7a96f01631b
Author: oleg <ol...@nexla.com>
AuthorDate: Mon May 11 13:10:39 2020 -0700

    [SPARK-31456][CORE] Fix shutdown hook priority edge cases
    
    ### What changes were proposed in this pull request?
    Fix the execution order of shutdown hooks whose priorities are Int.MaxValue or Int.MinValue
    
    ### Why are the changes needed?
    The bug causes out-of-order execution of shutdown hooks if their priorities are Int.MinValue or Int.MaxValue
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Added a test covering the change.
    
    Closes #28494 from oleg-smith/SPARK-31456_shutdown_hook_priority.
    
    Authored-by: oleg <ol...@nexla.com>
    Signed-off-by: Dongjoon Hyun <do...@apache.org>
    (cherry picked from commit d7c3e9e53e01011f809b6cb145349ee8a9c5e5f0)
    Signed-off-by: Dongjoon Hyun <do...@apache.org>
---
 core/src/main/scala/org/apache/spark/util/ShutdownHookManager.scala | 4 +---
 core/src/test/scala/org/apache/spark/util/UtilsSuite.scala          | 6 +++++-
 2 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/util/ShutdownHookManager.scala b/core/src/main/scala/org/apache/spark/util/ShutdownHookManager.scala
index 4f13112..4db26860 100644
--- a/core/src/main/scala/org/apache/spark/util/ShutdownHookManager.scala
+++ b/core/src/main/scala/org/apache/spark/util/ShutdownHookManager.scala
@@ -209,9 +209,7 @@ private [util] class SparkShutdownHookManager {
 private class SparkShutdownHook(private val priority: Int, hook: () => Unit)
   extends Comparable[SparkShutdownHook] {
 
-  override def compareTo(other: SparkShutdownHook): Int = {
-    other.priority - priority
-  }
+  override def compareTo(other: SparkShutdownHook): Int = other.priority.compareTo(priority)
 
   def run(): Unit = hook()
 
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index f5e438b..931eb6b 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -745,10 +745,14 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
     manager.add(3, () => output += 3)
     manager.add(2, () => output += 2)
     manager.add(4, () => output += 4)
+    manager.add(Int.MinValue, () => output += Int.MinValue)
+    manager.add(Int.MinValue, () => output += Int.MinValue)
+    manager.add(Int.MaxValue, () => output += Int.MaxValue)
+    manager.add(Int.MaxValue, () => output += Int.MaxValue)
     manager.remove(hook1)
 
     manager.runAll()
-    assert(output.toList === List(4, 3, 2))
+    assert(output.toList === List(Int.MaxValue, Int.MaxValue, 4, 3, 2, Int.MinValue, Int.MinValue))
   }
 
   test("isInDirectory") {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org