You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by sr...@apache.org on 2015/02/16 23:54:37 UTC
spark git commit: SPARK-5841: remove DiskBlockManager shutdown hook on stop
Repository: spark
Updated Branches:
refs/heads/master c51ab37fa -> bb05982dd
SPARK-5841: remove DiskBlockManager shutdown hook on stop
After a call to stop, the shutdown hook is redundant and causes a
memory leak.
Author: Matt Whelan <mw...@perka.com>
Closes #4627 from MattWhelan/SPARK-5841 and squashes the following commits:
d5f5c7f [Matt Whelan] SPARK-5841: remove DiskBlockManager shutdown hook on stop
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/bb05982d
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/bb05982d
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/bb05982d
Branch: refs/heads/master
Commit: bb05982dd25e008fb01684dff1f95d03e7271721
Parents: c51ab37
Author: Matt Whelan <mw...@perka.com>
Authored: Mon Feb 16 22:54:32 2015 +0000
Committer: Sean Owen <so...@cloudera.com>
Committed: Mon Feb 16 22:54:32 2015 +0000
----------------------------------------------------------------------
.../org/apache/spark/storage/DiskBlockManager.scala | 13 +++++++++----
1 file changed, 9 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/bb05982d/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
index 53eaeda..ae9df8c 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
@@ -49,7 +49,7 @@ private[spark] class DiskBlockManager(blockManager: BlockManager, conf: SparkCon
}
private val subDirs = Array.fill(localDirs.length)(new Array[File](subDirsPerLocalDir))
- addShutdownHook()
+ private val shutdownHook = addShutdownHook()
/** Looks up a file by hashing it into one of our local subdirectories. */
// This method should be kept in sync with
@@ -134,17 +134,22 @@ private[spark] class DiskBlockManager(blockManager: BlockManager, conf: SparkCon
}
}
- private def addShutdownHook() {
- Runtime.getRuntime.addShutdownHook(new Thread("delete Spark local dirs") {
+ private def addShutdownHook(): Thread = {
+ val shutdownHook = new Thread("delete Spark local dirs") {
override def run(): Unit = Utils.logUncaughtExceptions {
logDebug("Shutdown hook called")
DiskBlockManager.this.stop()
}
- })
+ }
+ Runtime.getRuntime.addShutdownHook(shutdownHook)
+ shutdownHook
}
/** Cleanup local dirs and stop shuffle sender. */
private[spark] def stop() {
+ // Remove the shutdown hook. It causes memory leaks if we leave it around.
+ Runtime.getRuntime.removeShutdownHook(shutdownHook)
+
// Only perform cleanup if an external service is not serving our shuffle files.
if (!blockManager.externalShuffleServiceEnabled || blockManager.blockManagerId.isDriver) {
localDirs.foreach { localDir =>
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org