You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by an...@apache.org on 2014/10/27 02:02:11 UTC
git commit: [SPARK-3970] Remove duplicate removal of local dirs
Repository: spark
Updated Branches:
refs/heads/master f4e8c289d -> 6377adaf3
[SPARK-3970] Remove duplicate removal of local dirs
The shutdown hook of `DiskBlockManager` would remove localDirs. So we do not need to register them with `Utils.registerShutdownDeleteDir`. It causes duplicate removal of these local dirs and corresponding exceptions.
Author: Liang-Chi Hsieh <vi...@gmail.com>
Closes #2826 from viirya/fix_duplicate_localdir_remove and squashes the following commits:
051d4b5 [Liang-Chi Hsieh] check dir existing and return empty List as default.
2b91a9c [Liang-Chi Hsieh] remove duplicate removal of local dirs.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/6377adaf
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/6377adaf
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/6377adaf
Branch: refs/heads/master
Commit: 6377adaf3212b4facb4af644b70b7e99455cef48
Parents: f4e8c28
Author: Liang-Chi Hsieh <vi...@gmail.com>
Authored: Sun Oct 26 18:02:06 2014 -0700
Committer: Andrew Or <an...@databricks.com>
Committed: Sun Oct 26 18:02:06 2014 -0700
----------------------------------------------------------------------
.../org/apache/spark/storage/DiskBlockManager.scala | 1 -
core/src/main/scala/org/apache/spark/util/Utils.scala | 12 ++++++++----
2 files changed, 8 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/6377adaf/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
index 6633a1d..99e9253 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
@@ -149,7 +149,6 @@ private[spark] class DiskBlockManager(blockManager: BlockManager, conf: SparkCon
}
private def addShutdownHook() {
- localDirs.foreach(localDir => Utils.registerShutdownDeleteDir(localDir))
Runtime.getRuntime.addShutdownHook(new Thread("delete Spark local dirs") {
override def run(): Unit = Utils.logUncaughtExceptions {
logDebug("Shutdown hook called")
http://git-wip-us.apache.org/repos/asf/spark/blob/6377adaf/core/src/main/scala/org/apache/spark/util/Utils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index e1dc492..d722ee5 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -739,11 +739,15 @@ private[spark] object Utils extends Logging {
}
private def listFilesSafely(file: File): Seq[File] = {
- val files = file.listFiles()
- if (files == null) {
- throw new IOException("Failed to list files for dir: " + file)
+ if (file.exists()) {
+ val files = file.listFiles()
+ if (files == null) {
+ throw new IOException("Failed to list files for dir: " + file)
+ }
+ files
+ } else {
+ List()
}
- files
}
/**
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org