aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
author	Liang-Chi Hsieh <viirya@gmail.com>	2014-10-26 18:02:06 -0700
committer	Andrew Or <andrew@databricks.com>	2014-10-26 18:02:06 -0700
commit6377adaf3212b4facb4af644b70b7e99455cef48 (patch)
treef75b6b6ee5b391d5038d12d8577a6bba883b84df
parentf4e8c289d899fc03ee3cebd01d810f207868b448 (diff)
downloadspark-6377adaf3212b4facb4af644b70b7e99455cef48.tar.gz
spark-6377adaf3212b4facb4af644b70b7e99455cef48.tar.bz2
spark-6377adaf3212b4facb4af644b70b7e99455cef48.zip
[SPARK-3970] Remove duplicate removal of local dirs
The shutdown hook of `DiskBlockManager` would remove localDirs. So there is no need to register them with `Utils.registerShutdownDeleteDir`. It causes duplicate removal of these local dirs and corresponding exceptions. Author: Liang-Chi Hsieh <viirya@gmail.com> Closes #2826 from viirya/fix_duplicate_localdir_remove and squashes the following commits: 051d4b5 [Liang-Chi Hsieh] check dir existing and return empty List as default. 2b91a9c [Liang-Chi Hsieh] remove duplicate removal of local dirs.
-rw-r--r--	core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala	1
-rw-r--r--	core/src/main/scala/org/apache/spark/util/Utils.scala	12
2 files changed, 8 insertions, 5 deletions
diff --git a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
index 6633a1db57..99e925328a 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
@@ -149,7 +149,6 @@ private[spark] class DiskBlockManager(blockManager: BlockManager, conf: SparkCon
}
private def addShutdownHook() {
- localDirs.foreach(localDir => Utils.registerShutdownDeleteDir(localDir))
Runtime.getRuntime.addShutdownHook(new Thread("delete Spark local dirs") {
override def run(): Unit = Utils.logUncaughtExceptions {
logDebug("Shutdown hook called")
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index e1dc492387..d722ee5a97 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -739,11 +739,15 @@ private[spark] object Utils extends Logging {
}
private def listFilesSafely(file: File): Seq[File] = {
- val files = file.listFiles()
- if (files == null) {
- throw new IOException("Failed to list files for dir: " + file)
+ if (file.exists()) {
+ val files = file.listFiles()
+ if (files == null) {
+ throw new IOException("Failed to list files for dir: " + file)
+ }
+ files
+ } else {
+ List()
}
- files
}
/**