path: root/core
author     Reynold Xin <rxin@cs.berkeley.edu>  2013-06-17 16:58:23 -0400
committer  Reynold Xin <rxin@cs.berkeley.edu>  2013-06-17 16:58:23 -0400
commit     1450296797e53f1a01166c885050091df9c96e2e (patch)
tree       a61484093718eaae3341e63322617f129edd2be3 /core
parent     e6d1277315801b90cca23c926cf1ceb97b65c87c (diff)
SPARK-781: Log the temp directory path when Spark says "Failed to create
temp directory".
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/spark/Utils.scala              |  4
-rw-r--r--  core/src/main/scala/spark/storage/DiskStore.scala  | 34
2 files changed, 19 insertions(+), 19 deletions(-)
diff --git a/core/src/main/scala/spark/Utils.scala b/core/src/main/scala/spark/Utils.scala
index ec15326014..fd7b8cc8d5 100644
--- a/core/src/main/scala/spark/Utils.scala
+++ b/core/src/main/scala/spark/Utils.scala
@@ -116,8 +116,8 @@ private object Utils extends Logging {
while (dir == null) {
attempts += 1
if (attempts > maxAttempts) {
- throw new IOException("Failed to create a temp directory after " + maxAttempts +
- " attempts!")
+ throw new IOException("Failed to create a temp directory under (" + root + ") after " +
+ maxAttempts + " attempts!")
}
try {
dir = new File(root, "spark-" + UUID.randomUUID.toString)
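For reference, the loop this hunk improves is a standard create-with-retry pattern. A minimal, self-contained sketch of that shape (only the throw matches the diff; the rest of the method body is an assumption, not the actual Utils.scala):

    import java.io.{File, IOException}
    import java.util.UUID

    // Sketch of the retry pattern around the changed throw; everything
    // except the exception message is assumed.
    def createTempDir(root: String = System.getProperty("java.io.tmpdir")): File = {
      val maxAttempts = 10
      var attempts = 0
      var dir: File = null
      while (dir == null) {
        attempts += 1
        if (attempts > maxAttempts) {
          // Naming the root directory makes "permission denied" or
          // "disk full" failures traceable to a concrete volume.
          throw new IOException("Failed to create a temp directory under (" + root +
            ") after " + maxAttempts + " attempts!")
        }
        try {
          dir = new File(root, "spark-" + UUID.randomUUID.toString)
          if (dir.exists() || !dir.mkdirs()) dir = null  // collision or failure: retry
        } catch {
          case _: SecurityException => dir = null
        }
      }
      dir
    }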
diff --git a/core/src/main/scala/spark/storage/DiskStore.scala b/core/src/main/scala/spark/storage/DiskStore.scala
index c7281200e7..9914beec99 100644
--- a/core/src/main/scala/spark/storage/DiskStore.scala
+++ b/core/src/main/scala/spark/storage/DiskStore.scala
@@ -82,15 +82,15 @@ private class DiskStore(blockManager: BlockManager, rootDirs: String)
override def size(): Long = lastValidPosition
}
- val MAX_DIR_CREATION_ATTEMPTS: Int = 10
- val subDirsPerLocalDir = System.getProperty("spark.diskStore.subDirectories", "64").toInt
+ private val MAX_DIR_CREATION_ATTEMPTS: Int = 10
+ private val subDirsPerLocalDir = System.getProperty("spark.diskStore.subDirectories", "64").toInt
- var shuffleSender : ShuffleSender = null
+ private var shuffleSender : ShuffleSender = null
// Create one local directory for each path mentioned in spark.local.dir; then, inside this
// directory, create multiple subdirectories that we will hash files into, in order to avoid
// having really large inodes at the top level.
- val localDirs = createLocalDirs()
- val subDirs = Array.fill(localDirs.length)(new Array[File](subDirsPerLocalDir))
+ private val localDirs: Array[File] = createLocalDirs()
+ private val subDirs = Array.fill(localDirs.length)(new Array[File](subDirsPerLocalDir))
addShutdownHook()
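The comment in this hunk describes a two-level layout: one directory per entry in spark.local.dir, each holding spark.diskStore.subDirectories hashed subdirectories. A sketch of how a block ID could be mapped into that layout (the lookup code is not part of this diff, so the hashing details here are assumptions that only mirror the field names above):

    import java.io.File

    // Assumed mapping from a block ID to localDirs(i)/<subdir>/<blockId>.
    def fileFor(blockId: String, localDirs: Array[File], subDirsPerLocalDir: Int): File = {
      val hash = math.abs(blockId.hashCode)
      val dirId = hash % localDirs.length
      val subDirId = (hash / localDirs.length) % subDirsPerLocalDir
      val subDir = new File(localDirs(dirId), "%02x".format(subDirId))
      if (!subDir.exists()) subDir.mkdirs()  // created lazily on first use
      new File(subDir, blockId)
    }

Spreading files across 64 subdirectories per local dir keeps any single directory's entry count small, which is what the "really large inodes at the top level" comment refers to.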
@@ -99,7 +99,6 @@ private class DiskStore(blockManager: BlockManager, rootDirs: String)
new DiskBlockObjectWriter(blockId, serializer, bufferSize)
}
-
override def getSize(blockId: String): Long = {
getFile(blockId).length()
}
@@ -232,8 +231,8 @@ private class DiskStore(blockManager: BlockManager, rootDirs: String)
private def createLocalDirs(): Array[File] = {
logDebug("Creating local directories at root dirs '" + rootDirs + "'")
val dateFormat = new SimpleDateFormat("yyyyMMddHHmmss")
- rootDirs.split(",").map(rootDir => {
- var foundLocalDir: Boolean = false
+ rootDirs.split(",").map { rootDir =>
+ var foundLocalDir = false
var localDir: File = null
var localDirId: String = null
var tries = 0
@@ -248,7 +247,7 @@ private class DiskStore(blockManager: BlockManager, rootDirs: String)
}
} catch {
case e: Exception =>
- logWarning("Attempt " + tries + " to create local dir failed", e)
+ logWarning("Attempt " + tries + " to create local dir " + localDir + " failed", e)
}
}
if (!foundLocalDir) {
@@ -258,7 +257,7 @@ private class DiskStore(blockManager: BlockManager, rootDirs: String)
}
logInfo("Created local directory at " + localDir)
localDir
- })
+ }
}
private def addShutdownHook() {
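Besides the extra logging, this hunk switches from map(rootDir => { ... }) to map { rootDir => ... }, the idiomatic Scala form for a multi-line function literal. The same change in isolation (values are hypothetical):

    val roots = "/tmp/a,/tmp/b"

    // before: function literal wrapped in parentheses plus braces
    val before = roots.split(",").map(rootDir => {
      rootDir.trim
    })

    // after: braces alone delimit the multi-line closure
    val after = roots.split(",").map { rootDir =>
      rootDir.trim
    }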
@@ -266,15 +265,16 @@ private class DiskStore(blockManager: BlockManager, rootDirs: String)
Runtime.getRuntime.addShutdownHook(new Thread("delete Spark local dirs") {
override def run() {
logDebug("Shutdown hook called")
- try {
- localDirs.foreach { localDir =>
+ localDirs.foreach { localDir =>
+ try {
if (!Utils.hasRootAsShutdownDeleteDir(localDir)) Utils.deleteRecursively(localDir)
+ } catch {
+ case t: Throwable =>
+ logError("Exception while deleting local spark dir: " + localDir, t)
}
- if (shuffleSender != null) {
- shuffleSender.stop
- }
- } catch {
- case t: Throwable => logError("Exception while deleting local spark dirs", t)
+ }
+ if (shuffleSender != null) {
+ shuffleSender.stop
}
}
})
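The net effect of this last hunk: the try/catch moves inside the foreach, so a failure deleting one directory no longer aborts cleanup of the remaining ones, and shuffleSender.stop now runs regardless of deletion errors. A self-contained sketch of that structure (names, the logging, and the delete helper are illustrative stand-ins, not the actual DiskStore fields):

    import java.io.File

    object ShutdownCleanupSketch {
      // Install a shutdown hook that isolates per-directory failures.
      def install(localDirs: Array[File], stopSender: () => Unit): Unit = {
        Runtime.getRuntime.addShutdownHook(new Thread("delete local dirs") {
          override def run(): Unit = {
            localDirs.foreach { localDir =>
              try {
                deleteRecursively(localDir)
              } catch {
                case t: Throwable =>
                  // Log and move on to the next directory.
                  System.err.println("Exception while deleting local dir " + localDir + ": " + t)
              }
            }
            stopSender()  // runs even if some deletions above failed
          }
        })
      }

      private def deleteRecursively(f: File): Unit = {
        if (f.isDirectory) Option(f.listFiles).getOrElse(Array.empty[File]).foreach(deleteRecursively)
        f.delete()
      }
    }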