about summary refs log tree commit diff
path: root/core
diff options
context:
space:
mode:
Diffstat (limited to 'core')
-rw-r--r-- core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala | 11
1 file changed, 7 insertions(+), 4 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index bf449afae6..153eee3bc5 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -381,16 +381,19 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
object SparkSubmitArguments {
/** Load properties present in the given file. */
def getPropertiesFromFile(file: File): Seq[(String, String)] = {
- require(file.exists(), s"Properties file ${file.getName} does not exist")
+ require(file.exists(), s"Properties file $file does not exist")
+ require(file.isFile(), s"Properties file $file is not a normal file")
val inputStream = new FileInputStream(file)
- val properties = new Properties()
try {
+ val properties = new Properties()
properties.load(inputStream)
+ properties.stringPropertyNames().toSeq.map(k => (k, properties(k).trim))
} catch {
case e: IOException =>
- val message = s"Failed when loading Spark properties file ${file.getName}"
+ val message = s"Failed when loading Spark properties file $file"
throw new SparkException(message, e)
+ } finally {
+ inputStream.close()
}
- properties.stringPropertyNames().toSeq.map(k => (k, properties(k).trim))
}
}