diff options
author | Aaron Davidson <aaron@databricks.com> | 2014-05-31 12:36:58 -0700 |
---|---|---|
committer | Reynold Xin <rxin@apache.org> | 2014-05-31 12:36:58 -0700 |
commit | 7d52777effd0ff41aed545f53d2ab8de2364a188 (patch) | |
tree | e9d4c111b2b83ae5eda6500d2ce46a7be5fbe19e /core | |
parent | 1a0da0ec5799f3226ce238cac03f53fa4f7c9326 (diff) | |
download | spark-7d52777effd0ff41aed545f53d2ab8de2364a188.tar.gz spark-7d52777effd0ff41aed545f53d2ab8de2364a188.tar.bz2 spark-7d52777effd0ff41aed545f53d2ab8de2364a188.zip |
Super minor: Close inputStream in SparkSubmitArguments
`Properties#load()` doesn't close the InputStream, but it'd be closed after being GC'd anyway...
Also changed file.getName to file, because getName only shows the filename. This will show the full (possibly relative) path, which is less confusing if it's not found.
Author: Aaron Davidson <aaron@databricks.com>
Closes #914 from aarondav/tiny and squashes the following commits:
db9d072 [Aaron Davidson] Super minor: Close inputStream in SparkSubmitArguments
Diffstat (limited to 'core')
-rw-r--r-- | core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala | 11 |
1 file changed, 7 insertions(+), 4 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index bf449afae6..153eee3bc5 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -381,16 +381,19 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
 object SparkSubmitArguments {
   /** Load properties present in the given file. */
   def getPropertiesFromFile(file: File): Seq[(String, String)] = {
-    require(file.exists(), s"Properties file ${file.getName} does not exist")
+    require(file.exists(), s"Properties file $file does not exist")
+    require(file.isFile(), s"Properties file $file is not a normal file")
     val inputStream = new FileInputStream(file)
-    val properties = new Properties()
     try {
+      val properties = new Properties()
       properties.load(inputStream)
+      properties.stringPropertyNames().toSeq.map(k => (k, properties(k).trim))
     } catch {
       case e: IOException =>
-        val message = s"Failed when loading Spark properties file ${file.getName}"
+        val message = s"Failed when loading Spark properties file $file"
         throw new SparkException(message, e)
+    } finally {
+      inputStream.close()
     }
-    properties.stringPropertyNames().toSeq.map(k => (k, properties(k).trim))
   }
 }