author    Aaron Davidson <aaron@databricks.com>    2014-05-31 12:36:58 -0700
committer Reynold Xin <rxin@apache.org>            2014-05-31 12:37:57 -0700
commit    5ef94ebd1407acdd0d80450a1101ae9a6fe1a636
tree      654cf3310cb2367a13cf3ef5b4a8e230588e7d4a
parent    8575d8809f9062dc87ca72c57c0de42ec570118f
Super minor: Close inputStream in SparkSubmitArguments
`Properties#load()` doesn't close the InputStream, but it'd be closed after being GC'd anyway...

Also changed file.getName to file, because getName only shows the filename. This will show the full (possibly relative) path, which is less confusing if it's not found.

Author: Aaron Davidson <aaron@databricks.com>

Closes #914 from aarondav/tiny and squashes the following commits:

db9d072 [Aaron Davidson] Super minor: Close inputStream in SparkSubmitArguments

(cherry picked from commit 7d52777effd0ff41aed545f53d2ab8de2364a188)
Signed-off-by: Reynold Xin <rxin@apache.org>
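As a quick illustration of the second change: `File#getName` drops the directory part, while interpolating the `File` itself uses its `toString`, which preserves the path as given. A minimal sketch, with a hypothetical relative path that is not part of this patch:

import java.io.File

object PathMessageDemo extends App {
  val file = new File("conf/spark-defaults.conf") // hypothetical path, for illustration
  // getName keeps only the last path segment, hiding where the lookup happened
  println(s"Properties file ${file.getName} does not exist")
  // => Properties file spark-defaults.conf does not exist
  // Interpolating the File calls toString, preserving the (possibly relative) path
  println(s"Properties file $file does not exist")
  // => Properties file conf/spark-defaults.conf does not exist
}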
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala | 11
1 file changed, 7 insertions(+), 4 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index bf449afae6..153eee3bc5 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -381,16 +381,19 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
 object SparkSubmitArguments {
   /** Load properties present in the given file. */
   def getPropertiesFromFile(file: File): Seq[(String, String)] = {
-    require(file.exists(), s"Properties file ${file.getName} does not exist")
+    require(file.exists(), s"Properties file $file does not exist")
+    require(file.isFile(), s"Properties file $file is not a normal file")
     val inputStream = new FileInputStream(file)
-    val properties = new Properties()
     try {
+      val properties = new Properties()
       properties.load(inputStream)
+      properties.stringPropertyNames().toSeq.map(k => (k, properties(k).trim))
     } catch {
       case e: IOException =>
-        val message = s"Failed when loading Spark properties file ${file.getName}"
+        val message = s"Failed when loading Spark properties file $file"
         throw new SparkException(message, e)
+    } finally {
+      inputStream.close()
     }
-    properties.stringPropertyNames().toSeq.map(k => (k, properties(k).trim))
   }
 }
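For comparison, the close-on-all-paths pattern this patch writes by hand can also be expressed with `scala.util.Using` (Scala 2.13+, so not available to this 2014 codebase). A minimal, self-contained sketch that mirrors the patched method's contract but omits the `SparkException` wrapping, since it lives outside Spark:

import java.io.{File, FileInputStream}
import java.util.Properties
import scala.jdk.CollectionConverters._
import scala.util.Using

object LoadPropsSketch {
  def getPropertiesFromFile(file: File): Seq[(String, String)] = {
    require(file.exists(), s"Properties file $file does not exist")
    require(file.isFile, s"Properties file $file is not a normal file")
    // Using.resource closes the stream whether load() succeeds or throws,
    // playing the role of the try/finally added in the patch
    Using.resource(new FileInputStream(file)) { in =>
      val properties = new Properties()
      properties.load(in) // load() reads from the stream but never closes it
      properties.stringPropertyNames().asScala.toSeq
        .map(k => (k, properties.getProperty(k).trim))
    }
  }
}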