author     WangTao <barneystinson@aliyun.com>      2014-09-20 19:07:07 -0700
committer  Patrick Wendell <pwendell@gmail.com>    2014-09-20 19:07:23 -0700
commit     8e875d2aff5f30a5f7a4bf694fc89a8b852fdcdc (patch)
tree       ace1ee5a2a469ccad0919db16379d4e555fd129f /core
parent     293ce85145d7a37f7cb329831cbf921be571c2f5 (diff)
[SPARK-3599] Avoid loading properties file frequently
https://issues.apache.org/jira/browse/SPARK-3599

Author: WangTao <barneystinson@aliyun.com>
Author: WangTaoTheTonic <barneystinson@aliyun.com>

Closes #2454 from WangTaoTheTonic/avoidLoadingFrequently and squashes the following commits:

3681182 [WangTao] do not use clone
7dca036 [WangTao] use lazy val instead
2a79f26 [WangTaoTheTonic] Avoid loading properties file frequently
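The heart of the change is the move from a def, which re-parses the defaults file on every call, to a lazy val, which parses the file once on first access and caches the result. Below is a minimal standalone sketch of that difference; the PropsHolder class and loadFromFile helper are hypothetical stand-ins for Spark's real properties-file parsing, not part of this patch.

    import scala.collection.mutable.HashMap

    class PropsHolder(path: String) {
      // Hypothetical loader standing in for Spark's real parsing logic.
      private def loadFromFile(): HashMap[String, String] = {
        println(s"parsing $path")              // side effect makes each load visible
        HashMap("spark.master" -> "local[*]")  // stub contents
      }

      def defStyle: HashMap[String, String] = loadFromFile()          // re-parses on every call
      lazy val lazyValStyle: HashMap[String, String] = loadFromFile() // parses once, then cached
    }

    object LazyValDemo extends App {
      val holder = new PropsHolder("conf/spark-defaults.conf")
      holder.defStyle; holder.defStyle          // prints "parsing ..." twice
      holder.lazyValStyle; holder.lazyValStyle  // prints "parsing ..." once
    }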
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala          |  2
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala | 17
2 files changed, 10 insertions, 9 deletions
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index d132ecb3f9..580a439c9a 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -280,7 +280,7 @@ object SparkSubmit {
     }
 
     // Read from default spark properties, if any
-    for ((k, v) <- args.getDefaultSparkProperties) {
+    for ((k, v) <- args.defaultSparkProperties) {
       sysProps.getOrElseUpdate(k, v)
     }
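At this call site only the identifier changes; getOrElseUpdate still guarantees that keys already set explicitly (for example via command-line flags) are not overwritten by values from the defaults file. A small standalone illustration of that precedence, assuming nothing beyond the Scala standard library:

    import scala.collection.mutable

    object GetOrElseUpdateDemo extends App {
      val sysProps = mutable.HashMap("spark.master" -> "yarn")  // already set explicitly
      sysProps.getOrElseUpdate("spark.master", "local[*]")      // key present: kept as "yarn"
      sysProps.getOrElseUpdate("spark.executor.memory", "1g")   // key absent: default inserted
      println(sysProps)  // Map(spark.master -> yarn, spark.executor.memory -> 1g)
    }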
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index d545f58c5d..92e0917743 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -57,12 +57,8 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
   var pyFiles: String = null
   val sparkProperties: HashMap[String, String] = new HashMap[String, String]()
 
-  parseOpts(args.toList)
-  mergeSparkProperties()
-  checkRequiredArguments()
-
-  /** Return default present in the currently defined defaults file. */
-  def getDefaultSparkProperties = {
+  /** Default properties present in the currently defined defaults file. */
+  lazy val defaultSparkProperties: HashMap[String, String] = {
     val defaultProperties = new HashMap[String, String]()
     if (verbose) SparkSubmit.printStream.println(s"Using properties file: $propertiesFile")
     Option(propertiesFile).foreach { filename =>
@@ -79,6 +75,10 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
     defaultProperties
   }
 
+  parseOpts(args.toList)
+  mergeSparkProperties()
+  checkRequiredArguments()
+
   /**
    * Fill in any undefined values based on the default properties file or options passed in through
    * the '--conf' flag.
@@ -107,7 +107,8 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
       }
     }
 
-    val properties = getDefaultSparkProperties
+    val properties = HashMap[String, String]()
+    properties.putAll(defaultSparkProperties)
     properties.putAll(sparkProperties)
 
     // Use properties file as fallback for values which have a direct analog to
@@ -213,7 +214,7 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
     |  verbose                 $verbose
     |
     |Default properties from $propertiesFile:
-    |${getDefaultSparkProperties.mkString("  ", "\n  ", "\n")}
+    |${defaultSparkProperties.mkString("  ", "\n  ", "\n")}
     """.stripMargin
   }
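With the cached lazy val in place, mergeSparkProperties builds a fresh map, copies the defaults in, and then overlays the --conf properties so they take precedence; the cached map itself is never mutated (the "do not use clone" note in the squashed commits refers to this copy-into-a-fresh-map approach). A sketch of the same precedence, using the standard ++= operator rather than the putAll call seen in the patch:

    import scala.collection.mutable.HashMap

    object MergeDemo extends App {
      val defaultSparkProperties = HashMap("spark.master" -> "local[*]", "spark.ui.port" -> "4040")
      val sparkProperties = HashMap("spark.master" -> "yarn")  // e.g. from --conf flags

      val properties = HashMap[String, String]()
      properties ++= defaultSparkProperties  // cached defaults go in first
      properties ++= sparkProperties         // --conf values override them
      println(properties("spark.master"))    // yarn
      println(defaultSparkProperties)        // unchanged: the cached map is never mutated
    }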