about summary refs log tree commit diff
path: root/core
diff options
context:
space:
mode:
authorMatei Zaharia <matei@databricks.com>2013-12-29 14:03:39 -0500
committerMatei Zaharia <matei@databricks.com>2013-12-29 14:03:39 -0500
commitcd00225db9b90fc845fd1458831bdd9d014d1bb6 (patch)
tree6afd53d8482dda409f302c1f2c20d87987c72dba /core
parent1c11f54a9b7340ccfa7bf7236fbcd210b77ae0a8 (diff)
downloadspark-cd00225db9b90fc845fd1458831bdd9d014d1bb6.tar.gz
spark-cd00225db9b90fc845fd1458831bdd9d014d1bb6.tar.bz2
spark-cd00225db9b90fc845fd1458831bdd9d014d1bb6.zip
Add SparkConf support in Python
Diffstat (limited to 'core')
-rw-r--r--core/src/main/scala/org/apache/spark/SparkConf.scala14
-rw-r--r--core/src/main/scala/org/apache/spark/SparkContext.scala2
2 files changed, 12 insertions, 4 deletions
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index bd24cd19f2..670c8b4caa 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -74,13 +74,21 @@ class SparkConf(loadDefaults: Boolean) extends Serializable with Cloneable {
this
}
- /** Set an environment variable to be used when launching executors for this application. */
+ /**
+ * Set an environment variable to be used when launching executors for this application.
+ * These variables are stored as properties of the form spark.executorEnv.VAR_NAME
+ * (for example spark.executorEnv.PATH) but this method makes them easier to set.
+ */
def setExecutorEnv(variable: String, value: String): SparkConf = {
settings("spark.executorEnv." + variable) = value
this
}
- /** Set multiple environment variables to be used when launching executors. */
+ /**
+ * Set multiple environment variables to be used when launching executors.
+ * These variables are stored as properties of the form spark.executorEnv.VAR_NAME
+ * (for example spark.executorEnv.PATH) but this method makes them easier to set.
+ */
def setExecutorEnv(variables: Seq[(String, String)]): SparkConf = {
for ((k, v) <- variables) {
setExecutorEnv(k, v)
@@ -135,7 +143,7 @@ class SparkConf(loadDefaults: Boolean) extends Serializable with Cloneable {
}
/** Get all parameters as a list of pairs */
- def getAll: Seq[(String, String)] = settings.clone().toSeq
+ def getAll: Array[(String, String)] = settings.clone().toArray
/** Get a parameter, falling back to a default if not set */
def getOrElse(k: String, defaultValue: String): String = {
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 0567f7f437..c109ff930c 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -78,7 +78,7 @@ class SparkContext(
* @param conf a [[org.apache.spark.SparkConf]] object specifying other Spark parameters
*/
def this(master: String, appName: String, conf: SparkConf) =
- this(conf.setMaster(master).setAppName(appName))
+ this(conf.clone().setMaster(master).setAppName(appName))
/**
* Alternative constructor that allows setting common Spark properties directly