author     Aaron Davidson <aaron@databricks.com>  2014-03-09 11:08:39 -0700
committer  Aaron Davidson <aaron@databricks.com>  2014-03-09 11:08:39 -0700
commit     52834d761b059264214dfc6a1f9c70b8bc7ec089 (patch)
tree       deadb9fd8330b40da0b455478c9319dd75421f58 /core
parent     e59a3b6c415b95e8137f5a154716b12653a8aed0 (diff)
SPARK-929: Fully deprecate usage of SPARK_MEM
(Continued from old repo, prior discussion at https://github.com/apache/incubator-spark/pull/615)

This patch cements our deprecation of the SPARK_MEM environment variable by replacing it with three more specialized variables: SPARK_DAEMON_MEMORY, SPARK_EXECUTOR_MEMORY, and SPARK_DRIVER_MEMORY. The creation of the latter two variables means that we can safely set driver/job memory without accidentally setting the executor memory. Neither is public.

SPARK_EXECUTOR_MEMORY is only used by the Mesos scheduler (and set within SparkContext). The proper way of configuring executor memory is through the "spark.executor.memory" property.

SPARK_DRIVER_MEMORY is the new way of specifying the amount of memory used by jobs launched by spark-class, without possibly affecting executor memory.

Other memory considerations:
- The repl's memory can be set through the "--drivermem" command-line option, which really just sets SPARK_DRIVER_MEMORY.
- run-example doesn't use spark-class, so the only way to modify examples' memory is actually an unusual use of SPARK_JAVA_OPTS (which is normally overridden in all cases by spark-class).

This patch also fixes a lurking bug where spark-shell misused spark-class (the first argument is supposed to be the main class name, not java options), as well as a bug in the Windows spark-class2.cmd. I have not yet tested this patch on either Windows or Mesos, however.

Author: Aaron Davidson <aaron@databricks.com>

Closes #99 from aarondav/sparkmem and squashes the following commits:

9df4c68 [Aaron Davidson] SPARK-929: Fully deprecate usage of SPARK_MEM
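As an aside (not part of the patch): the lookup order the commit message describes — the public property first, then the internal SPARK_EXECUTOR_MEMORY variable, then the deprecated SPARK_MEM — can be shown as a minimal standalone Scala sketch. The object name, the simplified parser, and the warning wording below are illustrative stand-ins, not code from the patch.

object MemoryResolution {
  // Simplified stand-in for Utils.memoryStringToMb; handles only "m", "g", and bare bytes.
  private def memoryStringToMb(str: String): Int = {
    val lower = str.toLowerCase
    if (lower.endsWith("g")) lower.dropRight(1).toInt * 1024
    else if (lower.endsWith("m")) lower.dropRight(1).toInt
    else (lower.toLong / 1024 / 1024).toInt // bare number: treated as bytes
  }

  // Mirrors the resolution order in the SparkContext hunk below.
  def resolveExecutorMemoryMb(conf: Map[String, String], env: Map[String, String]): Int =
    conf.get("spark.executor.memory")             // preferred: the public property
      .orElse(env.get("SPARK_EXECUTOR_MEMORY"))   // internal: set for the Mesos backend
      .orElse(env.get("SPARK_MEM").map { v =>     // legacy: still honored, but warns
        Console.err.println("SPARK_MEM is deprecated; use spark.executor.memory instead.")
        v
      })
      .map(memoryStringToMb)
      .getOrElse(512)                             // default, in MB

  def main(args: Array[String]): Unit = {
    // The property wins even when SPARK_MEM is set.
    assert(resolveExecutorMemoryMb(Map("spark.executor.memory" -> "2g"), Map("SPARK_MEM" -> "1g")) == 2048)
    // SPARK_MEM is used (with a warning) only when nothing else is set.
    assert(resolveExecutorMemoryMb(Map.empty, Map("SPARK_MEM" -> "1g")) == 1024)
    assert(resolveExecutorMemoryMb(Map.empty, Map.empty) == 512)
  }
}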
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkContext.scala  | 20
-rw-r--r--  core/src/main/scala/org/apache/spark/util/Utils.scala    |  2
2 files changed, 11 insertions, 11 deletions
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index ce25573834..cdc0e5a342 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -162,19 +162,20 @@ class SparkContext(
jars.foreach(addJar)
}
+ def warnSparkMem(value: String): String = {
+ logWarning("Using SPARK_MEM to set amount of memory to use per executor process is " +
+ "deprecated, please use spark.executor.memory instead.")
+ value
+ }
+
private[spark] val executorMemory = conf.getOption("spark.executor.memory")
- .orElse(Option(System.getenv("SPARK_MEM")))
+ .orElse(Option(System.getenv("SPARK_EXECUTOR_MEMORY")))
+ .orElse(Option(System.getenv("SPARK_MEM")).map(warnSparkMem))
.map(Utils.memoryStringToMb)
.getOrElse(512)
- if (!conf.contains("spark.executor.memory") && sys.env.contains("SPARK_MEM")) {
- logWarning("Using SPARK_MEM to set amount of memory to use per executor process is " +
- "deprecated, instead use spark.executor.memory")
- }
-
// Environment variables to pass to our executors
private[spark] val executorEnvs = HashMap[String, String]()
- // Note: SPARK_MEM is included for Mesos, but overwritten for standalone mode in ExecutorRunner
for (key <- Seq("SPARK_CLASSPATH", "SPARK_LIBRARY_PATH", "SPARK_JAVA_OPTS");
value <- Option(System.getenv(key))) {
executorEnvs(key) = value
@@ -185,8 +186,9 @@ class SparkContext(
value <- Option(System.getenv(envKey)).orElse(Option(System.getProperty(propKey)))} {
executorEnvs(envKey) = value
}
- // Since memory can be set with a system property too, use that
- executorEnvs("SPARK_MEM") = executorMemory + "m"
+ // The Mesos scheduler backend relies on this environment variable to set executor memory.
+ // TODO: Set this only in the Mesos scheduler.
+ executorEnvs("SPARK_EXECUTOR_MEMORY") = executorMemory + "m"
executorEnvs ++= conf.getExecutorEnv
// Set SPARK_USER for user who is running SparkContext.
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 0eb2f78b73..53458b6660 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -532,8 +532,6 @@ private[spark] object Utils extends Logging {
/**
* Convert a Java memory parameter passed to -Xmx (such as 300m or 1g) to a number of megabytes.
- * This is used to figure out how much memory to claim from Mesos based on the SPARK_MEM
- * environment variable.
*/
def memoryStringToMb(str: String): Int = {
val lower = str.toLowerCase
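
For reference, only the first two lines of memoryStringToMb appear in this hunk. A sketch of how the full conversion might look, reconstructed from the docstring's contract ("300m" -> 300, "1g" -> 1024) rather than from the file itself — the actual Utils.memoryStringToMb may differ in details:

def memoryStringToMb(str: String): Int = {
  val lower = str.toLowerCase
  if (lower.endsWith("k")) (lower.dropRight(1).toLong / 1024).toInt
  else if (lower.endsWith("m")) lower.dropRight(1).toInt
  else if (lower.endsWith("g")) lower.dropRight(1).toInt * 1024
  else if (lower.endsWith("t")) lower.dropRight(1).toInt * 1024 * 1024
  else (lower.toLong / 1024 / 1024).toInt // no suffix: assume bytes
}

assert(memoryStringToMb("300m") == 300)
assert(memoryStringToMb("1g") == 1024)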