| author | Mark Grover <mark@apache.org> | 2014-03-01 16:21:22 -0800 |
| --- | --- | --- |
| committer | Patrick Wendell <pwendell@gmail.com> | 2014-03-01 16:21:22 -0800 |
| commit | 9aa095711858ce8670e51488f66a3d7c1a821c30 (patch) | |
| tree | feb04bc47f9156ecc1d8aa67650139fac5d81c2f | |
| parent | 556c56689bbc32c6cec0d07b57bd3ec73ceb243e (diff) | |
| download | spark-9aa095711858ce8670e51488f66a3d7c1a821c30.tar.gz, spark-9aa095711858ce8670e51488f66a3d7c1a821c30.tar.bz2, spark-9aa095711858ce8670e51488f66a3d7c1a821c30.zip | |
[SPARK-1150] fix repo location in create script
https://spark-project.atlassian.net/browse/SPARK-1150
fix the repo location in create_release script
Author: Mark Grover <mark@apache.org>
Closes #48 from CodingCat/script_fixes and squashes the following commits:
01f4bf7 [Mark Grover] Fixing some nitpicks
d2244d4 [Mark Grover] SPARK-676: Abbreviation in SPARK_MEM but not in SPARK_WORKER_MEMORY
| mode | path | lines changed |
| --- | --- | --- |
| -rwxr-xr-x | conf/spark-env.sh.template | 2 |
| -rw-r--r-- | core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala | 10 |
| -rw-r--r-- | docs/spark-standalone.md | 4 |

3 files changed, 11 insertions, 5 deletions
```diff
diff --git a/conf/spark-env.sh.template b/conf/spark-env.sh.template
index 6432a56608..619fc27d53 100755
--- a/conf/spark-env.sh.template
+++ b/conf/spark-env.sh.template
@@ -15,7 +15,7 @@
 # - SPARK_MASTER_IP, to bind the master to a different IP address or hostname
 # - SPARK_MASTER_PORT / SPARK_MASTER_WEBUI_PORT, to use non-default ports
 # - SPARK_WORKER_CORES, to set the number of cores to use on this machine
-# - SPARK_WORKER_MEMORY, to set how much memory to use (e.g. 1000m, 2g)
+# - SPARK_WORKER_MEM, to set how much memory to use (e.g. 1000m, 2g)
 # - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT
 # - SPARK_WORKER_INSTANCES, to set the number of worker processes per node
 # - SPARK_WORKER_DIR, to set the working directory of worker processes
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
index d35d5be73f..52c4419639 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
@@ -18,13 +18,15 @@ package org.apache.spark.deploy.worker
 
 import java.lang.management.ManagementFactory
 
+import org.apache.spark.Logging
 import org.apache.spark.util.{IntParam, MemoryParam, Utils}
 
 /**
  * Command-line parser for the master.
  */
-private[spark] class WorkerArguments(args: Array[String]) {
+private[spark] class WorkerArguments(args: Array[String]) extends Logging {
+  initLogging()
   var host = Utils.localHostName()
   var port = 0
   var webUiPort = 8081
@@ -40,9 +42,13 @@ private[spark] class WorkerArguments(args: Array[String]) {
   if (System.getenv("SPARK_WORKER_CORES") != null) {
     cores = System.getenv("SPARK_WORKER_CORES").toInt
   }
-  if (System.getenv("SPARK_WORKER_MEMORY") != null) {
+  if (System.getenv("SPARK_WORKER_MEM") != null) {
+    memory = Utils.memoryStringToMb(System.getenv("SPARK_WORKER_MEM"))
+  } else if (System.getenv("SPARK_WORKER_MEMORY") != null) {
+    logWarning("SPARK_WORKER_MEMORY is deprecated. Please use SPARK_WORKER_MEM instead")
     memory = Utils.memoryStringToMb(System.getenv("SPARK_WORKER_MEMORY"))
   }
+
   if (System.getenv("SPARK_WORKER_WEBUI_PORT") != null) {
     webUiPort = System.getenv("SPARK_WORKER_WEBUI_PORT").toInt
   }
diff --git a/docs/spark-standalone.md b/docs/spark-standalone.md
index 51fb3a4f7f..a2dec86be1 100644
--- a/docs/spark-standalone.md
+++ b/docs/spark-standalone.md
@@ -104,8 +104,8 @@ You can optionally configure the cluster further by setting environment variable
     <td>Total number of cores to allow Spark applications to use on the machine (default: all available cores).</td>
   </tr>
   <tr>
-    <td><code>SPARK_WORKER_MEMORY</code></td>
-    <td>Total amount of memory to allow Spark applications to use on the machine, e.g. <code>1000m</code>, <code>2g</code> (default: total memory minus 1 GB); note that each application's <i>individual</i> memory is configured using its <code>spark.executor.memory</code> property.</td>
+    <td><code>SPARK_WORKER_MEM</code></td>
+    <td>Total amount of memory to allow Spark applications to use on the machine, e.g. <code>1000m</code>, <code>2g</code> (default: total memory minus 1 GB); note that each application's <i>individual</i> memory is configured using its <code>spark.executor.memory</code> property. The old variable </code>SPARK_WORKER_MEMORY</code> has been deprecated.</td>
   </tr>
   <tr>
     <td><code>SPARK_WORKER_WEBUI_PORT</code></td>
```
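
The `WorkerArguments.scala` hunk above is a textbook environment-variable deprecation: read the new `SPARK_WORKER_MEM` first, and fall back to the old `SPARK_WORKER_MEMORY` only after logging a warning, so existing deployments keep working. Below is a minimal, self-contained Scala sketch of that fallback pattern; `parseMemoryMb` is a hypothetical stand-in for Spark's `Utils.memoryStringToMb`, and the `Map`-based environment lookup replaces `System.getenv` purely to keep the example easy to run.

```scala
// Minimal sketch of the deprecation fallback from the WorkerArguments.scala hunk above.
// parseMemoryMb is a hypothetical stand-in for Utils.memoryStringToMb; the env Map
// stands in for System.getenv so the example is self-contained and testable.
object WorkerMemorySketch {

  // Convert strings such as "1000m" or "2g" into megabytes.
  def parseMemoryMb(s: String): Int = {
    val v = s.trim.toLowerCase
    if (v.endsWith("g")) v.dropRight(1).toInt * 1024
    else if (v.endsWith("m")) v.dropRight(1).toInt
    else v.toInt // assume a bare number already means megabytes
  }

  // Prefer the new variable; fall back to the deprecated one with a warning.
  def resolveWorkerMemory(env: Map[String, String], defaultMb: Int): Int =
    env.get("SPARK_WORKER_MEM") match {
      case Some(value) => parseMemoryMb(value)
      case None =>
        env.get("SPARK_WORKER_MEMORY") match {
          case Some(value) =>
            System.err.println(
              "SPARK_WORKER_MEMORY is deprecated. Please use SPARK_WORKER_MEM instead")
            parseMemoryMb(value)
          case None => defaultMb
        }
    }

  def main(args: Array[String]): Unit = {
    // Only the deprecated variable is set: warn, but stay backwards compatible.
    val env = Map("SPARK_WORKER_MEMORY" -> "2g")
    println(resolveWorkerMemory(env, defaultMb = 1024)) // prints 2048
  }
}
```

Running the sketch with only `SPARK_WORKER_MEMORY` set exercises the deprecated path: it warns on stderr and still returns 2048 MB, mirroring the backwards-compatible behavior the patch aims for.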