about summary refs log tree commit diff
path: root/core/src/main/scala/org
diff options
context:
space:
mode:
authorIlya Ganelin <ilya.ganelin@capitalone.com>2015-07-01 23:11:02 -0700
committerAndrew Or <andrew@databricks.com>2015-07-01 23:11:02 -0700
commit3697232b7d438979cc119b2a364296b0eec4a16a (patch)
tree178437d6c6d5ffac1560ce96d1d89f3ca0ba805e /core/src/main/scala/org
parent377ff4c9e8942882183d94698684824e9dc9f391 (diff)
downloadspark-3697232b7d438979cc119b2a364296b0eec4a16a.tar.gz
spark-3697232b7d438979cc119b2a364296b0eec4a16a.tar.bz2
spark-3697232b7d438979cc119b2a364296b0eec4a16a.zip
[SPARK-3071] Increase default driver memory
I've updated default values in comments, documentation, and in the command line builder to be 1g based on comments in the JIRA. I've also updated most usages to point at a single variable defined in the Utils.scala and JavaUtils.java files. This wasn't possible in all cases (R, shell scripts etc.) but usage in most code is now pointing at the same place. Please let me know if I've missed anything. Will the spark-shell use the value within the command line builder during instantiation? Author: Ilya Ganelin <ilya.ganelin@capitalone.com> Closes #7132 from ilganeli/SPARK-3071 and squashes the following commits: 4074164 [Ilya Ganelin] String fix 271610b [Ilya Ganelin] Merge branch 'SPARK-3071' of github.com:ilganeli/spark into SPARK-3071 273b6e9 [Ilya Ganelin] Test fix fd67721 [Ilya Ganelin] Update JavaUtils.java 26cc177 [Ilya Ganelin] test fix e5db35d [Ilya Ganelin] Fixed test failure 39732a1 [Ilya Ganelin] merge fix a6f7deb [Ilya Ganelin] Created default value for DRIVER MEM in Utils that's now used in almost all locations instead of setting manually in each 09ad698 [Ilya Ganelin] Update SubmitRestProtocolSuite.scala 19b6f25 [Ilya Ganelin] Missed one doc update 2698a3d [Ilya Ganelin] Updated default value for driver memory
Diffstat (limited to 'core/src/main/scala/org')
-rw-r--r-- core/src/main/scala/org/apache/spark/deploy/ClientArguments.scala | 2
-rw-r--r-- core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala | 5
-rw-r--r-- core/src/main/scala/org/apache/spark/deploy/rest/mesos/MesosRestServer.scala | 2
-rw-r--r-- core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala | 2
-rw-r--r-- core/src/main/scala/org/apache/spark/util/Utils.scala | 6
5 files changed, 12 insertions, 5 deletions
diff --git a/core/src/main/scala/org/apache/spark/deploy/ClientArguments.scala b/core/src/main/scala/org/apache/spark/deploy/ClientArguments.scala
index 316e2d59f0..42d3296062 100644
--- a/core/src/main/scala/org/apache/spark/deploy/ClientArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/ClientArguments.scala
@@ -117,7 +117,7 @@ private[deploy] class ClientArguments(args: Array[String]) {
private[deploy] object ClientArguments {
val DEFAULT_CORES = 1
- val DEFAULT_MEMORY = 512 // MB
+ val DEFAULT_MEMORY = Utils.DEFAULT_DRIVER_MEM_MB // MB
val DEFAULT_SUPERVISE = false
def isValidJarUrl(s: String): Boolean = {
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index b7429a901e..73ab18332f 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -461,8 +461,9 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
|Usage: spark-submit --status [submission ID] --master [spark://...]""".stripMargin)
outStream.println(command)
+ val mem_mb = Utils.DEFAULT_DRIVER_MEM_MB
outStream.println(
- """
+ s"""
|Options:
| --master MASTER_URL spark://host:port, mesos://host:port, yarn, or local.
| --deploy-mode DEPLOY_MODE Whether to launch the driver program locally ("client") or
@@ -488,7 +489,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
| --properties-file FILE Path to a file from which to load extra properties. If not
| specified, this will look for conf/spark-defaults.conf.
|
- | --driver-memory MEM Memory for driver (e.g. 1000M, 2G) (Default: 512M).
+ | --driver-memory MEM Memory for driver (e.g. 1000M, 2G) (Default: ${mem_mb}M).
| --driver-java-options Extra Java options to pass to the driver.
| --driver-library-path Extra library path entries to pass to the driver.
| --driver-class-path Extra class path entries to pass to the driver. Note that
diff --git a/core/src/main/scala/org/apache/spark/deploy/rest/mesos/MesosRestServer.scala b/core/src/main/scala/org/apache/spark/deploy/rest/mesos/MesosRestServer.scala
index 8198296eeb..868cc35d06 100644
--- a/core/src/main/scala/org/apache/spark/deploy/rest/mesos/MesosRestServer.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/rest/mesos/MesosRestServer.scala
@@ -59,7 +59,7 @@ private[mesos] class MesosSubmitRequestServlet(
extends SubmitRequestServlet {
private val DEFAULT_SUPERVISE = false
- private val DEFAULT_MEMORY = 512 // mb
+ private val DEFAULT_MEMORY = Utils.DEFAULT_DRIVER_MEM_MB // mb
private val DEFAULT_CORES = 1.0
private val nextDriverNumber = new AtomicLong(0)
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
index 9678631da9..1d2ecab517 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
@@ -164,7 +164,7 @@ private[worker] class WorkerArguments(args: Array[String], conf: SparkConf) {
}
}
// Leave out 1 GB for the operating system, but don't return a negative memory size
- math.max(totalMb - 1024, 512)
+ math.max(totalMb - 1024, Utils.DEFAULT_DRIVER_MEM_MB)
}
def checkWorkerMemory(): Unit = {
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index a7fc749a2b..944560a913 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -80,6 +80,12 @@ private[spark] object Utils extends Logging {
*/
val TEMP_DIR_SHUTDOWN_PRIORITY = 25
+ /**
+ * Define a default value for driver memory here since this value is referenced across the code
+ * base and nearly all files already use Utils.scala
+ */
+ val DEFAULT_DRIVER_MEM_MB = JavaUtils.DEFAULT_DRIVER_MEM_MB.toInt
+
private val MAX_DIR_CREATION_ATTEMPTS: Int = 10
@volatile private var localRootDirs: Array[String] = null