From 033d8081525a7137085ec898e2426a58056ee2b8 Mon Sep 17 00:00:00 2001
From: Dhruve Ashar
Date: Thu, 7 Apr 2016 10:39:21 -0500
Subject: [SPARK-12384] Enables spark-clients to set the min(-Xms) and max(*.memory config) j…
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

## What changes were proposed in this pull request?

Currently Spark clients are started with the same memory setting for -Xms and -Xmx, which reserves an unnecessarily large amount of memory up front. This behavior is changed so that clients can now specify an initial heap size (-Xms) through the extraJavaOptions config for the driver, executor and AM individually. Note that only -Xms can be provided through this config option; if a client wants to set the max heap size (-Xmx), that still has to be done via the *.memory configuration knobs that are currently supported.

## How was this patch tested?

Monitored executor and YARN logs in debug mode to verify the commands with which executors are launched in client and cluster mode. The driver memory was verified locally using jps -v. Setting the -Xmx parameter in extraJavaOptions raises an exception with an informative message.

Author: Dhruve Ashar

Closes #12115 from dhruve/impr/SPARK-12384.
---
 core/src/main/scala/org/apache/spark/SparkConf.scala                | 6 +++---
 .../main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala | 1 -
 2 files changed, 3 insertions(+), 4 deletions(-)

(limited to 'core')

diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index e0fd248c43..acce6bc24f 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -456,9 +456,9 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
           "Set them directly on a SparkConf or in a properties file when using ./bin/spark-submit."
         throw new Exception(msg)
       }
-      if (javaOpts.contains("-Xmx") || javaOpts.contains("-Xms")) {
-        val msg = s"$executorOptsKey is not allowed to alter memory settings (was '$javaOpts'). " +
-          "Use spark.executor.memory instead."
+      if (javaOpts.contains("-Xmx")) {
+        val msg = s"$executorOptsKey is not allowed to specify max heap memory settings " +
+          s"(was '$javaOpts'). Use spark.executor.memory instead."
         throw new Exception(msg)
       }
     }
diff --git a/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala b/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala
index a2add61617..31b9c5edf0 100644
--- a/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala
+++ b/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala
@@ -37,7 +37,6 @@ private[spark] class WorkerCommandBuilder(sparkHome: String, memoryMb: Int, comm
 
   override def buildCommand(env: JMap[String, String]): JList[String] = {
     val cmd = buildJavaCommand(command.classPathEntries.mkString(File.pathSeparator))
-    cmd.add(s"-Xms${memoryMb}M")
     cmd.add(s"-Xmx${memoryMb}M")
     command.javaOpts.foreach(cmd.add)
     CommandBuilderUtils.addPermGenSizeOpt(cmd)
--
cgit v1.2.3
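
As a usage illustration (not part of the commit above), here is a minimal sketch of how a client could combine the new -Xms support with the existing memory knobs after this change; the app name and the memory sizes are made-up placeholders:

```scala
import org.apache.spark.SparkConf

// Sketch only: the app name and sizes below are illustrative placeholders.
val conf = new SparkConf()
  .setAppName("xms-demo")
  // The executor max heap (-Xmx) is still derived from spark.executor.memory.
  .set("spark.executor.memory", "4g")
  // After this change an initial heap size (-Xms) can be requested via
  // extraJavaOptions; putting -Xmx here would still throw an exception
  // from SparkConf.validateSettings.
  .set("spark.executor.extraJavaOptions", "-Xms1g")
```

The driver and the YARN AM follow the same pattern through spark.driver.extraJavaOptions and spark.yarn.am.extraJavaOptions, typically supplied on the spark-submit command line or in a properties file, since in client mode the driver JVM is already running by the time a SparkConf is constructed.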