author     Dhruve Ashar <dhruveashar@gmail.com>   2016-04-07 10:39:21 -0500
committer  Tom Graves <tgraves@yahoo-inc.com>     2016-04-07 10:39:21 -0500
commit     033d8081525a7137085ec898e2426a58056ee2b8 (patch)
tree       08755f863e3c2b9692fe2d0d6332d0645b09bbff /launcher
parent     35e0db2d45e2f98d8b4d2c0d442ab19cd615830e (diff)
[SPARK-12384] Enables spark-clients to set the min(-Xms) and max(*.memory config) j…
## What changes were proposed in this pull request?

Currently Spark clients are started with the same memory setting for -Xms and -Xmx, which reserves unnecessarily large amounts of memory up front. This change lets clients specify an initial heap size through the extraJavaOptions config for the driver, executor, and AM individually. Note that only -Xms can be provided through this option; to set the max heap size (-Xmx), clients must use the existing *.memory configuration knobs.

## How was this patch tested?

Monitored executor and YARN logs in debug mode to verify the commands with which the processes are launched in client and cluster mode. The driver memory was verified locally using `jps -v`. Setting the -Xmx parameter in extraJavaOptions raises an exception with the information provided.

Author: Dhruve Ashar <dhruveashar@gmail.com>

Closes #12115 from dhruve/impr/SPARK-12384.
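For context, a minimal sketch of how a client could use the launcher API after this change is shown below. The application jar and main class are hypothetical and only illustrate the intent: the initial heap (-Xms) goes through `SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS`, while the max heap comes from `SparkLauncher.DRIVER_MEMORY`, since passing -Xmx through the extra options is now rejected.

```java
import java.io.IOException;

import org.apache.spark.launcher.SparkLauncher;

public class LauncherHeapExample {
  public static void main(String[] args) throws IOException, InterruptedException {
    // Hypothetical app jar and main class, used only for illustration.
    Process spark = new SparkLauncher()
        .setAppResource("/path/to/my-app.jar")
        .setMainClass("com.example.MyApp")
        .setMaster("yarn")
        // The initial heap (-Xms) may now be set through the extra Java options.
        .setConf(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, "-Xms1g")
        // The max heap (-Xmx) must come from the memory config, not the Java options.
        .setConf(SparkLauncher.DRIVER_MEMORY, "4g")
        .launch();
    spark.waitFor();  // wait for the spark-submit child process to finish
  }
}
```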
Diffstat (limited to 'launcher')
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java         |  3
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java       | 13
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java      | 19
-rw-r--r--  launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java |  4
4 files changed, 28 insertions(+), 11 deletions(-)
diff --git a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
index 7a5e37c501..c7488082ca 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
@@ -74,7 +74,8 @@ abstract class AbstractCommandBuilder {
* SparkLauncher constructor that takes an environment), and may be modified to
* include other variables needed by the process to be executed.
*/
- abstract List<String> buildCommand(Map<String, String> env) throws IOException;
+ abstract List<String> buildCommand(Map<String, String> env)
+ throws IOException, IllegalArgumentException;
/**
* Builds a list of arguments to run java.
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
index 6b9d36cc0b..82b593a3f7 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
@@ -41,7 +41,8 @@ class SparkClassCommandBuilder extends AbstractCommandBuilder {
}
@Override
- public List<String> buildCommand(Map<String, String> env) throws IOException {
+ public List<String> buildCommand(Map<String, String> env)
+ throws IOException, IllegalArgumentException {
List<String> javaOptsKeys = new ArrayList<>();
String memKey = null;
String extraClassPath = null;
@@ -80,12 +81,18 @@ class SparkClassCommandBuilder extends AbstractCommandBuilder {
}
List<String> cmd = buildJavaCommand(extraClassPath);
+
for (String key : javaOptsKeys) {
- addOptionString(cmd, System.getenv(key));
+ String envValue = System.getenv(key);
+ if (!isEmpty(envValue) && envValue.contains("Xmx")) {
+ String msg = String.format("%s is not allowed to specify max heap(Xmx) memory settings " +
+ "(was %s). Use the corresponding configuration instead.", key, envValue);
+ throw new IllegalArgumentException(msg);
+ }
+ addOptionString(cmd, envValue);
}
String mem = firstNonEmpty(memKey != null ? System.getenv(memKey) : null, DEFAULT_MEM);
- cmd.add("-Xms" + mem);
cmd.add("-Xmx" + mem);
addPermGenSizeOpt(cmd);
cmd.add(className);
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
index c31c42cd3a..6941ca903c 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
@@ -132,7 +132,8 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
}
@Override
- public List<String> buildCommand(Map<String, String> env) throws IOException {
+ public List<String> buildCommand(Map<String, String> env)
+ throws IOException, IllegalArgumentException {
if (PYSPARK_SHELL_RESOURCE.equals(appResource) && !printInfo) {
return buildPySparkShellCommand(env);
} else if (SPARKR_SHELL_RESOURCE.equals(appResource) && !printInfo) {
@@ -211,7 +212,8 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
return args;
}
- private List<String> buildSparkSubmitCommand(Map<String, String> env) throws IOException {
+ private List<String> buildSparkSubmitCommand(Map<String, String> env)
+ throws IOException, IllegalArgumentException {
// Load the properties file and check whether spark-submit will be running the app's driver
// or just launching a cluster app. When running the driver, the JVM's argument will be
// modified to cover the driver's configuration.
@@ -227,6 +229,16 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
addOptionString(cmd, System.getenv("SPARK_SUBMIT_OPTS"));
addOptionString(cmd, System.getenv("SPARK_JAVA_OPTS"));
+ // We don't want the client to specify Xmx. These have to be set by their corresponding
+ // memory flag --driver-memory or configuration entry spark.driver.memory
+ String driverExtraJavaOptions = config.get(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS);
+ if (!isEmpty(driverExtraJavaOptions) && driverExtraJavaOptions.contains("Xmx")) {
+ String msg = String.format("Not allowed to specify max heap(Xmx) memory settings through " +
+ "java options (was %s). Use the corresponding --driver-memory or " +
+ "spark.driver.memory configuration instead.", driverExtraJavaOptions);
+ throw new IllegalArgumentException(msg);
+ }
+
if (isClientMode) {
// Figuring out where the memory value come from is a little tricky due to precedence.
// Precedence is observed in the following order:
@@ -240,9 +252,8 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
isThriftServer(mainClass) ? System.getenv("SPARK_DAEMON_MEMORY") : null;
String memory = firstNonEmpty(tsMemory, config.get(SparkLauncher.DRIVER_MEMORY),
System.getenv("SPARK_DRIVER_MEMORY"), System.getenv("SPARK_MEM"), DEFAULT_MEM);
- cmd.add("-Xms" + memory);
cmd.add("-Xmx" + memory);
- addOptionString(cmd, config.get(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS));
+ addOptionString(cmd, driverExtraJavaOptions);
mergeEnvPathList(env, getLibPathEnvName(),
config.get(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH));
}
diff --git a/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java b/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java
index 29cbbe825b..c7e8b2e03a 100644
--- a/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java
+++ b/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java
@@ -79,7 +79,6 @@ public class SparkSubmitCommandBuilderSuite extends BaseSuite {
assertTrue(findInStringList(env.get(CommandBuilderUtils.getLibPathEnvName()),
File.pathSeparator, "/driverLibPath"));
assertTrue(findInStringList(findArgValue(cmd, "-cp"), File.pathSeparator, "/driverCp"));
- assertTrue("Driver -Xms should be configured.", cmd.contains("-Xms42g"));
assertTrue("Driver -Xmx should be configured.", cmd.contains("-Xmx42g"));
assertTrue("Command should contain user-defined conf.",
Collections.indexOfSubList(cmd, Arrays.asList(parser.CONF, "spark.randomOption=foo")) > 0);
@@ -202,12 +201,11 @@ public class SparkSubmitCommandBuilderSuite extends BaseSuite {
// Checks below are different for driver and non-driver mode.
if (isDriver) {
- assertTrue("Driver -Xms should be configured.", cmd.contains("-Xms1g"));
assertTrue("Driver -Xmx should be configured.", cmd.contains("-Xmx1g"));
} else {
boolean found = false;
for (String arg : cmd) {
- if (arg.startsWith("-Xms") || arg.startsWith("-Xmx")) {
+ if (arg.startsWith("-Xmx")) {
found = true;
break;
}
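As a standalone illustration of the validation pattern the patch adds in both command builders, the check boils down to rejecting any user-supplied Java options string that contains an Xmx setting. The sketch below uses hypothetical names and is not the actual launcher code.

```java
/** Sketch of the Xmx validation added in this patch; class and method names are illustrative only. */
public final class HeapOptionCheck {

  /** Throws if the given Java options string tries to set the max heap (-Xmx). */
  static void checkNoMaxHeap(String source, String javaOpts) {
    if (javaOpts != null && !javaOpts.isEmpty() && javaOpts.contains("Xmx")) {
      throw new IllegalArgumentException(String.format(
          "%s is not allowed to specify max heap(Xmx) memory settings (was %s). " +
          "Use the corresponding memory configuration instead.", source, javaOpts));
    }
  }

  public static void main(String[] args) {
    checkNoMaxHeap("spark.driver.extraJavaOptions", "-Xms1g");  // accepted
    checkNoMaxHeap("spark.driver.extraJavaOptions", "-Xmx4g");  // throws IllegalArgumentException
  }
}
```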