Diffstat (limited to 'launcher/src/main/java/org/apache')
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java     3
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java   13
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java  19
3 files changed, 27 insertions(+), 8 deletions(-)
diff --git a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
index 7a5e37c501..c7488082ca 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
@@ -74,7 +74,8 @@ abstract class AbstractCommandBuilder {
* SparkLauncher constructor that takes an environment), and may be modified to
* include other variables needed by the process to be executed.
*/
- abstract List<String> buildCommand(Map<String, String> env) throws IOException;
+ abstract List<String> buildCommand(Map<String, String> env)
+ throws IOException, IllegalArgumentException;
/**
* Builds a list of arguments to run java.
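The only change to AbstractCommandBuilder is the IllegalArgumentException added to the buildCommand signature. Since IllegalArgumentException is unchecked, the new throws clause is documentation rather than a compile-time obligation, but callers can still catch it to turn a rejected Xmx setting into a clean error message. The following is a minimal, self-contained sketch and not part of the patch; the local CommandBuilder interface merely stands in for the package-private AbstractCommandBuilder, and the error text is an example.

import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class BuildCommandCallerSketch {
  // Local stand-in for AbstractCommandBuilder.buildCommand so the sketch compiles on its own.
  interface CommandBuilder {
    List<String> buildCommand(Map<String, String> env) throws IOException, IllegalArgumentException;
  }

  public static void main(String[] args) throws IOException {
    // Simulate a builder that rejects an Xmx setting coming from the environment.
    CommandBuilder builder = env -> {
      throw new IllegalArgumentException(
          "SPARK_DAEMON_JAVA_OPTS is not allowed to specify max heap(Xmx) memory settings");
    };
    try {
      List<String> cmd = builder.buildCommand(new HashMap<>());
      System.out.println(String.join(" ", cmd));
    } catch (IllegalArgumentException e) {
      // Surface the validation failure as a readable error instead of a stack trace.
      System.err.println("Error: " + e.getMessage());
    }
  }
}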
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
index 6b9d36cc0b..82b593a3f7 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
@@ -41,7 +41,8 @@ class SparkClassCommandBuilder extends AbstractCommandBuilder {
}
@Override
- public List<String> buildCommand(Map<String, String> env) throws IOException {
+ public List<String> buildCommand(Map<String, String> env)
+ throws IOException, IllegalArgumentException {
List<String> javaOptsKeys = new ArrayList<>();
String memKey = null;
String extraClassPath = null;
@@ -80,12 +81,18 @@ class SparkClassCommandBuilder extends AbstractCommandBuilder {
}
List<String> cmd = buildJavaCommand(extraClassPath);
+
for (String key : javaOptsKeys) {
- addOptionString(cmd, System.getenv(key));
+ String envValue = System.getenv(key);
+ if (!isEmpty(envValue) && envValue.contains("Xmx")) {
+ String msg = String.format("%s is not allowed to specify max heap(Xmx) memory settings " +
+ "(was %s). Use the corresponding configuration instead.", key, envValue);
+ throw new IllegalArgumentException(msg);
+ }
+ addOptionString(cmd, envValue);
}
String mem = firstNonEmpty(memKey != null ? System.getenv(memKey) : null, DEFAULT_MEM);
- cmd.add("-Xms" + mem);
cmd.add("-Xmx" + mem);
addPermGenSizeOpt(cmd);
cmd.add(className);
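In SparkClassCommandBuilder, each java-options environment variable is now validated before it is appended: any value containing "Xmx" is rejected with an IllegalArgumentException, and only -Xmx (no longer -Xms) is emitted from the memory key or the default. The sketch below is a simplified, standalone illustration, not the patched class: the env map stands in for System.getenv, addOptionString is reduced to a whitespace split, and the helper names are hypothetical.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

public class ClassCommandXmxCheckSketch {
  private static final String DEFAULT_MEM = "1g";

  static boolean isEmpty(String s) { return s == null || s.isEmpty(); }

  static List<String> buildJavaOpts(Map<String, String> env, List<String> javaOptsKeys, String memKey) {
    List<String> cmd = new ArrayList<>();
    for (String key : javaOptsKeys) {
      String envValue = env.get(key);  // stands in for System.getenv(key)
      if (!isEmpty(envValue) && envValue.contains("Xmx")) {
        // Reject heap settings smuggled in through the env var, mirroring the patch's message.
        throw new IllegalArgumentException(String.format(
            "%s is not allowed to specify max heap(Xmx) memory settings (was %s). "
                + "Use the corresponding configuration instead.", key, envValue));
      }
      if (!isEmpty(envValue)) {
        cmd.addAll(Arrays.asList(envValue.split("\\s+")));  // simplified addOptionString
      }
    }
    // Heap size comes only from the memory key or the default; note no -Xms is added.
    String mem = (memKey != null && !isEmpty(env.get(memKey))) ? env.get(memKey) : DEFAULT_MEM;
    cmd.add("-Xmx" + mem);
    return cmd;
  }

  public static void main(String[] args) {
    Map<String, String> env = Map.of("SPARK_DAEMON_JAVA_OPTS", "-Dk=v -Xmx4g");
    // Throws IllegalArgumentException because the env var tries to set -Xmx.
    buildJavaOpts(env, Arrays.asList("SPARK_DAEMON_JAVA_OPTS"), "SPARK_DAEMON_MEMORY");
  }
}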
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
index c31c42cd3a..6941ca903c 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
@@ -132,7 +132,8 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
}
@Override
- public List<String> buildCommand(Map<String, String> env) throws IOException {
+ public List<String> buildCommand(Map<String, String> env)
+ throws IOException, IllegalArgumentException {
if (PYSPARK_SHELL_RESOURCE.equals(appResource) && !printInfo) {
return buildPySparkShellCommand(env);
} else if (SPARKR_SHELL_RESOURCE.equals(appResource) && !printInfo) {
@@ -211,7 +212,8 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
return args;
}
- private List<String> buildSparkSubmitCommand(Map<String, String> env) throws IOException {
+ private List<String> buildSparkSubmitCommand(Map<String, String> env)
+ throws IOException, IllegalArgumentException {
// Load the properties file and check whether spark-submit will be running the app's driver
// or just launching a cluster app. When running the driver, the JVM's argument will be
// modified to cover the driver's configuration.
@@ -227,6 +229,16 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
addOptionString(cmd, System.getenv("SPARK_SUBMIT_OPTS"));
addOptionString(cmd, System.getenv("SPARK_JAVA_OPTS"));
+ // We don't want the client to specify Xmx. These have to be set by their corresponding
+ // memory flag --driver-memory or configuration entry spark.driver.memory
+ String driverExtraJavaOptions = config.get(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS);
+ if (!isEmpty(driverExtraJavaOptions) && driverExtraJavaOptions.contains("Xmx")) {
+ String msg = String.format("Not allowed to specify max heap(Xmx) memory settings through " +
+ "java options (was %s). Use the corresponding --driver-memory or " +
+ "spark.driver.memory configuration instead.", driverExtraJavaOptions);
+ throw new IllegalArgumentException(msg);
+ }
+
if (isClientMode) {
// Figuring out where the memory value come from is a little tricky due to precedence.
// Precedence is observed in the following order:
@@ -240,9 +252,8 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
isThriftServer(mainClass) ? System.getenv("SPARK_DAEMON_MEMORY") : null;
String memory = firstNonEmpty(tsMemory, config.get(SparkLauncher.DRIVER_MEMORY),
System.getenv("SPARK_DRIVER_MEMORY"), System.getenv("SPARK_MEM"), DEFAULT_MEM);
- cmd.add("-Xms" + memory);
cmd.add("-Xmx" + memory);
- addOptionString(cmd, config.get(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS));
+ addOptionString(cmd, driverExtraJavaOptions);
mergeEnvPathList(env, getLibPathEnvName(),
config.get(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH));
}
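SparkSubmitCommandBuilder applies the same rule to the driver: spark.driver.extraJavaOptions may not carry an Xmx setting, and the heap is sized solely from the memory precedence chain (spark.driver.memory, then SPARK_DRIVER_MEMORY, SPARK_MEM, and the default), again without an explicit -Xms. The sketch below is a simplified, self-contained illustration rather than the patched builder: a plain Map stands in for the loaded properties, client mode is assumed, and the Thrift-server SPARK_DAEMON_MEMORY special case is omitted.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class SubmitDriverMemorySketch {
  private static final String DEFAULT_MEM = "1g";

  static String firstNonEmpty(String... values) {
    for (String v : values) {
      if (v != null && !v.isEmpty()) return v;
    }
    return null;
  }

  static List<String> driverJvmArgs(Map<String, String> config, Map<String, String> env) {
    String driverExtraJavaOptions = config.get("spark.driver.extraJavaOptions");
    if (driverExtraJavaOptions != null && driverExtraJavaOptions.contains("Xmx")) {
      // Mirror the patch's check: heap size must come from --driver-memory / spark.driver.memory.
      throw new IllegalArgumentException(String.format(
          "Not allowed to specify max heap(Xmx) memory settings through java options (was %s). "
              + "Use the corresponding --driver-memory or spark.driver.memory configuration instead.",
          driverExtraJavaOptions));
    }
    String memory = firstNonEmpty(config.get("spark.driver.memory"),
        env.get("SPARK_DRIVER_MEMORY"), env.get("SPARK_MEM"), DEFAULT_MEM);
    List<String> cmd = new ArrayList<>();
    cmd.add("-Xmx" + memory);  // only -Xmx; the -Xms flag was dropped by this patch
    if (driverExtraJavaOptions != null && !driverExtraJavaOptions.isEmpty()) {
      cmd.add(driverExtraJavaOptions);  // simplified addOptionString
    }
    return cmd;
  }

  public static void main(String[] args) {
    // Legal: heap size set via spark.driver.memory.
    System.out.println(driverJvmArgs(Map.of("spark.driver.memory", "2g"), Map.of()));
    // Illegal: -Xmx smuggled into extraJavaOptions -> IllegalArgumentException.
    driverJvmArgs(Map.of("spark.driver.extraJavaOptions", "-Xmx8g"), Map.of());
  }
}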