path: root/launcher/src/main
Diffstat (limited to 'launcher/src/main')
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/Main.java                      | 83
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java | 18
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java   |  2
3 files changed, 59 insertions(+), 44 deletions(-)
diff --git a/launcher/src/main/java/org/apache/spark/launcher/Main.java b/launcher/src/main/java/org/apache/spark/launcher/Main.java
index 929b29a49e..62492f9baf 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/Main.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/Main.java
@@ -53,21 +53,33 @@ class Main {
List<String> args = new ArrayList<String>(Arrays.asList(argsArray));
String className = args.remove(0);
- boolean printLaunchCommand;
- boolean printUsage;
+ boolean printLaunchCommand = !isEmpty(System.getenv("SPARK_PRINT_LAUNCH_COMMAND"));
AbstractCommandBuilder builder;
- try {
- if (className.equals("org.apache.spark.deploy.SparkSubmit")) {
+ if (className.equals("org.apache.spark.deploy.SparkSubmit")) {
+ try {
builder = new SparkSubmitCommandBuilder(args);
- } else {
- builder = new SparkClassCommandBuilder(className, args);
+ } catch (IllegalArgumentException e) {
+ printLaunchCommand = false;
+ System.err.println("Error: " + e.getMessage());
+ System.err.println();
+
+ MainClassOptionParser parser = new MainClassOptionParser();
+ try {
+ parser.parse(args);
+ } catch (Exception ignored) {
+ // Ignore parsing exceptions.
+ }
+
+ List<String> help = new ArrayList<String>();
+ if (parser.className != null) {
+ help.add(parser.CLASS);
+ help.add(parser.className);
+ }
+ help.add(parser.USAGE_ERROR);
+ builder = new SparkSubmitCommandBuilder(help);
}
- printLaunchCommand = !isEmpty(System.getenv("SPARK_PRINT_LAUNCH_COMMAND"));
- printUsage = false;
- } catch (IllegalArgumentException e) {
- builder = new UsageCommandBuilder(e.getMessage());
- printLaunchCommand = false;
- printUsage = true;
+ } else {
+ builder = new SparkClassCommandBuilder(className, args);
}
Map<String, String> env = new HashMap<String, String>();
@@ -78,13 +90,7 @@ class Main {
}
if (isWindows()) {
- // When printing the usage message, we can't use "cmd /v" since that prevents the env
- // variable from being seen in the caller script. So do not call prepareWindowsCommand().
- if (printUsage) {
- System.out.println(join(" ", cmd));
- } else {
- System.out.println(prepareWindowsCommand(cmd, env));
- }
+ System.out.println(prepareWindowsCommand(cmd, env));
} else {
// In bash, use NULL as the arg separator since it cannot be used in an argument.
List<String> bashCmd = prepareBashCommand(cmd, env);
@@ -135,33 +141,30 @@ class Main {
}
/**
- * Internal builder used when command line parsing fails. This will behave differently depending
- * on the platform:
- *
- * - On Unix-like systems, it will print a call to the "usage" function with two arguments: the
- * the error string, and the exit code to use. The function is expected to print the command's
- * usage and exit with the provided exit code. The script should use "export -f usage" after
- * declaring a function called "usage", so that the function is available to downstream scripts.
- *
- * - On Windows it will set the variable "SPARK_LAUNCHER_USAGE_ERROR" to the usage error message.
- * The batch script should check for this variable and print its usage, since batch scripts
- * don't really support the "export -f" functionality used in bash.
+ * A parser used when command line parsing fails for spark-submit. It's used as a best-effort
+ * at trying to identify the class the user wanted to invoke, since that may require special
+ * usage strings (handled by SparkSubmitArguments).
*/
- private static class UsageCommandBuilder extends AbstractCommandBuilder {
+ private static class MainClassOptionParser extends SparkSubmitOptionParser {
- private final String message;
+ String className;
- UsageCommandBuilder(String message) {
- this.message = message;
+ @Override
+ protected boolean handle(String opt, String value) {
+ if (opt == CLASS) {
+ className = value;
+ }
+ return false;
}
@Override
- public List<String> buildCommand(Map<String, String> env) {
- if (isWindows()) {
- return Arrays.asList("set", "SPARK_LAUNCHER_USAGE_ERROR=" + message);
- } else {
- return Arrays.asList("usage", message, "1");
- }
+ protected boolean handleUnknown(String opt) {
+ return false;
+ }
+
+ @Override
+ protected void handleExtraArgs(List<String> extra) {
+
}
}
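
Taken together, the new Main.java flow is: when SparkSubmitCommandBuilder rejects the arguments with an IllegalArgumentException, Main re-parses the same arguments with MainClassOptionParser purely to recover a "--class" value, then assembles a short argument list ending in "--usage-error" and hands it to a fresh SparkSubmitCommandBuilder, so the launched command prints a class-specific usage message. A minimal, self-contained sketch of that fallback assembly follows; the UsageFallbackSketch class and its findClassName helper are hypothetical stand-ins, while the flag names mirror the diff.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Hypothetical harness: recover "--class" from an argument list that failed to parse,
// then assemble the argument list handed to the second SparkSubmitCommandBuilder.
public class UsageFallbackSketch {

  // Stand-in for MainClassOptionParser: only remembers the value following "--class".
  static String findClassName(List<String> args) {
    for (int i = 0; i < args.size() - 1; i++) {
      if ("--class".equals(args.get(i))) {
        return args.get(i + 1);
      }
    }
    return null;
  }

  public static void main(String[] unused) {
    List<String> badArgs = Arrays.asList("--class", "com.example.MyApp", "--bogus-flag");

    List<String> help = new ArrayList<String>();
    String className = findClassName(badArgs);
    if (className != null) {
      help.add("--class");
      help.add(className);
    }
    help.add("--usage-error");

    System.out.println(help);  // prints [--class, com.example.MyApp, --usage-error]
  }
}

The printed list corresponds to the "help" list that the patched Main passes to the second builder, which is what replaces the old UsageCommandBuilder behavior.
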
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
index 7d387d406e..3e5a2820b6 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
@@ -77,6 +77,7 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
}
private final List<String> sparkArgs;
+ private final boolean printHelp;
/**
* Controls whether mixing spark-submit arguments with app arguments is allowed. This is needed
@@ -87,10 +88,11 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
SparkSubmitCommandBuilder() {
this.sparkArgs = new ArrayList<String>();
+ this.printHelp = false;
}
SparkSubmitCommandBuilder(List<String> args) {
- this();
+ this.sparkArgs = new ArrayList<String>();
List<String> submitArgs = args;
if (args.size() > 0 && args.get(0).equals(PYSPARK_SHELL)) {
this.allowsMixedArguments = true;
@@ -104,14 +106,16 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
this.allowsMixedArguments = false;
}
- new OptionParser().parse(submitArgs);
+ OptionParser parser = new OptionParser();
+ parser.parse(submitArgs);
+ this.printHelp = parser.helpRequested;
}
@Override
public List<String> buildCommand(Map<String, String> env) throws IOException {
- if (PYSPARK_SHELL_RESOURCE.equals(appResource)) {
+ if (PYSPARK_SHELL_RESOURCE.equals(appResource) && !printHelp) {
return buildPySparkShellCommand(env);
- } else if (SPARKR_SHELL_RESOURCE.equals(appResource)) {
+ } else if (SPARKR_SHELL_RESOURCE.equals(appResource) && !printHelp) {
return buildSparkRCommand(env);
} else {
return buildSparkSubmitCommand(env);
@@ -311,6 +315,8 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
private class OptionParser extends SparkSubmitOptionParser {
+ boolean helpRequested = false;
+
@Override
protected boolean handle(String opt, String value) {
if (opt.equals(MASTER)) {
@@ -341,6 +347,9 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
allowsMixedArguments = true;
appResource = specialClasses.get(value);
}
+ } else if (opt.equals(HELP) || opt.equals(USAGE_ERROR)) {
+ helpRequested = true;
+ sparkArgs.add(opt);
} else {
sparkArgs.add(opt);
if (value != null) {
@@ -360,6 +369,7 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
appArgs.add(opt);
return true;
} else {
+ checkArgument(!opt.startsWith("-"), "Unrecognized option: %s", opt);
sparkArgs.add(opt);
return false;
}
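
The SparkSubmitCommandBuilder changes do two things: a --help or --usage-error flag now sets helpRequested, which bypasses the pyspark and SparkR shell handling so buildSparkSubmitCommand() runs and the usage text can be printed; and handleUnknown() now rejects any unrecognized token that starts with "-" instead of silently forwarding it to spark-submit. The sketch below illustrates the second point; the HandleUnknownSketch class and its checkArgument stand-in are hypothetical (the real helper is a launcher utility method), and only the guard line mirrors the diff.

import java.util.ArrayList;
import java.util.List;

// Hypothetical sketch of the stricter handleUnknown() behavior: an unknown "-" option
// now fails fast with IllegalArgumentException instead of being passed along.
public class HandleUnknownSketch {

  // Stand-in for the launcher's checkArgument helper.
  static void checkArgument(boolean check, String msg, Object... args) {
    if (!check) {
      throw new IllegalArgumentException(String.format(msg, args));
    }
  }

  private final List<String> sparkArgs = new ArrayList<String>();
  private final List<String> appArgs = new ArrayList<String>();
  private boolean allowsMixedArguments = false;

  boolean handleUnknown(String opt) {
    if (allowsMixedArguments) {
      // Shell-like modes pass unknown tokens straight through to the application.
      appArgs.add(opt);
      return true;
    } else {
      checkArgument(!opt.startsWith("-"), "Unrecognized option: %s", opt);
      sparkArgs.add(opt);  // first non-option token: treated as the application resource
      return false;
    }
  }

  public static void main(String[] args) {
    HandleUnknownSketch parser = new HandleUnknownSketch();
    System.out.println(parser.handleUnknown("my-app.jar"));   // false: kept as a spark arg
    try {
      parser.handleUnknown("--not-a-real-option");
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());  // Unrecognized option: --not-a-real-option
    }
  }
}

Failing fast here is what lets Main catch the IllegalArgumentException and fall into the usage-error path shown above, rather than handing spark-submit arguments it would reject anyway.
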
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
index 2290000876..b88bba883a 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
@@ -61,6 +61,7 @@ class SparkSubmitOptionParser {
// Options that do not take arguments.
protected final String HELP = "--help";
protected final String SUPERVISE = "--supervise";
+ protected final String USAGE_ERROR = "--usage-error";
protected final String VERBOSE = "--verbose";
protected final String VERSION = "--version";
@@ -120,6 +121,7 @@ class SparkSubmitOptionParser {
final String[][] switches = {
{ HELP, "-h" },
{ SUPERVISE },
+ { USAGE_ERROR },
{ VERBOSE, "-v" },
{ VERSION },
};
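
On the parser side, the new flag only needs the USAGE_ERROR constant plus a row in the table of no-argument switches: SparkSubmitOptionParser matches each token against those rows and passes the canonical name to handle(), which is where the helpRequested flag from the previous file gets set. A rough, self-contained sketch of that kind of table-driven matching follows; the SwitchTableSketch class and its findSwitch helper are hypothetical, while the table rows mirror the diff.

// Hypothetical sketch of matching tokens against a table of no-argument switches:
// each row lists a canonical name plus optional short aliases, so adding a row
// (like { "--usage-error" }) is enough for the option to be recognized.
public class SwitchTableSketch {

  static final String[][] SWITCHES = {
    { "--help", "-h" },
    { "--supervise" },
    { "--usage-error" },
    { "--verbose", "-v" },
    { "--version" },
  };

  // Returns the canonical name if arg matches any alias in the table, else null.
  static String findSwitch(String arg) {
    for (String[] candidates : SWITCHES) {
      for (String name : candidates) {
        if (name.equals(arg)) {
          return candidates[0];
        }
      }
    }
    return null;
  }

  public static void main(String[] args) {
    System.out.println(findSwitch("--usage-error")); // --usage-error
    System.out.println(findSwitch("-h"));            // --help
    System.out.println(findSwitch("--bogus"));       // null
  }
}

In the sketch, only the canonical name in the first column is returned, so a caller can compare against constants such as HELP or USAGE_ERROR without worrying about short aliases.
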