author     Marcelo Vanzin <vanzin@cloudera.com>  2015-06-05 14:32:00 +0200
committer  Sean Owen <sowen@cloudera.com>        2015-06-05 14:32:00 +0200
commit     700312e12f9588f01a592d6eac7bff7eb366ac8f (patch)
tree       d0429a8bb9e78dd2970c12e332a2610f6898d313 /launcher
parent     019dc9f558cf7c0b708d3b1f0882b0c19134ffb6 (diff)
[SPARK-6324] [CORE] Centralize handling of script usage messages.
Reorganize code so that the launcher library handles most of the work of printing usage
messages, instead of having an awkward protocol between the library and the scripts for
that. This mostly applies to SparkSubmit, since the launcher lib does not do command line
parsing for classes invoked in other ways, and thus cannot handle failures for those. Most
scripts end up going through SparkSubmit, though, so it all works.

The change adds a new, internal command line switch, "--usage-error", which prints the
usage message and exits with a non-zero status. Scripts can override the command printed
in the usage message by setting an environment variable - this avoids having to grep the
output of SparkSubmit to remove references to the "spark-submit" script.

The only sub-optimal part of the change is the special handling for the spark-sql usage,
which is now done in SparkSubmitArguments.

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #5841 from vanzin/SPARK-6324 and squashes the following commits:

2821481 [Marcelo Vanzin] Merge branch 'master' into SPARK-6324
bf139b5 [Marcelo Vanzin] Filter output of Spark SQL CLI help.
c6609bf [Marcelo Vanzin] Fix exit code never being used when printing usage messages.
6bc1b41 [Marcelo Vanzin] [SPARK-6324] [core] Centralize handling of script usage messages.
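As a rough, self-contained illustration of that fallback: when building the spark-submit
command fails, the launcher reports the error and then rebuilds a command that only asks
SparkSubmit to print its usage and exit with a non-zero status. The names below
(UsageFallbackSketch, buildSubmitCommand) are made up for this sketch and are not part of
the launcher API; only the "--class" and "--usage-error" option names come from the patch.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class UsageFallbackSketch {

  // Stand-in for SparkSubmitCommandBuilder: accepts only the options this sketch knows.
  static List<String> buildSubmitCommand(List<String> args) {
    List<String> known = Arrays.asList("--class", "--usage-error");
    for (String arg : args) {
      if (arg.startsWith("-") && !known.contains(arg)) {
        throw new IllegalArgumentException("Unrecognized option: " + arg);
      }
    }
    return new ArrayList<String>(args);
  }

  public static void main(String[] argv) {
    List<String> userArgs = Arrays.asList("--class", "com.example.App", "--bogus");
    List<String> cmd;
    try {
      cmd = buildSubmitCommand(userArgs);
    } catch (IllegalArgumentException e) {
      // Report the error, then build a command that only asks SparkSubmit to print
      // its usage message and exit with a non-zero status.
      System.err.println("Error: " + e.getMessage());
      List<String> help = new ArrayList<String>();
      help.add("--class");
      help.add("com.example.App");   // best-effort guess at the class the user asked for
      help.add("--usage-error");
      cmd = buildSubmitCommand(help);
    }
    System.out.println(cmd);   // prints [--class, com.example.App, --usage-error]
  }
}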
Diffstat (limited to 'launcher')
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/Main.java                       83
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java  18
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java     2
3 files changed, 59 insertions(+), 44 deletions(-)
diff --git a/launcher/src/main/java/org/apache/spark/launcher/Main.java b/launcher/src/main/java/org/apache/spark/launcher/Main.java
index 929b29a49e..62492f9baf 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/Main.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/Main.java
@@ -53,21 +53,33 @@ class Main {
List<String> args = new ArrayList<String>(Arrays.asList(argsArray));
String className = args.remove(0);
- boolean printLaunchCommand;
- boolean printUsage;
+ boolean printLaunchCommand = !isEmpty(System.getenv("SPARK_PRINT_LAUNCH_COMMAND"));
AbstractCommandBuilder builder;
- try {
- if (className.equals("org.apache.spark.deploy.SparkSubmit")) {
+ if (className.equals("org.apache.spark.deploy.SparkSubmit")) {
+ try {
builder = new SparkSubmitCommandBuilder(args);
- } else {
- builder = new SparkClassCommandBuilder(className, args);
+ } catch (IllegalArgumentException e) {
+ printLaunchCommand = false;
+ System.err.println("Error: " + e.getMessage());
+ System.err.println();
+
+ MainClassOptionParser parser = new MainClassOptionParser();
+ try {
+ parser.parse(args);
+ } catch (Exception ignored) {
+ // Ignore parsing exceptions.
+ }
+
+ List<String> help = new ArrayList<String>();
+ if (parser.className != null) {
+ help.add(parser.CLASS);
+ help.add(parser.className);
+ }
+ help.add(parser.USAGE_ERROR);
+ builder = new SparkSubmitCommandBuilder(help);
}
- printLaunchCommand = !isEmpty(System.getenv("SPARK_PRINT_LAUNCH_COMMAND"));
- printUsage = false;
- } catch (IllegalArgumentException e) {
- builder = new UsageCommandBuilder(e.getMessage());
- printLaunchCommand = false;
- printUsage = true;
+ } else {
+ builder = new SparkClassCommandBuilder(className, args);
}
Map<String, String> env = new HashMap<String, String>();
@@ -78,13 +90,7 @@ class Main {
}
if (isWindows()) {
- // When printing the usage message, we can't use "cmd /v" since that prevents the env
- // variable from being seen in the caller script. So do not call prepareWindowsCommand().
- if (printUsage) {
- System.out.println(join(" ", cmd));
- } else {
- System.out.println(prepareWindowsCommand(cmd, env));
- }
+ System.out.println(prepareWindowsCommand(cmd, env));
} else {
// In bash, use NULL as the arg separator since it cannot be used in an argument.
List<String> bashCmd = prepareBashCommand(cmd, env);
@@ -135,33 +141,30 @@ class Main {
}
/**
- * Internal builder used when command line parsing fails. This will behave differently depending
- * on the platform:
- *
- * - On Unix-like systems, it will print a call to the "usage" function with two arguments: the
- * the error string, and the exit code to use. The function is expected to print the command's
- * usage and exit with the provided exit code. The script should use "export -f usage" after
- * declaring a function called "usage", so that the function is available to downstream scripts.
- *
- * - On Windows it will set the variable "SPARK_LAUNCHER_USAGE_ERROR" to the usage error message.
- * The batch script should check for this variable and print its usage, since batch scripts
- * don't really support the "export -f" functionality used in bash.
+ * A parser used when command line parsing fails for spark-submit. It's used as a best-effort
+ * at trying to identify the class the user wanted to invoke, since that may require special
+ * usage strings (handled by SparkSubmitArguments).
*/
- private static class UsageCommandBuilder extends AbstractCommandBuilder {
+ private static class MainClassOptionParser extends SparkSubmitOptionParser {
- private final String message;
+ String className;
- UsageCommandBuilder(String message) {
- this.message = message;
+ @Override
+ protected boolean handle(String opt, String value) {
+ if (opt == CLASS) {
+ className = value;
+ }
+ return false;
}
@Override
- public List<String> buildCommand(Map<String, String> env) {
- if (isWindows()) {
- return Arrays.asList("set", "SPARK_LAUNCHER_USAGE_ERROR=" + message);
- } else {
- return Arrays.asList("usage", message, "1");
- }
+ protected boolean handleUnknown(String opt) {
+ return false;
+ }
+
+ @Override
+ protected void handleExtraArgs(List<String> extra) {
+
}
}
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
index 7d387d406e..3e5a2820b6 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
@@ -77,6 +77,7 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
}
private final List<String> sparkArgs;
+ private final boolean printHelp;
/**
* Controls whether mixing spark-submit arguments with app arguments is allowed. This is needed
@@ -87,10 +88,11 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
SparkSubmitCommandBuilder() {
this.sparkArgs = new ArrayList<String>();
+ this.printHelp = false;
}
SparkSubmitCommandBuilder(List<String> args) {
- this();
+ this.sparkArgs = new ArrayList<String>();
List<String> submitArgs = args;
if (args.size() > 0 && args.get(0).equals(PYSPARK_SHELL)) {
this.allowsMixedArguments = true;
@@ -104,14 +106,16 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
this.allowsMixedArguments = false;
}
- new OptionParser().parse(submitArgs);
+ OptionParser parser = new OptionParser();
+ parser.parse(submitArgs);
+ this.printHelp = parser.helpRequested;
}
@Override
public List<String> buildCommand(Map<String, String> env) throws IOException {
- if (PYSPARK_SHELL_RESOURCE.equals(appResource)) {
+ if (PYSPARK_SHELL_RESOURCE.equals(appResource) && !printHelp) {
return buildPySparkShellCommand(env);
- } else if (SPARKR_SHELL_RESOURCE.equals(appResource)) {
+ } else if (SPARKR_SHELL_RESOURCE.equals(appResource) && !printHelp) {
return buildSparkRCommand(env);
} else {
return buildSparkSubmitCommand(env);
@@ -311,6 +315,8 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
private class OptionParser extends SparkSubmitOptionParser {
+ boolean helpRequested = false;
+
@Override
protected boolean handle(String opt, String value) {
if (opt.equals(MASTER)) {
@@ -341,6 +347,9 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
allowsMixedArguments = true;
appResource = specialClasses.get(value);
}
+ } else if (opt.equals(HELP) || opt.equals(USAGE_ERROR)) {
+ helpRequested = true;
+ sparkArgs.add(opt);
} else {
sparkArgs.add(opt);
if (value != null) {
@@ -360,6 +369,7 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
appArgs.add(opt);
return true;
} else {
+ checkArgument(!opt.startsWith("-"), "Unrecognized option: %s", opt);
sparkArgs.add(opt);
return false;
}
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
index 2290000876..b88bba883a 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
@@ -61,6 +61,7 @@ class SparkSubmitOptionParser {
// Options that do not take arguments.
protected final String HELP = "--help";
protected final String SUPERVISE = "--supervise";
+ protected final String USAGE_ERROR = "--usage-error";
protected final String VERBOSE = "--verbose";
protected final String VERSION = "--version";
@@ -120,6 +121,7 @@ class SparkSubmitOptionParser {
final String[][] switches = {
{ HELP, "-h" },
{ SUPERVISE },
+ { USAGE_ERROR },
{ VERBOSE, "-v" },
{ VERSION },
};
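
For context on how a switch like "--usage-error" is consumed, here is a simplified,
self-contained sketch of the callback-style parsing that SparkSubmitOptionParser
subclasses rely on. The real parser also resolves aliases through the opts/switches
tables and lets handle() return false to stop parsing early; SwitchParserSketch and
MiniParser are illustrative names, not launcher classes.

import java.util.Arrays;
import java.util.List;

public class SwitchParserSketch {

  // Simplified parser: "--class" takes a value, "--usage-error" is a bare switch,
  // and every recognized option is delivered to handle(), as in the launcher's parser.
  static class MiniParser {
    static final String CLASS = "--class";
    static final String USAGE_ERROR = "--usage-error";

    String className;
    boolean usageError;

    void parse(List<String> args) {
      for (int i = 0; i < args.size(); i++) {
        String opt = args.get(i);
        String value = null;
        if (opt.equals(CLASS) && i + 1 < args.size()) {
          value = args.get(++i);   // options with arguments consume the next token
        }
        if (!handle(opt, value)) {
          return;                  // a subclass returning false stops parsing early
        }
      }
    }

    protected boolean handle(String opt, String value) {
      if (opt.equals(CLASS)) {
        className = value;
      } else if (opt.equals(USAGE_ERROR)) {
        usageError = true;
      }
      return true;
    }
  }

  public static void main(String[] args) {
    MiniParser parser = new MiniParser();
    parser.parse(Arrays.asList(MiniParser.CLASS, "com.example.App", MiniParser.USAGE_ERROR));
    System.out.println("class=" + parser.className + ", usageError=" + parser.usageError);
  }
}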