aboutsummaryrefslogtreecommitdiff
path: root/core
diff options
context:
space:
mode:
authorCheng Lian <lian.cs.zju@gmail.com>2014-08-06 12:28:35 -0700
committerPatrick Wendell <pwendell@gmail.com>2014-08-06 12:28:35 -0700
commita6cd31108f0d73ce6823daafe8447677e03cfd13 (patch)
treeb37ec4045b01db8d5f5635fe77e6b2a50d082830 /core
parent48789117c2dd6d38e0bd8d21cdbcb989913205a6 (diff)
downloadspark-a6cd31108f0d73ce6823daafe8447677e03cfd13.tar.gz
spark-a6cd31108f0d73ce6823daafe8447677e03cfd13.tar.bz2
spark-a6cd31108f0d73ce6823daafe8447677e03cfd13.zip
[SPARK-2678][Core][SQL] A workaround for SPARK-2678
JIRA issues: - Main: [SPARK-2678](https://issues.apache.org/jira/browse/SPARK-2678) - Related: [SPARK-2874](https://issues.apache.org/jira/browse/SPARK-2874) Related PR: - #1715 This PR is both a fix for SPARK-2874 and a workaround for SPARK-2678. Fixing SPARK-2678 completely requires some API level changes that need further discussion, and we decided not to include it in Spark 1.1 release. As currently SPARK-2678 only affects Spark SQL scripts, this workaround is enough for Spark 1.1. Command line option handling logic in bash scripts looks somewhat dirty and duplicated, but it helps to provide a cleaner user interface as well as retain full downward compatibility for now. Author: Cheng Lian <lian.cs.zju@gmail.com> Closes #1801 from liancheng/spark-2874 and squashes the following commits: 8045d7a [Cheng Lian] Make sure test suites pass 8493a9e [Cheng Lian] Using eval to retain quoted arguments aed523f [Cheng Lian] Fixed typo in bin/spark-sql f12a0b1 [Cheng Lian] Worked around SPARK-2678 daee105 [Cheng Lian] Fixed usage messages of all Spark SQL related scripts
Diffstat (limited to 'core')
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala39
-rw-r--r--core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala12
2 files changed, 26 insertions, 25 deletions
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index 9391f24e71..087dd4d633 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -220,6 +220,7 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
/** Fill in values by parsing user options. */
private def parseOpts(opts: Seq[String]): Unit = {
var inSparkOpts = true
+ val EQ_SEPARATED_OPT="""(--[^=]+)=(.+)""".r
// Delineates parsing of Spark options from parsing of user options.
parse(opts)
@@ -322,33 +323,21 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
verbose = true
parse(tail)
+ case EQ_SEPARATED_OPT(opt, value) :: tail =>
+ parse(opt :: value :: tail)
+
+ case value :: tail if value.startsWith("-") =>
+ SparkSubmit.printErrorAndExit(s"Unrecognized option '$value'.")
+
case value :: tail =>
- if (inSparkOpts) {
- value match {
- // convert --foo=bar to --foo bar
- case v if v.startsWith("--") && v.contains("=") && v.split("=").size == 2 =>
- val parts = v.split("=")
- parse(Seq(parts(0), parts(1)) ++ tail)
- case v if v.startsWith("-") =>
- val errMessage = s"Unrecognized option '$value'."
- SparkSubmit.printErrorAndExit(errMessage)
- case v =>
- primaryResource =
- if (!SparkSubmit.isShell(v) && !SparkSubmit.isInternal(v)) {
- Utils.resolveURI(v).toString
- } else {
- v
- }
- inSparkOpts = false
- isPython = SparkSubmit.isPython(v)
- parse(tail)
+ primaryResource =
+ if (!SparkSubmit.isShell(value) && !SparkSubmit.isInternal(value)) {
+ Utils.resolveURI(value).toString
+ } else {
+ value
}
- } else {
- if (!value.isEmpty) {
- childArgs += value
- }
- parse(tail)
- }
+ isPython = SparkSubmit.isPython(value)
+ childArgs ++= tail
case Nil =>
}
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index a5cdcfb5de..7e1ef80c84 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -106,6 +106,18 @@ class SparkSubmitSuite extends FunSuite with Matchers {
appArgs.childArgs should be (Seq("some", "--weird", "args"))
}
+ test("handles arguments to user program with name collision") {
+ val clArgs = Seq(
+ "--name", "myApp",
+ "--class", "Foo",
+ "userjar.jar",
+ "--master", "local",
+ "some",
+ "--weird", "args")
+ val appArgs = new SparkSubmitArguments(clArgs)
+ appArgs.childArgs should be (Seq("--master", "local", "some", "--weird", "args"))
+ }
+
test("handles YARN cluster mode") {
val clArgs = Seq(
"--deploy-mode", "cluster",