author     Marcelo Vanzin <vanzin@cloudera.com>   2016-05-10 10:35:54 -0700
committer  Marcelo Vanzin <vanzin@cloudera.com>   2016-05-10 10:35:54 -0700
commit     0b9cae42426e14060bc6182c037fd715f35a2d23 (patch)
tree       fbf356424566ce8bd027fa034f0be6e58ce73066 /core
parent     36c5892b4631d1d5d70e569fce12efedd82bdef0 (diff)
[SPARK-11249][LAUNCHER] Throw error if app resource is not provided.
Without this, the code would build an invalid spark-submit command line, and a more cryptic error would be presented to the user. Also, expose a constant that allows users to set a dummy resource in cases where they don't need an actual resource file; for backwards compatibility, that uses the same "spark-internal" resource that Spark itself uses.

Tested via unit tests, run-example, spark-shell, and running the thrift server with mixed spark and hive command line arguments.

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #12909 from vanzin/SPARK-11249.
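For context, the new SparkLauncher.NO_RESOURCE constant is meant to be used roughly as in the updated tests below. A minimal Scala sketch (the master URL and main class are illustrative placeholders, not part of this patch):

    import org.apache.spark.launcher.SparkLauncher

    // Launch a class that is already on the configured classpath, so no user
    // jar is needed; NO_RESOURCE stands in for the "spark-internal" resource.
    val handle = new SparkLauncher()
      .setMaster("local")                          // illustrative master
      .setAppResource(SparkLauncher.NO_RESOURCE)   // constant exposed by this change
      .setMainClass("com.example.MyApp")           // hypothetical main class
      .startApplication()

Per the commit message, omitting setAppResource(...) entirely now raises an error up front instead of building an invalid spark-submit command line.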
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala             9
-rw-r--r--  core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java      2
-rw-r--r--  core/src/test/scala/org/apache/spark/launcher/LauncherBackendSuite.scala  2
3 files changed, 5 insertions(+), 8 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 755c4b6ec1..9075e3eb3f 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -43,6 +43,7 @@ import org.apache.ivy.plugins.resolver.{ChainResolver, FileSystemResolver, IBibl
import org.apache.spark.{SPARK_VERSION, SparkException, SparkUserAppException}
import org.apache.spark.api.r.RUtils
import org.apache.spark.deploy.rest._
+import org.apache.spark.launcher.SparkLauncher
import org.apache.spark.util.{ChildFirstURLClassLoader, MutableURLClassLoader, Utils}
@@ -75,10 +76,6 @@ object SparkSubmit {
private val CLUSTER = 2
private val ALL_DEPLOY_MODES = CLIENT | CLUSTER
- // A special jar name that indicates the class being run is inside of Spark itself, and therefore
- // no user jar is needed.
- private val SPARK_INTERNAL = "spark-internal"
-
// Special primary resource names that represent shells rather than application jars.
private val SPARK_SHELL = "spark-shell"
private val PYSPARK_SHELL = "pyspark-shell"
@@ -575,7 +572,7 @@ object SparkSubmit {
childArgs += ("--primary-r-file", mainFile)
childArgs += ("--class", "org.apache.spark.deploy.RRunner")
} else {
- if (args.primaryResource != SPARK_INTERNAL) {
+ if (args.primaryResource != SparkLauncher.NO_RESOURCE) {
childArgs += ("--jar", args.primaryResource)
}
childArgs += ("--class", args.mainClass)
@@ -795,7 +792,7 @@ object SparkSubmit {
}
private[deploy] def isInternal(res: String): Boolean = {
- res == SPARK_INTERNAL
+ res == SparkLauncher.NO_RESOURCE
}
/**
diff --git a/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java b/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
index 3e47bfc274..8ca54b24d8 100644
--- a/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
+++ b/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
@@ -94,7 +94,7 @@ public class SparkLauncherSuite {
SparkLauncher launcher = new SparkLauncher(env)
.setSparkHome(System.getProperty("spark.test.home"))
.setMaster("local")
- .setAppResource("spark-internal")
+ .setAppResource(SparkLauncher.NO_RESOURCE)
.addSparkArg(opts.CONF,
String.format("%s=-Dfoo=ShouldBeOverriddenBelow", SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS))
.setConf(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS,
diff --git a/core/src/test/scala/org/apache/spark/launcher/LauncherBackendSuite.scala b/core/src/test/scala/org/apache/spark/launcher/LauncherBackendSuite.scala
index 713560d3dd..cac15a1dc4 100644
--- a/core/src/test/scala/org/apache/spark/launcher/LauncherBackendSuite.scala
+++ b/core/src/test/scala/org/apache/spark/launcher/LauncherBackendSuite.scala
@@ -48,7 +48,7 @@ class LauncherBackendSuite extends SparkFunSuite with Matchers {
.setConf("spark.ui.enabled", "false")
.setConf(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, s"-Dtest.appender=console")
.setMaster(master)
- .setAppResource("spark-internal")
+ .setAppResource(SparkLauncher.NO_RESOURCE)
.setMainClass(TestApp.getClass.getName().stripSuffix("$"))
.startApplication()