author    Patrick Wendell <pwendell@gmail.com>  2014-04-24 23:59:16 -0700
committer Patrick Wendell <pwendell@gmail.com>  2014-04-24 23:59:16 -0700
commit    dc3b640a0ab3501b678b591be3e99fbcf3badbec (patch)
tree      2865c2a3cef66f061d846f6a968725e83728271b /core
parent    6e101f1183f92769779bc8ac14813c063bf1ff3f (diff)
SPARK-1619 Launch spark-shell with spark-submit
This simplifies the shell a bunch and passes all arguments through to spark-submit.

There is a tiny incompatibility from 0.9.1: you can't pass `-c` anymore, only `--cores`. However, spark-submit will give a good error message in this case, I don't think many people used this, and it's a trivial change for users.

Author: Patrick Wendell <pwendell@gmail.com>

Closes #542 from pwendell/spark-shell and squashes the following commits:

9eb3e6f [Patrick Wendell] Updating Spark docs
b552459 [Patrick Wendell] Andrew's feedback
97720fa [Patrick Wendell] Review feedback
aa2900b [Patrick Wendell] SPARK-1619 Launch spark-shell with spark-submit
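For example (an illustrative pair of invocations based on the message above; spark-submit's own help output governs in case of doubt):

  ./bin/spark-shell -c 4        # accepted in 0.9.1, now rejected with an error
  ./bin/spark-shell --cores 4   # arguments are forwarded to spark-submit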
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala           10
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala   2
2 files changed, 10 insertions, 2 deletions
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index a2efddbfe3..58aa6d951a 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -38,6 +38,12 @@ object SparkSubmit {
 
   private var clusterManager: Int = LOCAL
 
+  /**
+   * A special jar name that indicates the class being run is inside of Spark itself,
+   * and therefore no user jar is needed.
+   */
+  private val RESERVED_JAR_NAME = "spark-internal"
+
   def main(args: Array[String]) {
     val appArgs = new SparkSubmitArguments(args)
     if (appArgs.verbose) {
@@ -113,7 +119,9 @@ object SparkSubmit {
 
     if (!deployOnCluster) {
       childMainClass = appArgs.mainClass
-      childClasspath += appArgs.primaryResource
+      if (appArgs.primaryResource != RESERVED_JAR_NAME) {
+        childClasspath += appArgs.primaryResource
+      }
     } else if (clusterManager == YARN) {
       childMainClass = "org.apache.spark.deploy.yarn.Client"
       childArgs += ("--jar", appArgs.primaryResource)
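In effect, the new guard skips adding a user jar to the child classpath when the class being run ships inside Spark itself. A minimal standalone sketch of that logic (the object name and argument handling are assumed for illustration; this is not Spark's actual code):

import scala.collection.mutable.ArrayBuffer

object ClasspathGuardSketch {
  // Mirrors the reserved name introduced in the hunk above.
  private val RESERVED_JAR_NAME = "spark-internal"

  def main(args: Array[String]) {
    val childClasspath = ArrayBuffer[String]()
    val primaryResource = if (args.nonEmpty) args(0) else RESERVED_JAR_NAME

    // Only a real user jar lands on the child classpath; the reserved
    // marker means no user jar is needed at all.
    if (primaryResource != RESERVED_JAR_NAME) {
      childClasspath += primaryResource
    }
    println("childClasspath = " + childClasspath.mkString(":"))
  }
}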
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index c3e8c6b8c6..c545b093ac 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -107,7 +107,7 @@ private[spark] class SparkSubmitArguments(args: Array[String]) {
     deployMode = Option(deployMode).getOrElse(System.getenv("DEPLOY_MODE"))
 
     // Global defaults. These should be keep to minimum to avoid confusing behavior.
-    master = Option(master).getOrElse("local")
+    master = Option(master).getOrElse("local[*]")
   }
 
   /** Ensure that required fields exists. Call this only once all defaults are loaded. */
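The second change bumps the global default master from local (a single worker thread) to local[*] (one worker thread per available core). A minimal sketch of the same null-safe defaulting pattern (a standalone illustration with assumed names, not Spark's code):

object MasterDefaultSketch {
  def main(args: Array[String]) {
    // Option(x) is None when x is null, so a missing --master argument
    // (or unset environment variable) falls through to the default.
    val fromArgs: String = if (args.nonEmpty) args(0) else null
    val master = Option(fromArgs).getOrElse("local[*]")

    // local[*] asks Spark for one worker thread per available core,
    // instead of the single thread that plain "local" provides.
    println("master = " + master + ", available cores = " +
      Runtime.getRuntime.availableProcessors())
  }
}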