about · summary · refs · log · tree · commit · diff
path: root/yarn
diff options
context:
space:
mode:
author: Sandy Ryza <sandy@cloudera.com> 2014-02-26 10:00:02 -0600
committer: Thomas Graves <tgraves@apache.org> 2014-02-26 10:00:02 -0600
commit b8a1871953058c67b49b7f8455cbb417d5b50ab6 (patch)
tree 9c1029df66bb11cadbc6308550a6f1b559f92346 /yarn
parent c852201ce95c7c982ff3794c114427eb33e92922 (diff)
downloadspark-b8a1871953058c67b49b7f8455cbb417d5b50ab6.tar.gz
spark-b8a1871953058c67b49b7f8455cbb417d5b50ab6.tar.bz2
spark-b8a1871953058c67b49b7f8455cbb417d5b50ab6.zip
SPARK-1053. Don't require SPARK_YARN_APP_JAR
It looks like this just requires taking out the checks. I verified that, with the patch, I was able to run spark-shell through yarn without setting the environment variable. Author: Sandy Ryza <sandy@cloudera.com> Closes #553 from sryza/sandy-spark-1053 and squashes the following commits: b037676 [Sandy Ryza] SPARK-1053. Don't require SPARK_YARN_APP_JAR
Diffstat (limited to 'yarn')
-rw-r--r-- yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala | 4
-rw-r--r-- yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala | 3
-rw-r--r-- yarn/common/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala | 6
3 files changed, 5 insertions, 8 deletions
diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
index 1419f215c7..fe37168e5a 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
@@ -108,7 +108,7 @@ class ClientArguments(val args: Array[String], val sparkConf: SparkConf) {
args = tail
case Nil =>
- if (userJar == null || userClass == null) {
+ if (userClass == null) {
printUsageAndExit(1)
}
@@ -129,7 +129,7 @@ class ClientArguments(val args: Array[String], val sparkConf: SparkConf) {
System.err.println(
"Usage: org.apache.spark.deploy.yarn.Client [options] \n" +
"Options:\n" +
- " --jar JAR_PATH Path to your application's JAR file (required)\n" +
+ " --jar JAR_PATH Path to your application's JAR file (required in yarn-standalone mode)\n" +
" --class CLASS_NAME Name of your application's main class (required)\n" +
" --args ARGS Arguments to be passed to your application's main class.\n" +
" Mutliple invocations are possible, each will be passed in order.\n" +
diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
index 2db5744be1..24520bd21b 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
@@ -68,7 +68,8 @@ trait ClientBase extends Logging {
def validateArgs() = {
Map(
(System.getenv("SPARK_JAR") == null) -> "Error: You must set SPARK_JAR environment variable!",
- (args.userJar == null) -> "Error: You must specify a user jar!",
+ ((args.userJar == null && args.amClass == classOf[ApplicationMaster].getName) ->
+ "Error: You must specify a user jar when running in standalone mode!"),
(args.userClass == null) -> "Error: You must specify a user class!",
(args.numWorkers <= 0) -> "Error: You must specify at least 1 worker!",
(args.amMemory <= YarnAllocationHandler.MEMORY_OVERHEAD) -> ("Error: AM memory size must be" +
diff --git a/yarn/common/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala b/yarn/common/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala
index 22e55e0c60..e7130d2407 100644
--- a/yarn/common/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala
@@ -44,10 +44,6 @@ private[spark] class YarnClientSchedulerBackend(
override def start() {
super.start()
- val userJar = System.getenv("SPARK_YARN_APP_JAR")
- if (userJar == null)
- throw new SparkException("env SPARK_YARN_APP_JAR is not set")
-
val driverHost = conf.get("spark.driver.host")
val driverPort = conf.get("spark.driver.port")
val hostport = driverHost + ":" + driverPort
@@ -55,7 +51,7 @@ private[spark] class YarnClientSchedulerBackend(
val argsArrayBuf = new ArrayBuffer[String]()
argsArrayBuf += (
"--class", "notused",
- "--jar", userJar,
+ "--jar", null,
"--args", hostport,
"--master-class", "org.apache.spark.deploy.yarn.WorkerLauncher"
)