author    Patrick Wendell <pwendell@gmail.com>  2014-01-07 23:56:04 -0800
committer Patrick Wendell <pwendell@gmail.com>  2014-01-08 00:09:12 -0800
commit    c78b381e91c9902a1510a2ed4ec5c898b51adfe8
tree      e053e22560836a54d072b08604ddbba74934d8f8
parent    d0533f704681adccc8fe2b814dc9e5082646057a
Fixes
 core/src/main/scala/org/apache/spark/deploy/Client.scala                | 2 +-
 core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala | 2 +-
 docs/spark-standalone.md                                                | 5 +++--
 3 files changed, 5 insertions(+), 4 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/deploy/Client.scala b/core/src/main/scala/org/apache/spark/deploy/Client.scala
index 0475bb17c0..43b9b1cff9 100644
--- a/core/src/main/scala/org/apache/spark/deploy/Client.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/Client.scala
@@ -55,7 +55,7 @@ class DriverActor(master: String, response: Promise[(Boolean, String)]) extends
 /**
  * Executable utility for starting and terminating drivers inside of a standalone cluster.
  */
-object DriverClient {
+object Client {
   def main(args: Array[String]) {
     val driverArgs = new ClientArguments(args)
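
Note on the rename above: the submission entry point becomes `org.apache.spark.deploy.Client`, which the docs change below updates to match. A minimal, illustrative Scala sketch of the renamed object's shape (the body is a stand-in; the real object parses a ClientArguments and messages the standalone Master):

    package org.apache.spark.deploy

    // Illustrative stand-in only; the real Client builds a ClientArguments
    // from args and sends launch/kill requests to the Master.
    object Client {
      def main(args: Array[String]) {
        println("would submit driver with: " + args.mkString(" "))
      }
    }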
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
index a9cb998cc2..18885d7ca6 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
@@ -100,7 +100,7 @@ private[spark] class ExecutorRunner(
   def getCommandSeq = {
     val command = Command(appDesc.command.mainClass,
-      appDesc.command.arguments.map(substituteVariables), appDesc.command.environment)
+      appDesc.command.arguments.map(substituteVariables) ++ Seq(appId), appDesc.command.environment)
     CommandUtils.buildCommandSeq(command, memory, sparkHome.getAbsolutePath)
   }
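
The substance of this fix is the `++ Seq(appId)` append: after per-argument variable substitution, the application ID is handed to the executor process as one extra trailing argument. A self-contained Scala sketch of that append, with assumed placeholder names and values (the substitution rule and app-ID format here are illustrative, not taken from this change):

    object AppIdAppendSketch extends App {
      // Assumed stand-in for ExecutorRunner.substituteVariables.
      def substituteVariables(arg: String): String = arg match {
        case "{{CORES}}" => "4"                    // illustrative substitution
        case other       => other
      }

      val appId   = "app-20140108000912-0000"      // hypothetical app ID
      val rawArgs = Seq("--cores", "{{CORES}}")    // illustrative raw arguments

      // Before: rawArgs.map(substituteVariables)
      // After:  the app ID is appended as the final argument.
      val args = rawArgs.map(substituteVariables) ++ Seq(appId)
      println(args.mkString(" "))  // --cores 4 app-20140108000912-0000
    }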
diff --git a/docs/spark-standalone.md b/docs/spark-standalone.md
index 7da64749b7..ecd642cc60 100644
--- a/docs/spark-standalone.md
+++ b/docs/spark-standalone.md
@@ -151,19 +151,20 @@ You can also pass an option `-c <numCores>` to control the number of cores that
 You may also run your application entirely inside of the cluster by submitting your application driver using the submission client. The syntax for submitting applications is as follows:
 
-    ./spark-class org.apache.spark.deploy.client.DriverClient launch
+    ./spark-class org.apache.spark.deploy.Client launch
        [client-options] \
        <cluster-url> <application-jar-url> <main-class> \
        [application-options]
 
     cluster-url: The URL of the master node.
-    application-jar-url: Path to a bundled jar including your application and all dependencies. Currently, the URL must be visible from inside of your cluster, for instance, in an HDFS directory.
+    application-jar-url: Path to a bundled jar including your application and all dependencies. Currently, the URL must be globally visible inside of your cluster, for instance, an `hdfs://` path or a `file://` path that is present on all nodes.
     main-class: The entry point for your application.
 
     Client Options:
       --memory <count> (amount of memory, in MB, allocated for your driver program)
       --cores <count> (number of cores allocated for your driver program)
       --supervise (whether to automatically restart your driver on application or node failure)
+      --verbose (prints increased logging output)
 
 Keep in mind that your driver program will be executed on a remote worker machine. You can control the execution environment in the following ways:
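
Filled in with hypothetical values, the updated submission syntax reads like this (the master URL, jar path, and class name below are placeholders, not values from this change):

    ./spark-class org.apache.spark.deploy.Client launch \
       --memory 1024 --cores 2 --supervise --verbose \
       spark://master-host:7077 \
       hdfs://namenode:8020/user/me/app.jar \
       com.example.MyApp [application-options]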