aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorPrashant Sharma <prashant.s@imaginea.com>2014-01-02 18:54:08 +0530
committerPrashant Sharma <scrapcodes@gmail.com>2014-01-02 21:48:44 +0530
commit59e8009b8d5e51b6f776720de8c9ecb09e1072dc (patch)
tree742334f855f265e007e9cac07c240c1e1bfe0d68
parenta3f90a2ecf14a01aa27fc95c133b1ff375583adb (diff)
downloadspark-59e8009b8d5e51b6f776720de8c9ecb09e1072dc.tar.gz
spark-59e8009b8d5e51b6f776720de8c9ecb09e1072dc.tar.bz2
spark-59e8009b8d5e51b6f776720de8c9ecb09e1072dc.zip
A few leftover documentation changes
-rw-r--r--docs/spark-standalone.md2
-rwxr-xr-xec2/spark_ec2.py2
-rwxr-xr-xmake-distribution.sh4
3 files changed, 4 insertions, 4 deletions
diff --git a/docs/spark-standalone.md b/docs/spark-standalone.md
index ec5ae5b4f7..baa0a062f7 100644
--- a/docs/spark-standalone.md
+++ b/docs/spark-standalone.md
@@ -20,7 +20,7 @@ then modify `conf/spark-env.sh` in the `dist/` directory before deploying to all
You can start a standalone master server by executing:
- ./bin/start-master.sh
+ ./sbin/start-master.sh
Once started, the master will print out a `spark://HOST:PORT` URL for itself, which you can use to connect workers to it,
or pass as the "master" argument to `SparkContext`. You can also find this URL on
diff --git a/ec2/spark_ec2.py b/ec2/spark_ec2.py
index a2b0e7e7f4..d82a1e1490 100755
--- a/ec2/spark_ec2.py
+++ b/ec2/spark_ec2.py
@@ -436,7 +436,7 @@ def setup_cluster(conn, master_nodes, slave_nodes, opts, deploy_ssh_key):
def setup_standalone_cluster(master, slave_nodes, opts):
slave_ips = '\n'.join([i.public_dns_name for i in slave_nodes])
ssh(master, opts, "echo \"%s\" > spark/conf/slaves" % (slave_ips))
- ssh(master, opts, "/root/spark/bin/start-all.sh")
+ ssh(master, opts, "/root/spark/sbin/start-all.sh")
def setup_spark_cluster(master, opts):
ssh(master, opts, "chmod u+x spark-ec2/setup.sh")
diff --git a/make-distribution.sh b/make-distribution.sh
index 8765c7e620..0463d14762 100755
--- a/make-distribution.sh
+++ b/make-distribution.sh
@@ -31,9 +31,9 @@
#
# Recommended deploy/testing procedure (standalone mode):
# 1) Rsync / deploy the dist/ dir to one host
-# 2) cd to deploy dir; ./bin/start-master.sh
+# 2) cd to deploy dir; ./sbin/start-master.sh
# 3) Verify master is up by visiting web page, ie http://master-ip:8080. Note the spark:// URL.
-# 4) ./bin/start-slave.sh 1 <<spark:// URL>>
+# 4) ./sbin/start-slave.sh 1 <<spark:// URL>>
# 5) MASTER="spark://my-master-ip:7077" ./bin/spark-shell
#