author    Charles Yeh <charlesyeh@dropbox.com>  2015-11-09 13:22:05 +0100
committer Sean Owen <sowen@cloudera.com>        2015-11-09 13:22:05 +0100
commit    9e48cdfbdecc9554a425ba35c0252910fd1e8faa (patch)
tree      c064ec4f2c51cda5e2f4df2c9d156b6f2cff6fbb /sbin
parent    d8b50f70298dbf45e91074ee2d751fee7eecb119 (diff)
[SPARK-11218][CORE] show help messages for start-slave and start-master
Addressing https://issues.apache.org/jira/browse/SPARK-11218, mostly copied from start-thriftserver.sh.

```
charlesyeh-mbp:spark charlesyeh$ ./sbin/start-master.sh --help
Usage: Master [options]

Options:
  -i HOST, --ip HOST      Hostname to listen on (deprecated, please use --host or -h)
  -h HOST, --host HOST    Hostname to listen on
  -p PORT, --port PORT    Port to listen on (default: 7077)
  --webui-port PORT       Port for web UI (default: 8080)
  --properties-file FILE  Path to a custom Spark properties file.
                          Default is conf/spark-defaults.conf.
```

```
charlesyeh-mbp:spark charlesyeh$ ./sbin/start-slave.sh
Usage: Worker [options] <master>

Master must be a URL of the form spark://hostname:port

Options:
  -c CORES, --cores CORES  Number of cores to use
  -m MEM, --memory MEM     Amount of memory to use (e.g. 1000M, 2G)
  -d DIR, --work-dir DIR   Directory to run apps in (default: SPARK_HOME/work)
  -i HOST, --ip IP         Hostname to listen on (deprecated, please use --host or -h)
  -h HOST, --host HOST     Hostname to listen on
  -p PORT, --port PORT     Port to listen on (default: random)
  --webui-port PORT        Port for web UI (default: 8081)
  --properties-file FILE   Path to a custom Spark properties file.
                           Default is conf/spark-defaults.conf.
```

Author: Charles Yeh <charlesyeh@dropbox.com>

Closes #9432 from CharlesYeh/helpmsg.
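Both scripts follow the same help-message pattern: detect `--help`/`-h` (or, for start-slave.sh, a missing `<master>` argument), print a one-line usage string, then delegate to `spark-class <CLASS> --help` and strip the class's own "Usage:" header plus log4j/JVM startup noise from the output. A condensed sketch of that pattern, taken from the start-master.sh hunk below (start-slave.sh is the same apart from the Worker class and usage string):

```
# Condensed from the start-master.sh change below; start-slave.sh differs only
# in CLASS (org.apache.spark.deploy.worker.Worker) and the usage string.
CLASS="org.apache.spark.deploy.master.Master"

if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
  echo "Usage: ./sbin/start-master.sh [options]"
  # Lines to strip from the delegated --help output: the class's own usage
  # header plus log4j / signal-handler startup noise.
  pattern="Usage:"
  pattern+="\|Using Spark's default log4j profile:"
  pattern+="\|Registered signal handlers for"

  "${SPARK_HOME}"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
  exit 1
fi
```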
Diffstat (limited to 'sbin')
-rwxr-xr-x  sbin/start-master.sh  24
-rwxr-xr-x  sbin/start-slave.sh   24
2 files changed, 34 insertions(+), 14 deletions(-)
diff --git a/sbin/start-master.sh b/sbin/start-master.sh
index c20e19a841..9f2e14dff6 100755
--- a/sbin/start-master.sh
+++ b/sbin/start-master.sh
@@ -23,6 +23,20 @@ if [ -z "${SPARK_HOME}" ]; then
export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
fi
+# NOTE: This exact class name is matched downstream by SparkSubmit.
+# Any changes need to be reflected there.
+CLASS="org.apache.spark.deploy.master.Master"
+
+if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
+ echo "Usage: ./sbin/start-master.sh [options]"
+ pattern="Usage:"
+ pattern+="\|Using Spark's default log4j profile:"
+ pattern+="\|Registered signal handlers for"
+
+ "${SPARK_HOME}"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
+ exit 1
+fi
+
ORIGINAL_ARGS="$@"
START_TACHYON=false
@@ -30,7 +44,7 @@ START_TACHYON=false
while (( "$#" )); do
case $1 in
--with-tachyon)
- if [ ! -e "$sbin"/../tachyon/bin/tachyon ]; then
+ if [ ! -e "${SPARK_HOME}"/tachyon/bin/tachyon ]; then
echo "Error: --with-tachyon specified, but tachyon not found."
exit -1
fi
@@ -56,12 +70,12 @@ if [ "$SPARK_MASTER_WEBUI_PORT" = "" ]; then
SPARK_MASTER_WEBUI_PORT=8080
fi
-"${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 \
+"${SPARK_HOME}/sbin"/spark-daemon.sh start $CLASS 1 \
--ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT \
$ORIGINAL_ARGS
if [ "$START_TACHYON" == "true" ]; then
- "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon bootstrap-conf $SPARK_MASTER_IP
- "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon format -s
- "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon-start.sh master
+ "${SPARK_HOME}"/tachyon/bin/tachyon bootstrap-conf $SPARK_MASTER_IP
+ "${SPARK_HOME}"/tachyon/bin/tachyon format -s
+ "${SPARK_HOME}"/tachyon/bin/tachyon-start.sh master
fi
diff --git a/sbin/start-slave.sh b/sbin/start-slave.sh
index 21455648d1..8c268b8859 100755
--- a/sbin/start-slave.sh
+++ b/sbin/start-slave.sh
@@ -31,18 +31,24 @@
# worker. Subsequent workers will increment this
# number. Default is 8081.
-usage="Usage: start-slave.sh <spark-master-URL> where <spark-master-URL> is like spark://localhost:7077"
-
-if [ $# -lt 1 ]; then
- echo $usage
- echo Called as start-slave.sh $*
- exit 1
-fi
-
if [ -z "${SPARK_HOME}" ]; then
export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
fi
+# NOTE: This exact class name is matched downstream by SparkSubmit.
+# Any changes need to be reflected there.
+CLASS="org.apache.spark.deploy.worker.Worker"
+
+if [[ $# -lt 1 ]] || [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
+ echo "Usage: ./sbin/start-slave.sh [options] <master>"
+ pattern="Usage:"
+ pattern+="\|Using Spark's default log4j profile:"
+ pattern+="\|Registered signal handlers for"
+
+ "${SPARK_HOME}"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
+ exit 1
+fi
+
. "${SPARK_HOME}/sbin/spark-config.sh"
. "${SPARK_HOME}/bin/load-spark-env.sh"
@@ -72,7 +78,7 @@ function start_instance {
fi
WEBUI_PORT=$(( $SPARK_WORKER_WEBUI_PORT + $WORKER_NUM - 1 ))
- "${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker $WORKER_NUM \
+ "${SPARK_HOME}/sbin"/spark-daemon.sh start $CLASS $WORKER_NUM \
--webui-port "$WEBUI_PORT" $PORT_FLAG $PORT_NUM $MASTER "$@"
}
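As a quick check of the new behavior (run from the Spark root, as in the commit message; output should match the usage blocks quoted above):

```
./sbin/start-master.sh --help   # prints the Master options and exits with status 1
./sbin/start-slave.sh --help    # prints the Worker options and exits with status 1
./sbin/start-slave.sh           # missing <master> argument also triggers the usage text
```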