author     jerryshao <sshao@hortonworks.com>    2015-11-04 10:49:34 +0000
committer  Sean Owen <sowen@cloudera.com>       2015-11-04 10:49:34 +0000
commit     8aff36e91de0fee2f3f56c6d240bb203b5bb48ba (patch)
tree       0afdded361cb75e7658053953abfdb484da78ced /sbin
parent     2692bdb7dbf36d6247f595d5fd0cb9cda89e1fdd (diff)
[SPARK-2960][DEPLOY] Support executing Spark from symlinks (reopen)
This PR is based on the work of roji to support running Spark scripts from symlinks. Thanks for the great work, roji; would you mind taking a look at this PR?

Distributions such as HDP typically expose the Spark executables as symlinks placed on `PATH`, but the current scripts do not recursively resolve a symlink to its real path, so Spark fails to execute when invoked through one. This PR fixes that by finding the absolute path behind the symlink. Unlike the earlier attempt (https://github.com/apache/spark/pull/2386), it does not use `readlink -f`, because `-f` is not supported on Mac; instead it walks the link chain manually in a loop.

I've tested on Mac and Linux (CentOS), and both look fine. This PR did not fix the scripts under the `sbin` folder; I am not sure whether those need to be fixed as well. Please help review; any comment is greatly appreciated.

Author: jerryshao <sshao@hortonworks.com>
Author: Shay Rojansky <roji@roji.org>

Closes #8669 from jerryshao/SPARK-2960.
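The core of the fix is the loop the description mentions: resolve $0 through any chain of symlinks without `readlink -f`, which the BSD readlink shipped on Mac does not support. Below is a minimal sketch of that technique in POSIX sh; the variable names are illustrative, not lifted verbatim from the patch.

SOURCE="$0"
while [ -h "$SOURCE" ]; do
  # Remember the directory holding the link, physically resolved.
  DIR="$(cd -P "$(dirname "$SOURCE")" && pwd)"
  # Plain readlink (no -f) works on both Linux and Mac.
  SOURCE="$(readlink "$SOURCE")"
  # A relative link target is interpreted relative to the link's directory.
  case "$SOURCE" in
    /*) ;;
    *)  SOURCE="$DIR/$SOURCE" ;;
  esac
done
export SPARK_HOME="$(cd -P "$(dirname "$SOURCE")/.." && pwd)"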
Diffstat (limited to 'sbin')
-rwxr-xr-x  sbin/slaves.sh                        9
-rwxr-xr-x  sbin/spark-config.sh                 23
-rwxr-xr-x  sbin/spark-daemon.sh                 23
-rwxr-xr-x  sbin/spark-daemons.sh                 9
-rwxr-xr-x  sbin/start-all.sh                    11
-rwxr-xr-x  sbin/start-history-server.sh         11
-rwxr-xr-x  sbin/start-master.sh                 17
-rwxr-xr-x  sbin/start-mesos-dispatcher.sh       11
-rwxr-xr-x  sbin/start-mesos-shuffle-service.sh  11
-rwxr-xr-x  sbin/start-shuffle-service.sh        11
-rwxr-xr-x  sbin/start-slave.sh                  18
-rwxr-xr-x  sbin/start-slaves.sh                 19
-rwxr-xr-x  sbin/start-thriftserver.sh           11
-rwxr-xr-x  sbin/stop-all.sh                     14
-rwxr-xr-x  sbin/stop-history-server.sh           7
-rwxr-xr-x  sbin/stop-master.sh                  13
-rwxr-xr-x  sbin/stop-mesos-dispatcher.sh         9
-rwxr-xr-x  sbin/stop-mesos-shuffle-service.sh    7
-rwxr-xr-x  sbin/stop-shuffle-service.sh          7
-rwxr-xr-x  sbin/stop-slave.sh                   15
-rwxr-xr-x  sbin/stop-slaves.sh                  15
-rwxr-xr-x  sbin/stop-thriftserver.sh             7
22 files changed, 143 insertions, 135 deletions
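To see the failure mode the patch addresses, consider an operator who exposes a launcher on `PATH` through a symlink. Paths here are hypothetical:

# Spark installed under /opt/spark; launcher symlinked onto PATH.
ln -s /opt/spark/sbin/start-master.sh /usr/local/bin/start-master.sh

# Before this patch the script derived its sbin dir from `dirname "$0"`,
# i.e. /usr/local/bin, and then failed to source
# /usr/local/bin/spark-config.sh, which does not exist.

# After this patch every sbin script honors a preset SPARK_HOME:
export SPARK_HOME=/opt/spark
start-master.sh

Note that the guard added throughout this diff still falls back to `dirname "$0"` when SPARK_HOME is unset, so when launching an sbin script through a symlink, exporting SPARK_HOME (or resolving the link as sketched above) is what makes it work.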
diff --git a/sbin/slaves.sh b/sbin/slaves.sh
index cdad47ee2e..c971aa3296 100755
--- a/sbin/slaves.sh
+++ b/sbin/slaves.sh
@@ -36,10 +36,11 @@ if [ $# -le 0 ]; then
exit 1
fi
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
# If the slaves file is specified in the command line,
# then it takes precedence over the definition in
@@ -65,7 +66,7 @@ then
shift
fi
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
if [ "$HOSTLIST" = "" ]; then
if [ "$SPARK_SLAVES" = "" ]; then
diff --git a/sbin/spark-config.sh b/sbin/spark-config.sh
index e6bf544c14..d8d9d00d64 100755
--- a/sbin/spark-config.sh
+++ b/sbin/spark-config.sh
@@ -19,21 +19,12 @@
# should not be executable directly
# also should not be passed any arguments, since we need original $*
-# resolve links - $0 may be a softlink
-this="${BASH_SOURCE:-$0}"
-common_bin="$(cd -P -- "$(dirname -- "$this")" && pwd -P)"
-script="$(basename -- "$this")"
-this="$common_bin/$script"
+# symlink and absolute path should rely on SPARK_HOME to resolve
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
-# convert relative path to absolute path
-config_bin="`dirname "$this"`"
-script="`basename "$this"`"
-config_bin="`cd "$config_bin"; pwd`"
-this="$config_bin/$script"
-
-export SPARK_PREFIX="`dirname "$this"`"/..
-export SPARK_HOME="${SPARK_PREFIX}"
-export SPARK_CONF_DIR="${SPARK_CONF_DIR:-"$SPARK_HOME/conf"}"
+export SPARK_CONF_DIR="${SPARK_CONF_DIR:-"${SPARK_HOME}/conf"}"
# Add the PySpark classes to the PYTHONPATH:
-export PYTHONPATH="$SPARK_HOME/python:$PYTHONPATH"
-export PYTHONPATH="$SPARK_HOME/python/lib/py4j-0.9-src.zip:$PYTHONPATH"
+export PYTHONPATH="${SPARK_HOME}/python:${PYTHONPATH}"
+export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.9-src.zip:${PYTHONPATH}"
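One subtlety in the spark-config.sh hunk: the file is sourced, never executed, which is why the removed code reached for `${BASH_SOURCE:-$0}` -- inside a sourced file, `$0` still names the calling script. The new guard can rely on `$0` only because every caller now exports SPARK_HOME before sourcing this file, leaving the fallback branch as a safety net. A minimal illustration, with hypothetical file names:

# caller.sh (hypothetical)
#!/usr/bin/env bash
. ./sourced.sh

# sourced.sh (hypothetical)
echo "\$0          is $0"                # prints ./caller.sh
echo "BASH_SOURCE is ${BASH_SOURCE[0]}"  # prints ./sourced.sh (bash-specific)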
diff --git a/sbin/spark-daemon.sh b/sbin/spark-daemon.sh
index 0fbe795822..6ab57df409 100755
--- a/sbin/spark-daemon.sh
+++ b/sbin/spark-daemon.sh
@@ -37,10 +37,11 @@ if [ $# -le 1 ]; then
exit 1
fi
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
# get arguments
@@ -86,7 +87,7 @@ spark_rotate_log ()
fi
}
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
if [ "$SPARK_IDENT_STRING" = "" ]; then
export SPARK_IDENT_STRING="$USER"
@@ -97,7 +98,7 @@ export SPARK_PRINT_LAUNCH_COMMAND="1"
# get log directory
if [ "$SPARK_LOG_DIR" = "" ]; then
- export SPARK_LOG_DIR="$SPARK_HOME/logs"
+ export SPARK_LOG_DIR="${SPARK_HOME}/logs"
fi
mkdir -p "$SPARK_LOG_DIR"
touch "$SPARK_LOG_DIR"/.spark_test > /dev/null 2>&1
@@ -137,7 +138,7 @@ run_command() {
if [ "$SPARK_MASTER" != "" ]; then
echo rsync from "$SPARK_MASTER"
- rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' "$SPARK_MASTER/" "$SPARK_HOME"
+ rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' "$SPARK_MASTER/" "${SPARK_HOME}"
fi
spark_rotate_log "$log"
@@ -145,12 +146,12 @@ run_command() {
case "$mode" in
(class)
- nohup nice -n "$SPARK_NICENESS" "$SPARK_PREFIX"/bin/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
+ nohup nice -n "$SPARK_NICENESS" "${SPARK_HOME}"/bin/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
newpid="$!"
;;
(submit)
- nohup nice -n "$SPARK_NICENESS" "$SPARK_PREFIX"/bin/spark-submit --class $command "$@" >> "$log" 2>&1 < /dev/null &
+ nohup nice -n "$SPARK_NICENESS" "${SPARK_HOME}"/bin/spark-submit --class $command "$@" >> "$log" 2>&1 < /dev/null &
newpid="$!"
;;
@@ -205,13 +206,13 @@ case $option in
else
echo $pid file is present but $command not running
exit 1
- fi
+ fi
else
echo $command not running.
exit 2
- fi
+ fi
;;
-
+
(*)
echo $usage
exit 1
diff --git a/sbin/spark-daemons.sh b/sbin/spark-daemons.sh
index 5d9f2bb51c..dec2f4432d 100755
--- a/sbin/spark-daemons.sh
+++ b/sbin/spark-daemons.sh
@@ -27,9 +27,10 @@ if [ $# -le 1 ]; then
exit 1
fi
-sbin=`dirname "$0"`
-sbin=`cd "$sbin"; pwd`
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
-exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/spark-daemon.sh" "$@"
+exec "${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/sbin/spark-daemon.sh" "$@"
diff --git a/sbin/start-all.sh b/sbin/start-all.sh
index 1baf57cea0..6217f9bf28 100755
--- a/sbin/start-all.sh
+++ b/sbin/start-all.sh
@@ -21,8 +21,9 @@
# Starts the master on this node.
# Starts a worker on each node specified in conf/slaves
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
TACHYON_STR=""
@@ -36,10 +37,10 @@ shift
done
# Load the Spark configuration
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
# Start Master
-"$sbin"/start-master.sh $TACHYON_STR
+"${SPARK_HOME}/sbin"/start-master.sh $TACHYON_STR
# Start Workers
-"$sbin"/start-slaves.sh $TACHYON_STR
+"${SPARK_HOME}/sbin"/start-slaves.sh $TACHYON_STR
diff --git a/sbin/start-history-server.sh b/sbin/start-history-server.sh
index 9034e5715c..6851d99b7e 100755
--- a/sbin/start-history-server.sh
+++ b/sbin/start-history-server.sh
@@ -24,10 +24,11 @@
# Use the SPARK_HISTORY_OPTS environment variable to set history server configuration.
#
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
-. "$sbin/spark-config.sh"
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
-exec "$sbin"/spark-daemon.sh start org.apache.spark.deploy.history.HistoryServer 1 $@
+exec "${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.history.HistoryServer 1 $@
diff --git a/sbin/start-master.sh b/sbin/start-master.sh
index a7f5d5702f..c20e19a841 100755
--- a/sbin/start-master.sh
+++ b/sbin/start-master.sh
@@ -19,8 +19,9 @@
# Starts the master on the machine this script is executed on.
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
ORIGINAL_ARGS="$@"
@@ -39,9 +40,9 @@ case $1 in
shift
done
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
if [ "$SPARK_MASTER_PORT" = "" ]; then
SPARK_MASTER_PORT=7077
@@ -55,12 +56,12 @@ if [ "$SPARK_MASTER_WEBUI_PORT" = "" ]; then
SPARK_MASTER_WEBUI_PORT=8080
fi
-"$sbin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 \
+"${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 \
--ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT \
$ORIGINAL_ARGS
if [ "$START_TACHYON" == "true" ]; then
- "$sbin"/../tachyon/bin/tachyon bootstrap-conf $SPARK_MASTER_IP
- "$sbin"/../tachyon/bin/tachyon format -s
- "$sbin"/../tachyon/bin/tachyon-start.sh master
+ "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon bootstrap-conf $SPARK_MASTER_IP
+ "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon format -s
+ "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon-start.sh master
fi
diff --git a/sbin/start-mesos-dispatcher.sh b/sbin/start-mesos-dispatcher.sh
index ef1fc573d5..4777e1668c 100755
--- a/sbin/start-mesos-dispatcher.sh
+++ b/sbin/start-mesos-dispatcher.sh
@@ -21,12 +21,13 @@
# Rest server to handle driver requests for Mesos cluster mode.
# Only one cluster dispatcher is needed per Mesos cluster.
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
if [ "$SPARK_MESOS_DISPATCHER_PORT" = "" ]; then
SPARK_MESOS_DISPATCHER_PORT=7077
@@ -37,4 +38,4 @@ if [ "$SPARK_MESOS_DISPATCHER_HOST" = "" ]; then
fi
-"$sbin"/spark-daemon.sh start org.apache.spark.deploy.mesos.MesosClusterDispatcher 1 --host $SPARK_MESOS_DISPATCHER_HOST --port $SPARK_MESOS_DISPATCHER_PORT "$@"
+"${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.mesos.MesosClusterDispatcher 1 --host $SPARK_MESOS_DISPATCHER_HOST --port $SPARK_MESOS_DISPATCHER_PORT "$@"
diff --git a/sbin/start-mesos-shuffle-service.sh b/sbin/start-mesos-shuffle-service.sh
index 64580762c5..1845845676 100755
--- a/sbin/start-mesos-shuffle-service.sh
+++ b/sbin/start-mesos-shuffle-service.sh
@@ -26,10 +26,11 @@
# Use the SPARK_SHUFFLE_OPTS environment variable to set shuffle service configuration.
#
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
-. "$sbin/spark-config.sh"
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
-exec "$sbin"/spark-daemon.sh start org.apache.spark.deploy.mesos.MesosExternalShuffleService 1
+exec "${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.mesos.MesosExternalShuffleService 1
diff --git a/sbin/start-shuffle-service.sh b/sbin/start-shuffle-service.sh
index 4fddcf7f95..793e165be6 100755
--- a/sbin/start-shuffle-service.sh
+++ b/sbin/start-shuffle-service.sh
@@ -24,10 +24,11 @@
# Use the SPARK_SHUFFLE_OPTS environment variable to set shuffle server configuration.
#
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
-. "$sbin/spark-config.sh"
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
-exec "$sbin"/spark-daemon.sh start org.apache.spark.deploy.ExternalShuffleService 1
+exec "${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.ExternalShuffleService 1
diff --git a/sbin/start-slave.sh b/sbin/start-slave.sh
index 4c919ff76a..21455648d1 100755
--- a/sbin/start-slave.sh
+++ b/sbin/start-slave.sh
@@ -21,14 +21,14 @@
#
# Environment Variables
#
-# SPARK_WORKER_INSTANCES The number of worker instances to run on this
+# SPARK_WORKER_INSTANCES The number of worker instances to run on this
# slave. Default is 1.
-# SPARK_WORKER_PORT The base port number for the first worker. If set,
+# SPARK_WORKER_PORT The base port number for the first worker. If set,
# subsequent workers will increment this number. If
# unset, Spark will find a valid port number, but
# with no guarantee of a predictable pattern.
# SPARK_WORKER_WEBUI_PORT The base port for the web interface of the first
-# worker. Subsequent workers will increment this
+# worker. Subsequent workers will increment this
# number. Default is 8081.
usage="Usage: start-slave.sh <spark-master-URL> where <spark-master-URL> is like spark://localhost:7077"
@@ -39,12 +39,13 @@ if [ $# -lt 1 ]; then
exit 1
fi
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
# First argument should be the master; we need to store it aside because we may
# need to insert arguments between it and the other arguments
@@ -71,7 +72,7 @@ function start_instance {
fi
WEBUI_PORT=$(( $SPARK_WORKER_WEBUI_PORT + $WORKER_NUM - 1 ))
- "$sbin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker $WORKER_NUM \
+ "${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker $WORKER_NUM \
--webui-port "$WEBUI_PORT" $PORT_FLAG $PORT_NUM $MASTER "$@"
}
@@ -82,4 +83,3 @@ else
start_instance $(( 1 + $i )) "$@"
done
fi
-
diff --git a/sbin/start-slaves.sh b/sbin/start-slaves.sh
index 24d6268815..51ca81e053 100755
--- a/sbin/start-slaves.sh
+++ b/sbin/start-slaves.sh
@@ -19,16 +19,16 @@
# Starts a slave instance on each machine specified in the conf/slaves file.
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
-
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
START_TACHYON=false
while (( "$#" )); do
case $1 in
--with-tachyon)
- if [ ! -e "$sbin"/../tachyon/bin/tachyon ]; then
+ if [ ! -e "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon ]; then
echo "Error: --with-tachyon specified, but tachyon not found."
exit -1
fi
@@ -38,9 +38,8 @@ case $1 in
shift
done
-. "$sbin/spark-config.sh"
-
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
# Find the port number for the master
if [ "$SPARK_MASTER_PORT" = "" ]; then
@@ -52,11 +51,11 @@ if [ "$SPARK_MASTER_IP" = "" ]; then
fi
if [ "$START_TACHYON" == "true" ]; then
- "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin"/../tachyon/bin/tachyon bootstrap-conf "$SPARK_MASTER_IP"
+ "${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon bootstrap-conf "$SPARK_MASTER_IP"
# set -t so we can call sudo
- SPARK_SSH_OPTS="-o StrictHostKeyChecking=no -t" "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/../tachyon/bin/tachyon-start.sh" worker SudoMount \; sleep 1
+ SPARK_SSH_OPTS="-o StrictHostKeyChecking=no -t" "${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/tachyon/bin/tachyon-start.sh" worker SudoMount \; sleep 1
fi
# Launch the slaves
-"$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" "spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT"
+"${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/sbin/start-slave.sh" "spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT"
diff --git a/sbin/start-thriftserver.sh b/sbin/start-thriftserver.sh
index 5b0aeb177f..ad7e7c5277 100755
--- a/sbin/start-thriftserver.sh
+++ b/sbin/start-thriftserver.sh
@@ -23,8 +23,9 @@
# Enter posix mode for bash
set -o posix
-# Figure out where Spark is installed
-FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
# NOTE: This exact class name is matched downstream by SparkSubmit.
# Any changes need to be reflected there.
@@ -39,10 +40,10 @@ function usage {
pattern+="\|======="
pattern+="\|--help"
- "$FWDIR"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
+ "${SPARK_HOME}"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
echo
echo "Thrift server options:"
- "$FWDIR"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
+ "${SPARK_HOME}"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
}
if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
@@ -52,4 +53,4 @@ fi
export SUBMIT_USAGE_FUNCTION=usage
-exec "$FWDIR"/sbin/spark-daemon.sh submit $CLASS 1 "$@"
+exec "${SPARK_HOME}"/sbin/spark-daemon.sh submit $CLASS 1 "$@"
diff --git a/sbin/stop-all.sh b/sbin/stop-all.sh
index 1a9abe07db..4e476ca05c 100755
--- a/sbin/stop-all.sh
+++ b/sbin/stop-all.sh
@@ -20,23 +20,23 @@
# Stop all spark daemons.
# Run this on the master node.
-
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
# Load the Spark configuration
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
# Stop the slaves, then the master
-"$sbin"/stop-slaves.sh
-"$sbin"/stop-master.sh
+"${SPARK_HOME}/sbin"/stop-slaves.sh
+"${SPARK_HOME}/sbin"/stop-master.sh
if [ "$1" == "--wait" ]
then
printf "Waiting for workers to shut down..."
while true
do
- running=`$sbin/slaves.sh ps -ef | grep -v grep | grep deploy.worker.Worker`
+ running=`${SPARK_HOME}/sbin/slaves.sh ps -ef | grep -v grep | grep deploy.worker.Worker`
if [ -z "$running" ]
then
printf "\nAll workers successfully shut down.\n"
diff --git a/sbin/stop-history-server.sh b/sbin/stop-history-server.sh
index 6e60563595..14e3af4be9 100755
--- a/sbin/stop-history-server.sh
+++ b/sbin/stop-history-server.sh
@@ -19,7 +19,8 @@
# Stops the history server on the machine this script is executed on.
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
-"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.history.HistoryServer 1
+"${SPARK_HOME}/sbin/spark-daemon.sh" stop org.apache.spark.deploy.history.HistoryServer 1
diff --git a/sbin/stop-master.sh b/sbin/stop-master.sh
index 729702d921..e57962bb35 100755
--- a/sbin/stop-master.sh
+++ b/sbin/stop-master.sh
@@ -19,13 +19,14 @@
# Stops the master on the machine this script is executed on.
-sbin=`dirname "$0"`
-sbin=`cd "$sbin"; pwd`
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
-"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1
-if [ -e "$sbin"/../tachyon/bin/tachyon ]; then
- "$sbin"/../tachyon/bin/tachyon killAll tachyon.master.Master
+if [ -e "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon ]; then
+ "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon killAll tachyon.master.Master
fi
diff --git a/sbin/stop-mesos-dispatcher.sh b/sbin/stop-mesos-dispatcher.sh
index cb65d95b5e..5c0b4e051d 100755
--- a/sbin/stop-mesos-dispatcher.sh
+++ b/sbin/stop-mesos-dispatcher.sh
@@ -18,10 +18,11 @@
#
# Stop the Mesos Cluster dispatcher on the machine this script is executed on.
-sbin=`dirname "$0"`
-sbin=`cd "$sbin"; pwd`
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
-"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.mesos.MesosClusterDispatcher 1
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.mesos.MesosClusterDispatcher 1
diff --git a/sbin/stop-mesos-shuffle-service.sh b/sbin/stop-mesos-shuffle-service.sh
index 0e965d5ec5..d23cad375e 100755
--- a/sbin/stop-mesos-shuffle-service.sh
+++ b/sbin/stop-mesos-shuffle-service.sh
@@ -19,7 +19,8 @@
# Stops the Mesos external shuffle service on the machine this script is executed on.
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
-"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.mesos.MesosExternalShuffleService 1
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.mesos.MesosExternalShuffleService 1
diff --git a/sbin/stop-shuffle-service.sh b/sbin/stop-shuffle-service.sh
index 4cb6891ae2..50d69cf34e 100755
--- a/sbin/stop-shuffle-service.sh
+++ b/sbin/stop-shuffle-service.sh
@@ -19,7 +19,8 @@
# Stops the external shuffle service on the machine this script is executed on.
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
-"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.ExternalShuffleService 1
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.ExternalShuffleService 1
diff --git a/sbin/stop-slave.sh b/sbin/stop-slave.sh
index 3d1da5b254..685bcf59b3 100755
--- a/sbin/stop-slave.sh
+++ b/sbin/stop-slave.sh
@@ -21,23 +21,24 @@
#
# Environment variables
#
-# SPARK_WORKER_INSTANCES The number of worker instances that should be
+# SPARK_WORKER_INSTANCES The number of worker instances that should be
# running on this slave. Default is 1.
# Usage: stop-slave.sh
# Stops all slaves on this worker machine
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
- "$sbin"/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker 1
+ "${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker 1
else
for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
- "$sbin"/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
+ "${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
done
fi
diff --git a/sbin/stop-slaves.sh b/sbin/stop-slaves.sh
index 54c9bd4680..6395637762 100755
--- a/sbin/stop-slaves.sh
+++ b/sbin/stop-slaves.sh
@@ -17,16 +17,17 @@
# limitations under the License.
#
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
# do before the below calls as they exec
-if [ -e "$sbin"/../tachyon/bin/tachyon ]; then
- "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin"/../tachyon/bin/tachyon killAll tachyon.worker.Worker
+if [ -e "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon ]; then
+ "${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon killAll tachyon.worker.Worker
fi
-"$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin"/stop-slave.sh
+"${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/sbin"/stop-slave.sh
diff --git a/sbin/stop-thriftserver.sh b/sbin/stop-thriftserver.sh
index 4031a00d4a..cf45058f88 100755
--- a/sbin/stop-thriftserver.sh
+++ b/sbin/stop-thriftserver.sh
@@ -19,7 +19,8 @@
# Stops the thrift server on the machine this script is executed on.
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+ export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
-"$sbin"/spark-daemon.sh stop org.apache.spark.sql.hive.thriftserver.HiveThriftServer2 1
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.sql.hive.thriftserver.HiveThriftServer2 1