author     Prashant Sharma <prashant.s@imaginea.com>   2014-09-08 10:24:15 -0700
committer  Andrew Or <andrewor14@gmail.com>            2014-09-08 10:24:15 -0700
commit     e16a8e7db5a3b1065b14baf89cb723a59b99226b (patch)
tree       09d5b9bd510325047aa20f62f215184e46367bdb /sbin
parent     711356b422c66e2a80377a9f43fce97282460520 (diff)
SPARK-3337 Paranoid quoting in shell to allow install dirs with spaces within.
Tested! TBH, it isn't a great idea to have a directory with spaces in its name, because Emacs doesn't like it, then Hadoop doesn't like it, and so on...

Author: Prashant Sharma <prashant.s@imaginea.com>

Closes #2229 from ScrapCodes/SPARK-3337/quoting-shell-scripts and squashes the following commits:

d4ad660 [Prashant Sharma] SPARK-3337 Paranoid quoting in shell to allow install dirs with spaces within.
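The bug this patch guards against is ordinary word splitting: an unquoted expansion of a path containing a space becomes two words. A minimal sketch of the failure and the fix, using a hypothetical install path rather than anything from the patch:

    #!/usr/bin/env bash
    # Hypothetical layout: an install dir with a space in its name.
    mkdir -p "/tmp/spark install/sbin"
    script="/tmp/spark install/sbin/start-master.sh"

    dirname $script      # unquoted: word-splits into two operands,
                         # printing "/tmp" and "install/sbin" (or erroring)
    dirname "$script"    # quoted: one operand, prints "/tmp/spark install/sbin"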
Diffstat (limited to 'sbin')
-rwxr-xr-x  sbin/slaves.sh                 12
-rwxr-xr-x  sbin/spark-config.sh           16
-rwxr-xr-x  sbin/spark-daemon.sh           20
-rwxr-xr-x  sbin/spark-executor             8
-rwxr-xr-x  sbin/start-all.sh               4
-rwxr-xr-x  sbin/start-history-server.sh    4
-rwxr-xr-x  sbin/start-master.sh            4
-rwxr-xr-x  sbin/start-slave.sh             4
-rwxr-xr-x  sbin/start-slaves.sh           12
-rwxr-xr-x  sbin/start-thriftserver.sh      8
-rwxr-xr-x  sbin/stop-all.sh                4
-rwxr-xr-x  sbin/stop-history-server.sh     4
12 files changed, 50 insertions, 50 deletions
diff --git a/sbin/slaves.sh b/sbin/slaves.sh
index f89547fef9..1d4dc5edf9 100755
--- a/sbin/slaves.sh
+++ b/sbin/slaves.sh
@@ -36,29 +36,29 @@ if [ $# -le 0 ]; then
exit 1
fi
-sbin=`dirname "$0"`
-sbin=`cd "$sbin"; pwd`
+sbin="`dirname "$0"`"
+sbin="`cd "$sbin"; pwd`"
. "$sbin/spark-config.sh"
# If the slaves file is specified in the command line,
# then it takes precedence over the definition in
# spark-env.sh. Save it here.
-HOSTLIST=$SPARK_SLAVES
+HOSTLIST="$SPARK_SLAVES"
# Check if --config is passed as an argument. It is an optional parameter.
# Exit if the argument is not a directory.
if [ "$1" == "--config" ]
then
shift
- conf_dir=$1
+ conf_dir="$1"
if [ ! -d "$conf_dir" ]
then
echo "ERROR : $conf_dir is not a directory"
echo $usage
exit 1
else
- export SPARK_CONF_DIR=$conf_dir
+ export SPARK_CONF_DIR="$conf_dir"
fi
shift
fi
@@ -79,7 +79,7 @@ if [ "$SPARK_SSH_OPTS" = "" ]; then
fi
for slave in `cat "$HOSTLIST"|sed "s/#.*$//;/^$/d"`; do
- ssh $SPARK_SSH_OPTS $slave $"${@// /\\ }" \
+ ssh $SPARK_SSH_OPTS "$slave" $"${@// /\\ }" \
2>&1 | sed "s/^/$slave: /" &
if [ "$SPARK_SLAVE_SLEEP" != "" ]; then
sleep $SPARK_SLAVE_SLEEP
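The pre-existing $"${@// /\\ }" expression on the ssh line does related work: ssh hands the remote shell a flat command string that gets word-split a second time, so slaves.sh escapes each space inside the forwarded arguments first. A sketch of just that expansion, with hypothetical argument values:

    #!/usr/bin/env bash
    # Simulate slaves.sh forwarding its arguments over ssh.
    set -- "/opt/spark install/sbin/start-slave.sh" 1
    # ${@// /\\ } rewrites every space in every argument to '\ ', so the
    # remote shell's word splitting reassembles the original words.
    printf '%s\n' "${@// /\\ }"
    # -> /opt/spark\ install/sbin/start-slave.sh
    # -> 1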
diff --git a/sbin/spark-config.sh b/sbin/spark-config.sh
index 5c87da5815..2718d6cba1 100755
--- a/sbin/spark-config.sh
+++ b/sbin/spark-config.sh
@@ -21,19 +21,19 @@
# resolve links - $0 may be a softlink
this="${BASH_SOURCE-$0}"
-common_bin=$(cd -P -- "$(dirname -- "$this")" && pwd -P)
+common_bin="$(cd -P -- "$(dirname -- "$this")" && pwd -P)"
script="$(basename -- "$this")"
this="$common_bin/$script"
# convert relative path to absolute path
-config_bin=`dirname "$this"`
-script=`basename "$this"`
-config_bin=`cd "$config_bin"; pwd`
+config_bin="`dirname "$this"`"
+script="`basename "$this"`"
+config_bin="`cd "$config_bin"; pwd`"
this="$config_bin/$script"
-export SPARK_PREFIX=`dirname "$this"`/..
-export SPARK_HOME=${SPARK_PREFIX}
+export SPARK_PREFIX="`dirname "$this"`"/..
+export SPARK_HOME="${SPARK_PREFIX}"
export SPARK_CONF_DIR="$SPARK_HOME/conf"
# Add the PySpark classes to the PYTHONPATH:
-export PYTHONPATH=$SPARK_HOME/python:$PYTHONPATH
-export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH
+export PYTHONPATH="$SPARK_HOME/python:$PYTHONPATH"
+export PYTHONPATH="$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH"
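Quoting the export lines is as much about portability as correctness: bash treats export VAR=$X as an assignment, but strictly POSIX shells word-split the argument, so a space in SPARK_HOME can break the export there. A hedged sketch with a hypothetical value:

    #!/usr/bin/env bash
    # Under a strictly POSIX shell such as dash, the unquoted form splits:
    #   SPARK_HOME='/opt/spark install'
    #   export PYTHONPATH=$SPARK_HOME/python
    #   -> tries to export "install/python", which is not a valid name
    # The quoted form below is a single word under every shell:
    SPARK_HOME='/opt/spark install'
    export PYTHONPATH="$SPARK_HOME/python:$PYTHONPATH"
    echo "$PYTHONPATH"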
diff --git a/sbin/spark-daemon.sh b/sbin/spark-daemon.sh
index 9032f23ea8..bd476b400e 100755
--- a/sbin/spark-daemon.sh
+++ b/sbin/spark-daemon.sh
@@ -37,8 +37,8 @@ if [ $# -le 1 ]; then
exit 1
fi
-sbin=`dirname "$0"`
-sbin=`cd "$sbin"; pwd`
+sbin="`dirname "$0"`"
+sbin="`cd "$sbin"; pwd`"
. "$sbin/spark-config.sh"
@@ -50,14 +50,14 @@ sbin=`cd "$sbin"; pwd`
if [ "$1" == "--config" ]
then
shift
- conf_dir=$1
+ conf_dir="$1"
if [ ! -d "$conf_dir" ]
then
echo "ERROR : $conf_dir is not a directory"
echo $usage
exit 1
else
- export SPARK_CONF_DIR=$conf_dir
+ export SPARK_CONF_DIR="$conf_dir"
fi
shift
fi
@@ -100,12 +100,12 @@ if [ "$SPARK_LOG_DIR" = "" ]; then
export SPARK_LOG_DIR="$SPARK_HOME/logs"
fi
mkdir -p "$SPARK_LOG_DIR"
-touch $SPARK_LOG_DIR/.spark_test > /dev/null 2>&1
+touch "$SPARK_LOG_DIR"/.spark_test > /dev/null 2>&1
TEST_LOG_DIR=$?
if [ "${TEST_LOG_DIR}" = "0" ]; then
- rm -f $SPARK_LOG_DIR/.spark_test
+ rm -f "$SPARK_LOG_DIR"/.spark_test
else
- chown $SPARK_IDENT_STRING $SPARK_LOG_DIR
+ chown "$SPARK_IDENT_STRING" "$SPARK_LOG_DIR"
fi
if [ "$SPARK_PID_DIR" = "" ]; then
@@ -113,8 +113,8 @@ if [ "$SPARK_PID_DIR" = "" ]; then
fi
# some variables
-log=$SPARK_LOG_DIR/spark-$SPARK_IDENT_STRING-$command-$instance-$HOSTNAME.out
-pid=$SPARK_PID_DIR/spark-$SPARK_IDENT_STRING-$command-$instance.pid
+log="$SPARK_LOG_DIR/spark-$SPARK_IDENT_STRING-$command-$instance-$HOSTNAME.out"
+pid="$SPARK_PID_DIR/spark-$SPARK_IDENT_STRING-$command-$instance.pid"
# Set default scheduling priority
if [ "$SPARK_NICENESS" = "" ]; then
@@ -136,7 +136,7 @@ case $startStop in
fi
if [ "$SPARK_MASTER" != "" ]; then
- echo rsync from $SPARK_MASTER
+ echo rsync from "$SPARK_MASTER"
rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' $SPARK_MASTER/ "$SPARK_HOME"
fi
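The log and pid assignments gather several variables into one quoted word, and every later use of the path is quoted as well. A sketch with hypothetical stand-in values:

    #!/usr/bin/env bash
    # Hypothetical values standing in for spark-daemon.sh's variables.
    SPARK_LOG_DIR="/tmp/spark install/logs"
    SPARK_IDENT_STRING=$USER
    command=org.apache.spark.deploy.master.Master
    instance=1

    log="$SPARK_LOG_DIR/spark-$SPARK_IDENT_STRING-$command-$instance-$HOSTNAME.out"
    mkdir -p "$SPARK_LOG_DIR"
    touch "$log"   # a single argument; unquoted, $log would split at the space
    rm -f "$log"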
diff --git a/sbin/spark-executor b/sbin/spark-executor
index 3621321a9b..674ce906d9 100755
--- a/sbin/spark-executor
+++ b/sbin/spark-executor
@@ -17,10 +17,10 @@
# limitations under the License.
#
-FWDIR="$(cd `dirname $0`/..; pwd)"
+FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
-export PYTHONPATH=$FWDIR/python:$PYTHONPATH
-export PYTHONPATH=$FWDIR/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH
+export PYTHONPATH="$FWDIR/python:$PYTHONPATH"
+export PYTHONPATH="$FWDIR/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH"
echo "Running spark-executor with framework dir = $FWDIR"
-exec $FWDIR/bin/spark-class org.apache.spark.executor.MesosExecutorBackend
+exec "$FWDIR"/bin/spark-class org.apache.spark.executor.MesosExecutorBackend
diff --git a/sbin/start-all.sh b/sbin/start-all.sh
index 5c89ab4d86..1baf57cea0 100755
--- a/sbin/start-all.sh
+++ b/sbin/start-all.sh
@@ -21,8 +21,8 @@
# Starts the master on this node.
# Starts a worker on each node specified in conf/slaves
-sbin=`dirname "$0"`
-sbin=`cd "$sbin"; pwd`
+sbin="`dirname "$0"`"
+sbin="`cd "$sbin"; pwd`"
TACHYON_STR=""
diff --git a/sbin/start-history-server.sh b/sbin/start-history-server.sh
index 580ab471b8..7172ad15d8 100755
--- a/sbin/start-history-server.sh
+++ b/sbin/start-history-server.sh
@@ -24,8 +24,8 @@
# Use the SPARK_HISTORY_OPTS environment variable to set history server configuration.
#
-sbin=`dirname "$0"`
-sbin=`cd "$sbin"; pwd`
+sbin="`dirname "$0"`"
+sbin="`cd "$sbin"; pwd`"
. "$sbin/spark-config.sh"
. "$SPARK_PREFIX/bin/load-spark-env.sh"
diff --git a/sbin/start-master.sh b/sbin/start-master.sh
index c5c02491f7..17fff58f4f 100755
--- a/sbin/start-master.sh
+++ b/sbin/start-master.sh
@@ -19,8 +19,8 @@
# Starts the master on the machine this script is executed on.
-sbin=`dirname "$0"`
-sbin=`cd "$sbin"; pwd`
+sbin="`dirname "$0"`"
+sbin="`cd "$sbin"; pwd`"
START_TACHYON=false
diff --git a/sbin/start-slave.sh b/sbin/start-slave.sh
index b563400dc2..2fc35309f4 100755
--- a/sbin/start-slave.sh
+++ b/sbin/start-slave.sh
@@ -20,7 +20,7 @@
# Usage: start-slave.sh <worker#> <master-spark-URL>
# where <master-spark-URL> is like "spark://localhost:7077"
-sbin=`dirname "$0"`
-sbin=`cd "$sbin"; pwd`
+sbin="`dirname "$0"`"
+sbin="`cd "$sbin"; pwd`"
"$sbin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@"
diff --git a/sbin/start-slaves.sh b/sbin/start-slaves.sh
index 4912d0c0c7..ba1a84abc1 100755
--- a/sbin/start-slaves.sh
+++ b/sbin/start-slaves.sh
@@ -17,8 +17,8 @@
# limitations under the License.
#
-sbin=`dirname "$0"`
-sbin=`cd "$sbin"; pwd`
+sbin="`dirname "$0"`"
+sbin="`cd "$sbin"; pwd`"
START_TACHYON=false
@@ -46,11 +46,11 @@ if [ "$SPARK_MASTER_PORT" = "" ]; then
fi
if [ "$SPARK_MASTER_IP" = "" ]; then
- SPARK_MASTER_IP=`hostname`
+ SPARK_MASTER_IP="`hostname`"
fi
if [ "$START_TACHYON" == "true" ]; then
- "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin"/../tachyon/bin/tachyon bootstrap-conf $SPARK_MASTER_IP
+ "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin"/../tachyon/bin/tachyon bootstrap-conf "$SPARK_MASTER_IP"
# set -t so we can call sudo
SPARK_SSH_OPTS="-o StrictHostKeyChecking=no -t" "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/../tachyon/bin/tachyon-start.sh" worker SudoMount \; sleep 1
@@ -58,12 +58,12 @@ fi
# Launch the slaves
if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
- exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" 1 spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT
+ exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" 1 "spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT"
else
if [ "$SPARK_WORKER_WEBUI_PORT" = "" ]; then
SPARK_WORKER_WEBUI_PORT=8081
fi
for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
- "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" $(( $i + 1 )) spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT --webui-port $(( $SPARK_WORKER_WEBUI_PORT + $i ))
+ "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" $(( $i + 1 )) "spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT" --webui-port $(( $SPARK_WORKER_WEBUI_PORT + $i ))
done
fi
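The master URL is assembled from two variables and must reach start-slave.sh as a single argument, hence the quotes around the whole spark:// word. A sketch of the construction with hypothetical defaults:

    #!/usr/bin/env bash
    # Hypothetical defaults mirroring start-slaves.sh.
    SPARK_MASTER_IP="`hostname`"
    SPARK_MASTER_PORT=7077
    url="spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT"
    echo "$url"   # passed on as one argument, however the variables expand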
diff --git a/sbin/start-thriftserver.sh b/sbin/start-thriftserver.sh
index c519a77df4..4ce40fe750 100755
--- a/sbin/start-thriftserver.sh
+++ b/sbin/start-thriftserver.sh
@@ -24,7 +24,7 @@
set -o posix
# Figure out where Spark is installed
-FWDIR="$(cd `dirname $0`/..; pwd)"
+FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
CLASS="org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
CLASS_NOT_FOUND_EXIT_STATUS=1
@@ -38,10 +38,10 @@ function usage {
pattern+="\|======="
pattern+="\|--help"
- $FWDIR/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
+ "$FWDIR"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
echo
echo "Thrift server options:"
- $FWDIR/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
+ "$FWDIR"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
}
if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
@@ -49,7 +49,7 @@ if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
exit 0
fi
-source $FWDIR/bin/utils.sh
+source "$FWDIR"/bin/utils.sh
SUBMIT_USAGE_FUNCTION=usage
gatherSparkSubmitOpts "$@"
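Several hunks quote only the variable segment of a path, as in "$FWDIR"/bin/spark-submit. That is equivalent to quoting the whole word, since adjacent quoted and unquoted segments concatenate; a sketch:

    #!/usr/bin/env bash
    # Hypothetical install dir: quoted and unquoted segments join into one
    # shell word, so both forms below are identical.
    FWDIR="/opt/spark install"
    printf '%s\n' "$FWDIR"/bin    # one word: /opt/spark install/bin
    printf '%s\n' "$FWDIR/bin"    # identical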
diff --git a/sbin/stop-all.sh b/sbin/stop-all.sh
index 60b358d374..298c6a9859 100755
--- a/sbin/stop-all.sh
+++ b/sbin/stop-all.sh
@@ -21,8 +21,8 @@
# Run this on the master node
-sbin=`dirname "$0"`
-sbin=`cd "$sbin"; pwd`
+sbin="`dirname "$0"`"
+sbin="`cd "$sbin"; pwd`"
# Load the Spark configuration
. "$sbin/spark-config.sh"
diff --git a/sbin/stop-history-server.sh b/sbin/stop-history-server.sh
index c0034ad641..6e60563595 100755
--- a/sbin/stop-history-server.sh
+++ b/sbin/stop-history-server.sh
@@ -19,7 +19,7 @@
# Stops the history server on the machine this script is executed on.
-sbin=`dirname "$0"`
-sbin=`cd "$sbin"; pwd`
+sbin="`dirname "$0"`"
+sbin="`cd "$sbin"; pwd`"
"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.history.HistoryServer 1