-rwxr-xr-x  spark-shell  25
1 file changed, 13 insertions, 12 deletions
diff --git a/spark-shell b/spark-shell
index ea67a3e6b8..a8e72143fb 100755
--- a/spark-shell
+++ b/spark-shell
@@ -1,24 +1,14 @@
 #!/bin/bash --posix
 #
 # Shell script for starting the Spark Shell REPL
+# Note that it will set MASTER to spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}
+# if those two env vars are set in spark-env.sh but MASTER is not.
 # Options:
-#    -m            Set MASTER to spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT
 #    -c <cores>    Set the number of cores for REPL to use
 #
 FWDIR="`dirname $0`"
 
 for o in "$@"; do
-  if [ "$1" = "-m" -o "$1" = "--master" ]; then
-    shift
-    if [ -e "$FWDIR/conf/spark-env.sh" ]; then
-      . "$FWDIR/conf/spark-env.sh"
-    fi
-    if [ -z "$MASTER" ]; then
-      MASTER="spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}"
-    fi
-    export MASTER
-  fi
-
   if [ "$1" = "-c" -o "$1" = "--cores" ]; then
     shift
     if [ -n "$1" ]; then
@@ -28,6 +18,17 @@ for o in "$@"; do
   fi
 done
 
+# Set MASTER from spark-env if possible
+if [ -z "$MASTER" ]; then
+  if [ -e "$FWDIR/conf/spark-env.sh" ]; then
+    . "$FWDIR/conf/spark-env.sh"
+  fi
+  if [[ "x" != "x$SPARK_MASTER_IP" && "y" != "y$SPARK_MASTER_PORT" ]]; then
+    MASTER="spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}"
+    export MASTER
+  fi
+fi
+
 # Copy restore-TTY-on-exit functions from Scala script so spark-shell exits properly even in
 # binary distribution of Spark where Scala is not installed
 exit_status=127
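
With this change, MASTER is derived from conf/spark-env.sh only when it is not already set in the environment. A minimal sketch of a spark-env.sh that the updated script would pick up (hostname and port below are hypothetical example values, not part of the commit):

    # conf/spark-env.sh -- hypothetical example values
    SPARK_MASTER_IP=master.example.com
    SPARK_MASTER_PORT=7077

With MASTER unset, running ./spark-shell would then export MASTER=spark://master.example.com:7077 before starting the REPL; an explicit MASTER already set in the environment still takes precedence, since the new block is guarded by [ -z "$MASTER" ].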