path: root/bin/pyspark
author     Prashant Sharma <prashant.s@imaginea.com>    2014-09-08 10:24:15 -0700
committer  Andrew Or <andrewor14@gmail.com>             2014-09-08 10:24:15 -0700
commit     e16a8e7db5a3b1065b14baf89cb723a59b99226b (patch)
tree       09d5b9bd510325047aa20f62f215184e46367bdb /bin/pyspark
parent     711356b422c66e2a80377a9f43fce97282460520 (diff)
SPARK-3337 Paranoid quoting in shell to allow install dirs with spaces within.
Tested! TBH, it isn't a great idea to have a directory with spaces in it, because emacs doesn't like it, then hadoop doesn't like it, and so on...

Author: Prashant Sharma <prashant.s@imaginea.com>

Closes #2229 from ScrapCodes/SPARK-3337/quoting-shell-scripts and squashes the following commits:

d4ad660 [Prashant Sharma] SPARK-3337 Paranoid quoting in shell to allow install dirs with spaces within.
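The quoting pattern in a nutshell (a minimal sketch drawn from the hunks below, not part of the patch itself; "/opt/my spark" is a made-up path):

    # Unquoted expansions are word-split, so with $0 = "/opt/my spark/bin/pyspark"
    # dirname receives two arguments and the cd fails:
    FWDIR="$(cd `dirname $0`/..; pwd)"          # breaks on paths with spaces

    # Quoting every expansion, including the ones nested inside the backticks,
    # keeps the path together as a single word:
    FWDIR="$(cd "`dirname "$0"`"/..; pwd)"      # safe with spaces

    # The same rule applies wherever the variable is used afterwards:
    source "$FWDIR/bin/utils.sh"                # quoted, survives spaces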
Diffstat (limited to 'bin/pyspark')
-rwxr-xr-x  bin/pyspark  20
1 file changed, 10 insertions(+), 10 deletions(-)
diff --git a/bin/pyspark b/bin/pyspark
index 26a16dd600..5142411e36 100755
--- a/bin/pyspark
+++ b/bin/pyspark
@@ -18,18 +18,18 @@
#
# Figure out where Spark is installed
-FWDIR="$(cd `dirname $0`/..; pwd)"
+FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
# Export this as SPARK_HOME
export SPARK_HOME="$FWDIR"
-source $FWDIR/bin/utils.sh
+source "$FWDIR/bin/utils.sh"
SCALA_VERSION=2.10
function usage() {
echo "Usage: ./bin/pyspark [options]" 1>&2
- $FWDIR/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
+ "$FWDIR"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
exit 0
}
@@ -48,7 +48,7 @@ if [ ! -f "$FWDIR/RELEASE" ]; then
fi
fi
-. $FWDIR/bin/load-spark-env.sh
+. "$FWDIR"/bin/load-spark-env.sh
# Figure out which Python executable to use
if [[ -z "$PYSPARK_PYTHON" ]]; then
@@ -57,12 +57,12 @@ fi
export PYSPARK_PYTHON
# Add the PySpark classes to the Python path:
-export PYTHONPATH=$SPARK_HOME/python/:$PYTHONPATH
-export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH
+export PYTHONPATH="$SPARK_HOME/python/:$PYTHONPATH"
+export PYTHONPATH="$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH"
# Load the PySpark shell.py script when ./pyspark is used interactively:
-export OLD_PYTHONSTARTUP=$PYTHONSTARTUP
-export PYTHONSTARTUP=$FWDIR/python/pyspark/shell.py
+export OLD_PYTHONSTARTUP="$PYTHONSTARTUP"
+export PYTHONSTARTUP="$FWDIR/python/pyspark/shell.py"
# If IPython options are specified, assume user wants to run IPython
if [[ -n "$IPYTHON_OPTS" ]]; then
@@ -99,10 +99,10 @@ fi
if [[ "$1" =~ \.py$ ]]; then
echo -e "\nWARNING: Running python applications through ./bin/pyspark is deprecated as of Spark 1.0." 1>&2
echo -e "Use ./bin/spark-submit <python file>\n" 1>&2
- primary=$1
+ primary="$1"
shift
gatherSparkSubmitOpts "$@"
- exec $FWDIR/bin/spark-submit "${SUBMISSION_OPTS[@]}" $primary "${APPLICATION_OPTS[@]}"
+ exec "$FWDIR"/bin/spark-submit "${SUBMISSION_OPTS[@]}" "$primary" "${APPLICATION_OPTS[@]}"
else
# PySpark shell requires special handling downstream
export PYSPARK_SHELL=1
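
A quick way to exercise the fix (a sketch using a hypothetical install path; not part of the patch):

    mkdir -p "/tmp/spark install"                # note the space in the path
    cp -a "$SPARK_HOME"/. "/tmp/spark install"   # copy an existing install there
    "/tmp/spark install/bin/pyspark" --help      # word-splits and fails before
                                                 # this patch; works after it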