author     Andrew Or <andrewor14@gmail.com>        2014-05-16 22:34:38 -0700
committer  Patrick Wendell <pwendell@gmail.com>    2014-05-16 22:34:38 -0700
commit     4b8ec6fcfd7a7ef0857d5b21917183c181301c95 (patch)
tree       d20ce09d28fac8caf0cec1ef68fbf72a4b3b62a1 /bin
parent     c0ab85d7320cea90e6331fb03a70349bc804c1b1 (diff)
[SPARK-1808] Route bin/pyspark through Spark submit
**Problem.** For `bin/pyspark`, there is currently no way to specify Spark configuration properties other than through `SPARK_JAVA_OPTS` in `conf/spark-env.sh`. However, this mechanism is supposedly deprecated. Instead, it needs to pick up configurations explicitly specified in `conf/spark-defaults.conf`.

**Solution.** Have `bin/pyspark` invoke `bin/spark-submit`, like all of its counterparts in Scala land (i.e. `bin/spark-shell`, `bin/run-example`). This has the additional benefit of making the invocation of all the user-facing Spark scripts consistent.

**Details.** `bin/pyspark` inherently handles two cases: (1) running python applications and (2) running the python shell. For (1), Spark submit already handles running python applications, so when `bin/pyspark` is given a python file we can simply pass the file directly to Spark submit and let it handle the rest. For (2), `bin/pyspark` starts a python process as before, which launches the JVM as a sub-process. The existing code already provides a code path to do this; all we needed to change was to use `bin/spark-submit` instead of `spark-class` to launch the JVM. This requires modifications to Spark submit to handle the pyspark shell as a special case.

This has been tested locally (OSX and Windows 7), on a standalone cluster, and on a YARN cluster. Running IPython also works as before, except now it takes in Spark submit arguments too.

Author: Andrew Or <andrewor14@gmail.com>

Closes #799 from andrewor14/pyspark-submit and squashes the following commits:

bf37e36 [Andrew Or] Minor changes
01066fa [Andrew Or] bin/pyspark for Windows
c8cb3bf [Andrew Or] Handle perverse app names (with escaped quotes)
1866f85 [Andrew Or] Windows is not cooperating
456d844 [Andrew Or] Guard against shlex hanging if PYSPARK_SUBMIT_ARGS is not set
7eebda8 [Andrew Or] Merge branch 'master' of github.com:apache/spark into pyspark-submit
b7ba0d8 [Andrew Or] Address a few comments (minor)
06eb138 [Andrew Or] Use shlex instead of writing our own parser
05879fa [Andrew Or] Merge branch 'master' of github.com:apache/spark into pyspark-submit
a823661 [Andrew Or] Fix --die-on-broken-pipe not propagated properly
6fba412 [Andrew Or] Deal with quotes + address various comments
fe4c8a7 [Andrew Or] Update --help for bin/pyspark
afe47bf [Andrew Or] Fix spark shell
f04aaa4 [Andrew Or] Merge branch 'master' of github.com:apache/spark into pyspark-submit
a371d26 [Andrew Or] Route bin/pyspark through Spark submit
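The mechanism above hinges on `PYSPARK_SUBMIT_ARGS` being readable from a PYTHONSTARTUP script. A minimal sketch of the consumer side (illustrative only, not the actual `python/pyspark/shell.py` code) shows why the commit log's `shlex` guard matters:

```python
# Illustrative sketch -- not the actual python/pyspark/shell.py code.
import os
import shlex

# bin/pyspark exports the flattened argument string before launching python.
submit_args = os.environ.get("PYSPARK_SUBMIT_ARGS")

# Guard against shlex hanging: shlex.split(None) falls back to reading
# sys.stdin and blocks, so only parse when the variable is actually set.
args = shlex.split(submit_args) if submit_args else []

# e.g. PYSPARK_SUBMIT_ARGS=' --master local[2] --name "My App"'
#   -> ['--master', 'local[2]', '--name', 'My App']
```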
Diffstat (limited to 'bin')
-rwxr-xr-x  bin/pyspark          35
-rw-r--r--  bin/pyspark2.cmd     21
-rwxr-xr-x  bin/spark-shell       6
-rwxr-xr-x  bin/spark-shell.cmd   2
4 files changed, 53 insertions, 11 deletions
diff --git a/bin/pyspark b/bin/pyspark
index 10e35e0f17..9e1364e44c 100755
--- a/bin/pyspark
+++ b/bin/pyspark
@@ -25,6 +25,12 @@ export SPARK_HOME="$FWDIR"
SCALA_VERSION=2.10
+if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
+ echo "Usage: ./bin/pyspark [options]"
+ ./bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
+ exit 0
+fi
+
# Exit if the user hasn't compiled Spark
if [ ! -f "$FWDIR/RELEASE" ]; then
# Exit if the user hasn't compiled Spark
@@ -52,13 +58,34 @@ export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.1-src.zip:$PYTHONPATH
export OLD_PYTHONSTARTUP=$PYTHONSTARTUP
export PYTHONSTARTUP=$FWDIR/python/pyspark/shell.py
+# If IPython options are specified, assume user wants to run IPython
if [ -n "$IPYTHON_OPTS" ]; then
IPYTHON=1
fi
-# Only use ipython if no command line arguments were provided [SPARK-1134]
-if [[ "$IPYTHON" = "1" && $# = 0 ]] ; then
- exec ipython $IPYTHON_OPTS
+# Build up arguments list manually to preserve quotes and backslashes.
+# We export Spark submit arguments as an environment variable because shell.py must run as a
+# PYTHONSTARTUP script, which does not take in arguments. This is required for IPython notebooks.
+
+PYSPARK_SUBMIT_ARGS=""
+whitespace="[[:space:]]"
+for i in "$@"; do
+ if [[ $i =~ \" ]]; then i=$(echo $i | sed 's/\"/\\\"/g'); fi
+ if [[ $i =~ $whitespace ]]; then i=\"$i\"; fi
+ PYSPARK_SUBMIT_ARGS="$PYSPARK_SUBMIT_ARGS $i"
+done
+export PYSPARK_SUBMIT_ARGS
+
+# If a python file is provided, directly run spark-submit.
+if [[ "$1" =~ \.py$ ]]; then
+ echo -e "\nWARNING: Running python applications through ./bin/pyspark is deprecated as of Spark 1.0." 1>&2
+ echo -e "Use ./bin/spark-submit <python file>\n" 1>&2
+ exec $FWDIR/bin/spark-submit "$@"
else
- exec "$PYSPARK_PYTHON" "$@"
+ # Only use ipython if no command line arguments were provided [SPARK-1134]
+ if [[ "$IPYTHON" = "1" ]]; then
+ exec ipython $IPYTHON_OPTS
+ else
+ exec "$PYSPARK_PYTHON"
+ fi
fi
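The argument loop above flattens `"$@"` into a single string whose quoting survives a later `shlex.split()` on the Python side. A quick round-trip check of that assumption (consumer-side behavior, not code from this commit):

```python
import shlex

# ['--name', 'My App'] is flattened to ' --name "My App"': arguments with
# whitespace are wrapped in double quotes, after backslash-escaping any
# embedded quotes. shlex.split() then recovers the original argument list.
assert shlex.split(' --name "My App"') == ["--name", "My App"]

# Embedded quotes (the "perverse app names" from the commit log) round-trip too:
assert shlex.split(' --name "My \\"App\\""') == ["--name", 'My "App"']
```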
diff --git a/bin/pyspark2.cmd b/bin/pyspark2.cmd
index d7cfd5eec5..0ef9eea953 100644
--- a/bin/pyspark2.cmd
+++ b/bin/pyspark2.cmd
@@ -31,7 +31,7 @@ set FOUND_JAR=0
for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*.jar") do (
set FOUND_JAR=1
)
-if "%FOUND_JAR%"=="0" (
+if [%FOUND_JAR%] == [0] (
echo Failed to find Spark assembly JAR.
echo You need to build Spark with sbt\sbt assembly before running this program.
goto exit
@@ -42,15 +42,30 @@ rem Load environment variables from conf\spark-env.cmd, if it exists
if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
rem Figure out which Python to use.
-if "x%PYSPARK_PYTHON%"=="x" set PYSPARK_PYTHON=python
+if [%PYSPARK_PYTHON%] == [] set PYSPARK_PYTHON=python
set PYTHONPATH=%FWDIR%python;%PYTHONPATH%
set PYTHONPATH=%FWDIR%python\lib\py4j-0.8.1-src.zip;%PYTHONPATH%
set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
set PYTHONSTARTUP=%FWDIR%python\pyspark\shell.py
+set PYSPARK_SUBMIT_ARGS=%*
echo Running %PYSPARK_PYTHON% with PYTHONPATH=%PYTHONPATH%
-"%PYSPARK_PYTHON%" %*
+rem Check whether the argument is a file
+for /f %%i in ('echo %1^| findstr /R "\.py"') do (
+ set PYTHON_FILE=%%i
+)
+
+if [%PYTHON_FILE%] == [] (
+ %PYSPARK_PYTHON%
+) else (
+ echo.
+ echo WARNING: Running python applications through ./bin/pyspark.cmd is deprecated as of Spark 1.0.
+ echo Use ./bin/spark-submit ^<python file^>
+ echo.
+ "%FWDIR%\bin\spark-submit.cmd" %PYSPARK_SUBMIT_ARGS%
+)
+
:exit
diff --git a/bin/spark-shell b/bin/spark-shell
index 7f03349c5e..c158683ab3 100755
--- a/bin/spark-shell
+++ b/bin/spark-shell
@@ -28,7 +28,7 @@ esac
# Enter posix mode for bash
set -o posix
-if [[ "$@" == *--help* ]]; then
+if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
echo "Usage: ./bin/spark-shell [options]"
./bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
exit 0
@@ -46,11 +46,11 @@ function main(){
# (see https://github.com/sbt/sbt/issues/562).
stty -icanon min 1 -echo > /dev/null 2>&1
export SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS -Djline.terminal=unix"
- $FWDIR/bin/spark-submit spark-internal "$@" --class org.apache.spark.repl.Main
+ $FWDIR/bin/spark-submit spark-shell "$@" --class org.apache.spark.repl.Main
stty icanon echo > /dev/null 2>&1
else
export SPARK_SUBMIT_OPTS
- $FWDIR/bin/spark-submit spark-internal "$@" --class org.apache.spark.repl.Main
+ $FWDIR/bin/spark-submit spark-shell "$@" --class org.apache.spark.repl.Main
fi
}
diff --git a/bin/spark-shell.cmd b/bin/spark-shell.cmd
index ca0c722c92..4b9708a8c0 100755
--- a/bin/spark-shell.cmd
+++ b/bin/spark-shell.cmd
@@ -19,4 +19,4 @@ rem
set SPARK_HOME=%~dp0..
-cmd /V /E /C %SPARK_HOME%\bin\spark-submit.cmd spark-internal %* --class org.apache.spark.repl.Main
+cmd /V /E /C %SPARK_HOME%\bin\spark-submit.cmd spark-shell %* --class org.apache.spark.repl.Main