author     Andrew Or <andrewor14@gmail.com>          2014-06-11 12:11:46 -0700
committer  Patrick Wendell <pwendell@gmail.com>      2014-06-11 12:11:46 -0700
commit     fe78b8b6f7e3fe519659134c6fcaf7344077ead8 (patch)
tree       037a3ebb9fa9bfcf3b2ba59f628b41d91383691f
parent     ce6deb1e5b4cd40c97730fcf5dc89cb2f624bce2 (diff)
HOTFIX: A few PySpark tests were not actually run
This is a hot fix for the hot fix in fb499be1ac935b6f91046ec8ff23ac1267c82342. The changes in that commit did not actually cause the `doctest` module in python to be loaded for the following tests:
- pyspark/broadcast.py
- pyspark/accumulators.py
- pyspark/serializers.py

(@pwendell I might have told you the wrong thing)

Author: Andrew Or <andrewor14@gmail.com>

Closes #1053 from andrewor14/python-test-fix and squashes the following commits:

d2e5401 [Andrew Or] Explain why these tests are handled differently
0bd6fdd [Andrew Or] Fix 3 pyspark tests not being invoked
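The distinction the fix hinges on: for the three modules listed above, the doctests live only in the module-level docstring, so executing the file directly never runs them; they are only exercised when the file is handed to Python's doctest runner explicitly. A minimal illustration with a throwaway module (mymod.py is hypothetical, not part of this patch):

# A throwaway module whose only test is an example in the module docstring:
cat > mymod.py <<'EOF'
"""
>>> 1 + 1
2
"""
EOF

python mymod.py             # runs the file as a plain script; the example is never checked
python -m doctest mymod.py  # the doctest runner finds and executes the docstring example

The real PySpark modules additionally need the environment that bin/pyspark prepares (spark-env, PYSPARK_PYTHON, PYTHONPATH), which is presumably why the doctest dispatch is added there rather than directly in run-tests.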
-rwxr-xr-x  bin/pyspark        20
-rwxr-xr-x  python/run-tests    5
2 files changed, 17 insertions, 8 deletions
diff --git a/bin/pyspark b/bin/pyspark
index 114cbbc3a8..0b5ed40e21 100755
--- a/bin/pyspark
+++ b/bin/pyspark
@@ -45,7 +45,7 @@ fi
. $FWDIR/bin/load-spark-env.sh
# Figure out which Python executable to use
-if [ -z "$PYSPARK_PYTHON" ] ; then
+if [[ -z "$PYSPARK_PYTHON" ]]; then
PYSPARK_PYTHON="python"
fi
export PYSPARK_PYTHON
@@ -59,7 +59,7 @@ export OLD_PYTHONSTARTUP=$PYTHONSTARTUP
export PYTHONSTARTUP=$FWDIR/python/pyspark/shell.py
# If IPython options are specified, assume user wants to run IPython
-if [ -n "$IPYTHON_OPTS" ]; then
+if [[ -n "$IPYTHON_OPTS" ]]; then
IPYTHON=1
fi
@@ -76,6 +76,16 @@ for i in "$@"; do
done
export PYSPARK_SUBMIT_ARGS
+# For pyspark tests
+if [[ -n "$SPARK_TESTING" ]]; then
+ if [[ -n "$PYSPARK_DOC_TEST" ]]; then
+ exec "$PYSPARK_PYTHON" -m doctest $1
+ else
+ exec "$PYSPARK_PYTHON" $1
+ fi
+ exit
+fi
+
# If a python file is provided, directly run spark-submit.
if [[ "$1" =~ \.py$ ]]; then
echo -e "\nWARNING: Running python applications through ./bin/pyspark is deprecated as of Spark 1.0." 1>&2
@@ -86,10 +96,6 @@ else
if [[ "$IPYTHON" = "1" ]]; then
exec ipython $IPYTHON_OPTS
else
- if [[ -n $SPARK_TESTING ]]; then
- exec "$PYSPARK_PYTHON" -m doctest
- else
- exec "$PYSPARK_PYTHON"
- fi
+ exec "$PYSPARK_PYTHON"
fi
fi
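With the block added above, a test run that reaches bin/pyspark while SPARK_TESTING is set is short-circuited before the IPython/interactive-shell logic: the script execs the Python interpreter on the file it was given, adding -m doctest only when PYSPARK_DOC_TEST is also set. Driving the two paths by hand looks roughly like this (a sketch, from the repository root, assuming a built Spark so the docstring examples can start a SparkContext):

# Plain execution of a test file, as before:
SPARK_TESTING=1 ./bin/pyspark python/pyspark/tests.py

# Doctest mode, for the modules whose tests live only in the module docstring:
SPARK_TESTING=1 PYSPARK_DOC_TEST=1 ./bin/pyspark python/pyspark/broadcast.py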
diff --git a/python/run-tests b/python/run-tests
index 3b4501178c..9282aa47e8 100755
--- a/python/run-tests
+++ b/python/run-tests
@@ -44,7 +44,6 @@ function run_test() {
echo -en "\033[0m" # No color
exit -1
fi
-
}
echo "Running PySpark tests. Output is in python/unit-tests.log."
@@ -55,9 +54,13 @@ run_test "pyspark/conf.py"
if [ -n "$_RUN_SQL_TESTS" ]; then
run_test "pyspark/sql.py"
fi
+# These tests are included in the module-level docs, and so must
+# be handled on a higher level rather than within the python file.
+export PYSPARK_DOC_TEST=1
run_test "pyspark/broadcast.py"
run_test "pyspark/accumulators.py"
run_test "pyspark/serializers.py"
+unset PYSPARK_DOC_TEST
run_test "pyspark/tests.py"
run_test "pyspark/mllib/_common.py"
run_test "pyspark/mllib/classification.py"