about summary refs log tree commit diff
path: root/dev/run-tests
diff options
context:
space:
mode:
author	Nicholas Chammas <nicholas.chammas@gmail.com>	2014-10-06 14:19:06 -0700
committer	Josh Rosen <joshrosen@apache.org>	2014-10-06 14:19:06 -0700
commit	69c3f441a9b6e942d6c08afecd59a0349d61cc7b (patch)
tree	ab86b2ae7d03d55eae6c6d1a622fd0f8b945421b /dev/run-tests
parent	2300eb58ae79a86e65b3ff608a578f5d4c09892b (diff)
download	spark-69c3f441a9b6e942d6c08afecd59a0349d61cc7b.tar.gz
spark-69c3f441a9b6e942d6c08afecd59a0349d61cc7b.tar.bz2
spark-69c3f441a9b6e942d6c08afecd59a0349d61cc7b.zip
[SPARK-3479] [Build] Report failed test category
This PR allows SparkQA (i.e. Jenkins) to report in its posts to GitHub what category of test failed, if one can be determined. The failure categories are: * general failure * RAT checks failed * Scala style checks failed * Python style checks failed * Build failed * Spark unit tests failed * PySpark unit tests failed * MiMa checks failed This PR also fixes the diffing logic used to determine if a patch introduces new classes. Author: Nicholas Chammas <nicholas.chammas@gmail.com> Closes #2606 from nchammas/report-failed-test-category and squashes the following commits: d67df03 [Nicholas Chammas] report what test category failed
Diffstat (limited to 'dev/run-tests')
-rwxr-xr-x	dev/run-tests	32
1 files changed, 30 insertions, 2 deletions
diff --git a/dev/run-tests b/dev/run-tests
index c3d8f49cdd..4be2baaf48 100755
--- a/dev/run-tests
+++ b/dev/run-tests
@@ -24,6 +24,16 @@ cd "$FWDIR"
# Remove work directory
rm -rf ./work
+source "$FWDIR/dev/run-tests-codes.sh"
+
+CURRENT_BLOCK=$BLOCK_GENERAL
+
+function handle_error () {
+ echo "[error] Got a return code of $? on line $1 of the run-tests script."
+ exit $CURRENT_BLOCK
+}
+
+
# Build against the right verison of Hadoop.
{
if [ -n "$AMPLAB_JENKINS_BUILD_PROFILE" ]; then
@@ -91,26 +101,34 @@ if [ -n "$AMPLAB_JENKINS" ]; then
fi
fi
-# Fail fast
-set -e
set -o pipefail
+trap 'handle_error $LINENO' ERR
echo ""
echo "========================================================================="
echo "Running Apache RAT checks"
echo "========================================================================="
+
+CURRENT_BLOCK=$BLOCK_RAT
+
./dev/check-license
echo ""
echo "========================================================================="
echo "Running Scala style checks"
echo "========================================================================="
+
+CURRENT_BLOCK=$BLOCK_SCALA_STYLE
+
./dev/lint-scala
echo ""
echo "========================================================================="
echo "Running Python style checks"
echo "========================================================================="
+
+CURRENT_BLOCK=$BLOCK_PYTHON_STYLE
+
./dev/lint-python
echo ""
@@ -118,6 +136,8 @@ echo "========================================================================="
echo "Building Spark"
echo "========================================================================="
+CURRENT_BLOCK=$BLOCK_BUILD
+
{
# We always build with Hive because the PySpark Spark SQL tests need it.
BUILD_MVN_PROFILE_ARGS="$SBT_MAVEN_PROFILES_ARGS -Phive"
@@ -141,6 +161,8 @@ echo "========================================================================="
echo "Running Spark unit tests"
echo "========================================================================="
+CURRENT_BLOCK=$BLOCK_SPARK_UNIT_TESTS
+
{
# If the Spark SQL tests are enabled, run the tests with the Hive profiles enabled.
# This must be a single argument, as it is.
@@ -175,10 +197,16 @@ echo ""
echo "========================================================================="
echo "Running PySpark tests"
echo "========================================================================="
+
+CURRENT_BLOCK=$BLOCK_PYSPARK_UNIT_TESTS
+
./python/run-tests
echo ""
echo "========================================================================="
echo "Detecting binary incompatibilites with MiMa"
echo "========================================================================="
+
+CURRENT_BLOCK=$BLOCK_MIMA
+
./dev/mima