diff options
author | Josh Rosen <joshrosen@databricks.com> | 2015-12-30 12:47:42 -0800 |
---|---|---|
committer | Josh Rosen <joshrosen@databricks.com> | 2015-12-30 12:47:42 -0800 |
commit | 27a42c7108ced48a7f558990de2e4fc7ed340119 (patch) | |
tree | e65525f7dee5ceae053643ac3f5e8b4a1716272b /dev/sparktestsupport | |
parent | d1ca634db4ca9db7f0ba7ca38a0e03bcbfec23c9 (diff) | |
download | spark-27a42c7108ced48a7f558990de2e4fc7ed340119.tar.gz spark-27a42c7108ced48a7f558990de2e4fc7ed340119.tar.bz2 spark-27a42c7108ced48a7f558990de2e4fc7ed340119.zip |
[SPARK-10359] Enumerate dependencies in a file and diff against it for new pull requests
This patch adds a new build check which enumerates Spark's resolved runtime classpath and saves it to a file, then diffs against that file to detect whether pull requests have introduced dependency changes. The aim of this check is to make it simpler to reason about whether pull requests which modify the build have introduced new dependencies or changed transitive dependencies in a way that affects the final classpath.
This supplants the checks added in SPARK-4123 / #5093, which are currently disabled due to bugs.
This patch is based on pwendell's work in #8531.
Closes #8531.
Author: Josh Rosen <joshrosen@databricks.com>
Author: Patrick Wendell <patrick@databricks.com>
Closes #10461 from JoshRosen/SPARK-10359.
Diffstat (limited to 'dev/sparktestsupport')
-rw-r--r-- | dev/sparktestsupport/__init__.py | 1 | ||||
-rw-r--r-- | dev/sparktestsupport/modules.py | 15 |
2 files changed, 14 insertions, 2 deletions
diff --git a/dev/sparktestsupport/__init__.py b/dev/sparktestsupport/__init__.py index 0e8032d133..89015f8c4f 100644 --- a/dev/sparktestsupport/__init__.py +++ b/dev/sparktestsupport/__init__.py @@ -32,5 +32,6 @@ ERROR_CODES = { "BLOCK_PYSPARK_UNIT_TESTS": 19, "BLOCK_SPARKR_UNIT_TESTS": 20, "BLOCK_JAVA_STYLE": 21, + "BLOCK_BUILD_TESTS": 22, "BLOCK_TIMEOUT": 124 } diff --git a/dev/sparktestsupport/modules.py b/dev/sparktestsupport/modules.py index d65547e04d..4667b289f5 100644 --- a/dev/sparktestsupport/modules.py +++ b/dev/sparktestsupport/modules.py @@ -31,7 +31,7 @@ class Module(object): def __init__(self, name, dependencies, source_file_regexes, build_profile_flags=(), environ={}, sbt_test_goals=(), python_test_goals=(), blacklisted_python_implementations=(), - test_tags=(), should_run_r_tests=False): + test_tags=(), should_run_r_tests=False, should_run_build_tests=False): """ Define a new module. @@ -53,6 +53,7 @@ class Module(object): :param test_tags A set of tags that will be excluded when running unit tests if the module is not explicitly changed. :param should_run_r_tests: If true, changes in this module will trigger all R tests. + :param should_run_build_tests: If true, changes in this module will trigger build tests. 
""" self.name = name self.dependencies = dependencies @@ -64,6 +65,7 @@ class Module(object): self.blacklisted_python_implementations = blacklisted_python_implementations self.test_tags = test_tags self.should_run_r_tests = should_run_r_tests + self.should_run_build_tests = should_run_build_tests self.dependent_modules = set() for dep in dependencies: @@ -394,6 +396,14 @@ docs = Module( ] ) +build = Module( + name="build", + dependencies=[], + source_file_regexes=[ + ".*pom.xml", + "dev/test-dependencies.sh", + ] +) ec2 = Module( name="ec2", @@ -433,5 +443,6 @@ root = Module( "test", ], python_test_goals=list(itertools.chain.from_iterable(m.python_test_goals for m in all_modules)), - should_run_r_tests=True + should_run_r_tests=True, + should_run_build_tests=True ) |