diff options
author | Brennon York <brennon.york@capitalone.com> | 2015-10-18 22:45:14 -0700 |
---|---|---|
committer | Josh Rosen <joshrosen@databricks.com> | 2015-10-18 22:45:27 -0700 |
commit | d3180c25d8cf0899a7238e7d24b35c5ae918cc1d (patch) | |
tree | 851119cf665da91ea8b641ccafd9378aa0db3d90 /dev/run-tests.py | |
parent | 94c8fef296e5cdac9a93ed34acc079e51839caa7 (diff) | |
download | spark-d3180c25d8cf0899a7238e7d24b35c5ae918cc1d.tar.gz spark-d3180c25d8cf0899a7238e7d24b35c5ae918cc1d.tar.bz2 spark-d3180c25d8cf0899a7238e7d24b35c5ae918cc1d.zip |
[SPARK-7018][BUILD] Refactor dev/run-tests-jenkins into Python
This commit refactors the `run-tests-jenkins` script into Python. This refactoring was done by brennonyork in #7401; this PR contains a few minor edits from joshrosen in order to bring it up to date with other recent changes.
From the original PR description (by brennonyork):
Currently a few things are left out that could — and I think should — become smaller JIRAs after this.
1. There are still a few areas where we use environment variables where we don't need to (like `CURRENT_BLOCK`). I might get around to fixing this one in lieu of everything else, but wanted to point that out.
2. The PR tests are still written in bash. I opted to not change those and just rewrite the runner into Python. This is a great follow-on JIRA IMO.
3. All of the linting scripts are still in bash as well; it would likely be best to add those as follow-on JIRAs too.
Closes #7401.
Author: Brennon York <brennon.york@capitalone.com>
Closes #9161 from JoshRosen/run-tests-jenkins-refactoring.
Diffstat (limited to 'dev/run-tests.py')
-rwxr-xr-x | dev/run-tests.py | 20 |
1 file changed, 4 insertions, 16 deletions
diff --git a/dev/run-tests.py b/dev/run-tests.py
index d4d6880491..6b4b710734 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -27,10 +27,11 @@ import sys
 import subprocess
 from collections import namedtuple

-from sparktestsupport import SPARK_HOME, USER_HOME
+from sparktestsupport import SPARK_HOME, USER_HOME, ERROR_CODES
 from sparktestsupport.shellutils import exit_from_command_with_retcode, run_cmd, rm_r, which
 import sparktestsupport.modules as modules

+
 # -------------------------------------------------------------------------------------------------
 # Functions for traversing module dependency graph
 # -------------------------------------------------------------------------------------------------
@@ -130,19 +131,6 @@ def determine_tags_to_exclude(changed_modules):
 # Functions for working with subprocesses and shell tools
 # -------------------------------------------------------------------------------------------------

-def get_error_codes(err_code_file):
-    """Function to retrieve all block numbers from the `run-tests-codes.sh`
-    file to maintain backwards compatibility with the `run-tests-jenkins`
-    script"""
-
-    with open(err_code_file, 'r') as f:
-        err_codes = [e.split()[1].strip().split('=')
-                     for e in f if e.startswith("readonly")]
-        return dict(err_codes)
-
-
-ERROR_CODES = get_error_codes(os.path.join(SPARK_HOME, "dev/run-tests-codes.sh"))
-

 def determine_java_executable():
     """Will return the path of the java executable that will be used by Spark's
@@ -191,7 +179,7 @@ def determine_java_version(java_exe):

 def set_title_and_block(title, err_block):
-    os.environ["CURRENT_BLOCK"] = ERROR_CODES[err_block]
+    os.environ["CURRENT_BLOCK"] = str(ERROR_CODES[err_block])

     line_str = '=' * 72

     print('')
@@ -467,7 +455,7 @@ def main():
         rm_r(os.path.join(USER_HOME, ".ivy2", "local", "org.apache.spark"))
         rm_r(os.path.join(USER_HOME, ".ivy2", "cache", "org.apache.spark"))

-    os.environ["CURRENT_BLOCK"] = ERROR_CODES["BLOCK_GENERAL"]
+    os.environ["CURRENT_BLOCK"] = str(ERROR_CODES["BLOCK_GENERAL"])

     java_exe = determine_java_executable()