diff options
author | Tathagata Das <tathagata.das1565@gmail.com> | 2014-05-22 20:48:55 -0700 |
---|---|---|
committer | Tathagata Das <tathagata.das1565@gmail.com> | 2014-05-22 20:48:55 -0700 |
commit | b2bdd0e505f1ae3d39c46139f17bd43779ece635 (patch) | |
tree | fd1f6274986c3bf259c0dd3a0adaf6c2cfddc1a1 /dev/audit-release/audit_release.py | |
parent | cce77457e00aa5f1f4db3d50454cf257efb156ed (diff) | |
download | spark-b2bdd0e505f1ae3d39c46139f17bd43779ece635.tar.gz spark-b2bdd0e505f1ae3d39c46139f17bd43779ece635.tar.bz2 spark-b2bdd0e505f1ae3d39c46139f17bd43779ece635.zip |
Updated scripts for auditing releases
- Added script to automatically generate change list CHANGES.txt
- Added test for verifying linking against maven distributions of `spark-sql` and `spark-hive`
- Added SBT projects for testing functionality of `spark-sql` and `spark-hive`
- Fixed issues in existing tests that might have come up because of changes in Spark 1.0
Author: Tathagata Das <tathagata.das1565@gmail.com>
Closes #844 from tdas/update-dev-scripts and squashes the following commits:
25090ba [Tathagata Das] Added missing license
e2e20b3 [Tathagata Das] Updated tests for auditing releases.
Diffstat (limited to 'dev/audit-release/audit_release.py')
-rwxr-xr-x | dev/audit-release/audit_release.py | 11 |
1 file changed, 7 insertions, 4 deletions
diff --git a/dev/audit-release/audit_release.py b/dev/audit-release/audit_release.py
index 4a816d4101..8c7573b91f 100755
--- a/dev/audit-release/audit_release.py
+++ b/dev/audit-release/audit_release.py
@@ -93,9 +93,12 @@ original_dir = os.getcwd()
 # For each of these modules, we'll test an 'empty' application in sbt and
 # maven that links against them. This will catch issues with messed up
 # dependencies within those projects.
-modules = ["spark-core", "spark-bagel", "spark-mllib", "spark-streaming", "spark-repl",
-           "spark-graphx", "spark-streaming-flume", "spark-streaming-kafka",
-           "spark-streaming-mqtt", "spark-streaming-twitter", "spark-streaming-zeromq"]
+modules = [
+  "spark-core", "spark-bagel", "spark-mllib", "spark-streaming", "spark-repl",
+  "spark-graphx", "spark-streaming-flume", "spark-streaming-kafka",
+  "spark-streaming-mqtt", "spark-streaming-twitter", "spark-streaming-zeromq",
+  "spark-catalyst", "spark-sql", "spark-hive"
+]
 modules = map(lambda m: "%s_%s" % (m, SCALA_BINARY_VERSION), modules)

 # Check for directories that might interfere with tests
@@ -122,7 +125,7 @@ for module in modules:
     os.chdir(original_dir)

 # SBT application tests
-for app in ["sbt_app_core", "sbt_app_graphx", "sbt_app_streaming"]:
+for app in ["sbt_app_core", "sbt_app_graphx", "sbt_app_streaming", "sbt_app_sql", "sbt_app_hive"]:
     os.chdir(app)
     ret = run_cmd("sbt clean run", exit_on_failure=False)
     test(ret == 0, "sbt application (%s)" % app)