path: root/dev/run-tests.py
author     Brennon York <brennon.york@capitalone.com>    2015-06-29 08:55:06 -0700
committer  Josh Rosen <joshrosen@databricks.com>          2015-06-29 08:55:06 -0700
commit     5c796d576ec2de96bf72dbf6ccd0e85480a6e3b1 (patch)
tree       70b1f367e1e679d746788b273ef797ed012aa3a6 /dev/run-tests.py
parent     630bd5fd80193ab6dc6ad0e7bcc13ee0dadabd38 (diff)
download   spark-5c796d576ec2de96bf72dbf6ccd0e85480a6e3b1.tar.gz
           spark-5c796d576ec2de96bf72dbf6ccd0e85480a6e3b1.tar.bz2
           spark-5c796d576ec2de96bf72dbf6ccd0e85480a6e3b1.zip
[SPARK-8693] [PROJECT INFRA] profiles and goals are not printed in a nice way
Hotfix to correct formatting errors in the print statements used by the dev and Jenkins builds. The garbled output looks like:

```
-Phadoop-1[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments: -Dhadoop.version=1.0.4[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments: -Pkinesis-asl[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments: -Phive-thriftserver[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments: -Phive[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments: package[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments: assembly/assembly[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments: streaming-kafka-assembly/assembly
```

Author: Brennon York <brennon.york@capitalone.com>

Closes #7085 from brennonyork/SPARK-8693 and squashes the following commits:

c5575f1 [Brennon York] added commas to end of print statements for proper printing
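The root cause is a Python pitfall: adjacent string literals are concatenated at compile time, so without a trailing comma the label string fuses with the `" "` literal and becomes the separator passed to `str.join`, interleaving the label between every flag. Below is a minimal sketch of the pitfall and of the fix applied in this commit; the flag list is illustrative, not the exact set built by run-tests.py:

```python
profiles_and_goals = ["-Phadoop-1", "-Dhadoop.version=1.0.4", "package"]  # illustrative values

# Before the fix: the two adjacent literals merge into one string, and that whole
# label is used as the join() separator, producing the garbled output shown above.
print("[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments: "
      " ".join(profiles_and_goals))

# After the fix: the trailing comma turns the call into print(label, joined_flags),
# so the label is printed once, followed by the space-joined flags.
print("[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments: ",
      " ".join(profiles_and_goals))
# [info] Building Spark (w/Hive 0.13.1) using SBT with these arguments:  -Phadoop-1 -Dhadoop.version=1.0.4 package
```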
Diffstat (limited to 'dev/run-tests.py')
-rwxr-xr-x  dev/run-tests.py  16
1 file changed, 8 insertions, 8 deletions
diff --git a/dev/run-tests.py b/dev/run-tests.py
index eb79a2a502..e5c897b94d 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -210,7 +210,7 @@ def build_spark_documentation():
     jekyll_bin = which("jekyll")
     if not jekyll_bin:
-        print("[error] Cannot find a version of `jekyll` on the system; please"
+        print("[error] Cannot find a version of `jekyll` on the system; please",
               " install one and retry to build documentation.")
         sys.exit(int(os.environ.get("CURRENT_BLOCK", 255)))
     else:
@@ -270,7 +270,7 @@ def get_hadoop_profiles(hadoop_version):
     if hadoop_version in sbt_maven_hadoop_profiles:
         return sbt_maven_hadoop_profiles[hadoop_version]
     else:
-        print("[error] Could not find", hadoop_version, "in the list. Valid options"
+        print("[error] Could not find", hadoop_version, "in the list. Valid options",
               " are", sbt_maven_hadoop_profiles.keys())
         sys.exit(int(os.environ.get("CURRENT_BLOCK", 255)))
@@ -281,7 +281,7 @@ def build_spark_maven(hadoop_version):
     mvn_goals = ["clean", "package", "-DskipTests"]
     profiles_and_goals = build_profiles + mvn_goals
-    print("[info] Building Spark (w/Hive 0.13.1) using Maven with these arguments: "
+    print("[info] Building Spark (w/Hive 0.13.1) using Maven with these arguments: ",
           " ".join(profiles_and_goals))
     exec_maven(profiles_and_goals)
@@ -295,7 +295,7 @@ def build_spark_sbt(hadoop_version):
                  "streaming-kafka-assembly/assembly"]
     profiles_and_goals = build_profiles + sbt_goals
-    print("[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments: "
+    print("[info] Building Spark (w/Hive 0.13.1) using SBT with these arguments: ",
           " ".join(profiles_and_goals))
     exec_sbt(profiles_and_goals)
@@ -324,7 +324,7 @@ def run_scala_tests_maven(test_profiles):
     mvn_test_goals = ["test", "--fail-at-end"]
     profiles_and_goals = test_profiles + mvn_test_goals
-    print("[info] Running Spark tests using Maven with these arguments: "
+    print("[info] Running Spark tests using Maven with these arguments: ",
           " ".join(profiles_and_goals))
     exec_maven(profiles_and_goals)
@@ -339,7 +339,7 @@ def run_scala_tests_sbt(test_modules, test_profiles):
     profiles_and_goals = test_profiles + list(sbt_test_goals)
-    print("[info] Running Spark tests using SBT with these arguments: "
+    print("[info] Running Spark tests using SBT with these arguments: ",
           " ".join(profiles_and_goals))
     exec_sbt(profiles_and_goals)
@@ -382,7 +382,7 @@ def run_sparkr_tests():
 def main():
     # Ensure the user home directory (HOME) is valid and is an absolute directory
     if not USER_HOME or not os.path.isabs(USER_HOME):
-        print("[error] Cannot determine your home directory as an absolute path;"
+        print("[error] Cannot determine your home directory as an absolute path;",
               " ensure the $HOME environment variable is set properly.")
         sys.exit(1)
@@ -397,7 +397,7 @@ def main():
     java_exe = determine_java_executable()
     if not java_exe:
-        print("[error] Cannot find a version of `java` on the system; please"
+        print("[error] Cannot find a version of `java` on the system; please",
               " install one and retry.")
         sys.exit(2)