author     Andrew Or <andrew@databricks.com>    2015-06-19 10:56:19 -0700
committer  Andrew Or <andrew@databricks.com>    2015-06-19 10:56:19 -0700
commit     68a2dca292776d4a3f988353ba55adc73a7c1aa2 (patch)
tree       eabbb0ae1733cfcb44c202fd2a3bdfe4af638a0e /core
parent     866816eb97002863ec205d854e1397982aecbc5e (diff)
[SPARK-8451] [SPARK-7287] SparkSubmitSuite should check exit code
This patch also reenables the tests. Now that we have access to the log4j logs it should be easier to debug the flakiness.

yhuai brkyvz

Author: Andrew Or <andrew@databricks.com>

Closes #6886 from andrewor14/spark-submit-suite-fix and squashes the following commits:

3f99ff1 [Andrew Or] Move destroy to finally block
9a62188 [Andrew Or] Re-enable ignored tests
2382672 [Andrew Or] Check for exit code
Diffstat (limited to 'core')
-rw-r--r--    core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala    17
1 file changed, 12 insertions(+), 5 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index 46ea28d0f1..357ed90be3 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -325,7 +325,7 @@ class SparkSubmitSuite
     runSparkSubmit(args)
   }
 
-  ignore("includes jars passed in through --jars") {
+  test("includes jars passed in through --jars") {
     val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
     val jar1 = TestUtils.createJarWithClasses(Seq("SparkSubmitClassA"))
     val jar2 = TestUtils.createJarWithClasses(Seq("SparkSubmitClassB"))
@@ -340,7 +340,7 @@ class SparkSubmitSuite
   }
 
   // SPARK-7287
-  ignore("includes jars passed in through --packages") {
+  test("includes jars passed in through --packages") {
     val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
     val main = MavenCoordinate("my.great.lib", "mylib", "0.1")
     val dep = MavenCoordinate("my.great.dep", "mylib", "0.1")
@@ -499,9 +499,16 @@ class SparkSubmitSuite
       Seq("./bin/spark-submit") ++ args,
       new File(sparkHome),
       Map("SPARK_TESTING" -> "1", "SPARK_HOME" -> sparkHome))
-    failAfter(60 seconds) { process.waitFor() }
-    // Ensure we still kill the process in case it timed out
-    process.destroy()
+
+    try {
+      val exitCode = failAfter(60 seconds) { process.waitFor() }
+      if (exitCode != 0) {
+        fail(s"Process returned with exit code $exitCode. See the log4j logs for more detail.")
+      }
+    } finally {
+      // Ensure we still kill the process in case it timed out
+      process.destroy()
+    }
   }
 
   private def forConfDir(defaults: Map[String, String]) (f: String => Unit) = {
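
For context, the sketch below illustrates the same pattern the patch introduces, written as a self-contained Scala program rather than a ScalaTest case: wait for a child process with a timeout, fail on a non-zero exit code, and always destroy the process in a finally block. It substitutes plain java.lang.ProcessBuilder and Process.waitFor(timeout, unit) for Spark's process-launching helper and ScalaTest's failAfter; the ExitCodeCheckSketch object, the runAndCheck helper, the echo command, and the 60-second timeout are illustrative assumptions, not part of the patch.

import java.util.concurrent.TimeUnit

object ExitCodeCheckSketch {
  // Launch a command, wait up to 60 seconds, and verify that it exited with code 0.
  def runAndCheck(command: Seq[String]): Unit = {
    val process = new ProcessBuilder(command: _*).inheritIO().start()
    try {
      // waitFor(timeout, unit) returns false if the process is still running when the timeout expires.
      val finished = process.waitFor(60, TimeUnit.SECONDS)
      if (!finished) {
        sys.error(s"Process '${command.mkString(" ")}' timed out after 60 seconds.")
      }
      val exitCode = process.exitValue()
      if (exitCode != 0) {
        sys.error(s"Process returned with exit code $exitCode. See the logs for more detail.")
      }
    } finally {
      // Destroy the process even if it timed out or one of the checks threw,
      // mirroring the finally block introduced by the patch.
      process.destroy()
    }
  }

  def main(args: Array[String]): Unit = {
    runAndCheck(Seq("echo", "hello"))
  }
}

Checking the exit code, rather than merely waiting for the process, is what turns a silently failing spark-submit run into a visible test failure, and keeping destroy() in the finally block ensures the child process is cleaned up even when the wait times out.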