-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala                       | 12
-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala                         |  6
-rw-r--r--  sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala | 21
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala               | 17
4 files changed, 38 insertions(+), 18 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala
index 1ed4bae3ca..cc30ba223e 100644
--- a/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala
@@ -33,8 +33,12 @@ import org.scalatest.BeforeAndAfterEach
import org.apache.spark.SparkFunSuite
import org.apache.spark.api.r.RUtils
import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
+import org.apache.spark.util.ResetSystemProperties
-class RPackageUtilsSuite extends SparkFunSuite with BeforeAndAfterEach {
+class RPackageUtilsSuite
+ extends SparkFunSuite
+ with BeforeAndAfterEach
+ with ResetSystemProperties {
private val main = MavenCoordinate("a", "b", "c")
private val dep1 = MavenCoordinate("a", "dep1", "c")
@@ -60,11 +64,9 @@ class RPackageUtilsSuite extends SparkFunSuite with BeforeAndAfterEach {
}
}
- def beforeAll() {
- System.setProperty("spark.testing", "true")
- }
-
override def beforeEach(): Unit = {
+ super.beforeEach()
+ System.setProperty("spark.testing", "true")
lineBuffer.clear()
}
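
The bug this hunk fixes is that the old def beforeAll() was a plain method: BeforeAndAfterAll was never mixed in and there was no override, so ScalaTest never called it and spark.testing was only set if some earlier suite in the same JVM happened to set it. The fix sets the property in beforeEach() and mixes in Spark's ResetSystemProperties test trait, which saves the JVM's system properties before each test and restores them afterwards, so the flag cannot leak into unrelated suites. A minimal sketch of the resulting pattern (SparkFunSuite and ResetSystemProperties are Spark test-scoped classes; the test body is illustrative, not taken from the real suite):

    import org.scalatest.BeforeAndAfterEach

    import org.apache.spark.SparkFunSuite
    import org.apache.spark.util.ResetSystemProperties

    class ExamplePropertiesSuite
      extends SparkFunSuite
      with BeforeAndAfterEach
      with ResetSystemProperties {

      override def beforeEach(): Unit = {
        // Chains into ResetSystemProperties, which does its per-test save of
        // the current system properties.
        super.beforeEach()
        System.setProperty("spark.testing", "true")
      }

      test("spark.testing is set for this test only") {
        assert(System.getProperty("spark.testing") == "true")
      }
      // After each test, ResetSystemProperties puts the saved properties back,
      // so "spark.testing" does not linger in the JVM.
    }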
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index 1fd470cd3b..66a5051200 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -23,7 +23,7 @@ import scala.collection.mutable.ArrayBuffer
import com.google.common.base.Charsets.UTF_8
import com.google.common.io.ByteStreams
-import org.scalatest.Matchers
+import org.scalatest.{BeforeAndAfterEach, Matchers}
import org.scalatest.concurrent.Timeouts
import org.scalatest.time.SpanSugar._
@@ -37,10 +37,12 @@ import org.apache.spark.util.{ResetSystemProperties, Utils}
class SparkSubmitSuite
extends SparkFunSuite
with Matchers
+ with BeforeAndAfterEach
with ResetSystemProperties
with Timeouts {
- def beforeAll() {
+ override def beforeEach() {
+ super.beforeEach()
System.setProperty("spark.testing", "true")
}
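
SparkSubmitSuite gets the same correction, and it shows why the override keyword matters: the old def beforeAll() compiled even though no BeforeAndAfterAll trait was mixed in, so it was dead code that ScalaTest never invoked, whereas override def beforeEach() refuses to compile unless a matching lifecycle method really exists in a mixed-in trait. A made-up suite illustrating the silent failure mode of the old code (the property name is hypothetical):

    import org.scalatest.FunSuite

    class SilentlySkippedSetupSuite extends FunSuite {
      // Compiles fine, but ScalaTest never calls it: BeforeAndAfterAll is not
      // mixed in and there is no `override`, so this is just an unused method.
      def beforeAll(): Unit = {
        System.setProperty("some.test.flag", "true")
      }

      test("the flag was never set") {
        // Passes in a fresh JVM, because beforeAll() above never ran.
        assert(System.getProperty("some.test.flag") == null)
      }
    }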
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
index 3fa5c8528b..fcf0399169 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
@@ -27,7 +27,7 @@ import scala.concurrent.{Await, Promise}
import org.apache.spark.sql.test.ProcessTestUtils.ProcessOutputCapturer
import org.apache.hadoop.hive.conf.HiveConf.ConfVars
-import org.scalatest.BeforeAndAfter
+import org.scalatest.BeforeAndAfterAll
import org.apache.spark.util.Utils
import org.apache.spark.{Logging, SparkFunSuite}
@@ -36,21 +36,26 @@ import org.apache.spark.{Logging, SparkFunSuite}
* A test suite for the `spark-sql` CLI tool. Note that all test cases share the same temporary
* Hive metastore and warehouse.
*/
-class CliSuite extends SparkFunSuite with BeforeAndAfter with Logging {
+class CliSuite extends SparkFunSuite with BeforeAndAfterAll with Logging {
val warehousePath = Utils.createTempDir()
val metastorePath = Utils.createTempDir()
val scratchDirPath = Utils.createTempDir()
- before {
+ override def beforeAll(): Unit = {
+ super.beforeAll()
warehousePath.delete()
metastorePath.delete()
scratchDirPath.delete()
}
- after {
- warehousePath.delete()
- metastorePath.delete()
- scratchDirPath.delete()
+ override def afterAll(): Unit = {
+ try {
+ warehousePath.delete()
+ metastorePath.delete()
+ scratchDirPath.delete()
+ } finally {
+ super.afterAll()
+ }
}
/**
@@ -79,6 +84,8 @@ class CliSuite extends SparkFunSuite with BeforeAndAfter with Logging {
val jdbcUrl = s"jdbc:derby:;databaseName=$metastorePath;create=true"
s"""$cliScript
| --master local
+ | --driver-java-options -Dderby.system.durability=test
+ | --conf spark.ui.enabled=false
| --hiveconf ${ConfVars.METASTORECONNECTURLKEY}=$jdbcUrl
| --hiveconf ${ConfVars.METASTOREWAREHOUSE}=$warehousePath
| --hiveconf ${ConfVars.SCRATCHDIR}=$scratchDirPath
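
Two of the new CLI flags deserve a note: -Dderby.system.durability=test tells the embedded Derby metastore to skip syncing its log to disk on every commit, which is safe for throwaway test databases and makes them noticeably faster, and spark.ui.enabled=false stops the forked spark-sql process from binding a web UI port. A sketch of how the interpolated command comes out, assuming the suite flattens the multi-line string with stripMargin and whitespace splitting (paths below are placeholders, and the Hive key is spelled out instead of going through ConfVars):

    val cliScript = "../../bin/spark-sql"               // placeholder path
    val metastorePath = "/tmp/cli-suite-metastore"      // placeholder temp dir
    val jdbcUrl = s"jdbc:derby:;databaseName=$metastorePath;create=true"

    val command =
      s"""$cliScript
         | --master local
         | --driver-java-options -Dderby.system.durability=test
         | --conf spark.ui.enabled=false
         | --hiveconf javax.jdo.option.ConnectionURL=$jdbcUrl
       """.stripMargin.split("\\s+").toSeq
    // command == Seq("../../bin/spark-sql", "--master", "local",
    //                "--driver-java-options", "-Dderby.system.durability=test", ...)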
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index 10e4ae2c50..24a3afee14 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -23,7 +23,7 @@ import java.util.Date
import scala.collection.mutable.ArrayBuffer
-import org.scalatest.Matchers
+import org.scalatest.{BeforeAndAfterEach, Matchers}
import org.scalatest.concurrent.Timeouts
import org.scalatest.exceptions.TestFailedDueToTimeoutException
import org.scalatest.time.SpanSugar._
@@ -42,14 +42,14 @@ import org.apache.spark.util.{ResetSystemProperties, Utils}
class HiveSparkSubmitSuite
extends SparkFunSuite
with Matchers
- // This test suite sometimes gets extremely slow out of unknown reason on Jenkins. Here we
- // add a timestamp to provide more diagnosis information.
+ with BeforeAndAfterEach
with ResetSystemProperties
with Timeouts {
// TODO: rewrite these or mark them as slow tests to be run sparingly
- def beforeAll() {
+ override def beforeEach() {
+ super.beforeEach()
System.setProperty("spark.testing", "true")
}
@@ -66,6 +66,7 @@ class HiveSparkSubmitSuite
"--master", "local-cluster[2,1,1024]",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
+ "--driver-java-options", "-Dderby.system.durability=test",
"--jars", jarsString,
unusedJar.toString, "SparkSubmitClassA", "SparkSubmitClassB")
runSparkSubmit(args)
@@ -79,6 +80,7 @@ class HiveSparkSubmitSuite
"--master", "local-cluster[2,1,1024]",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
+ "--driver-java-options", "-Dderby.system.durability=test",
unusedJar.toString)
runSparkSubmit(args)
}
@@ -93,6 +95,7 @@ class HiveSparkSubmitSuite
val args = Seq(
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
+ "--driver-java-options", "-Dderby.system.durability=test",
"--class", "Main",
testJar)
runSparkSubmit(args)
@@ -104,6 +107,9 @@ class HiveSparkSubmitSuite
"--class", SPARK_9757.getClass.getName.stripSuffix("$"),
"--name", "SparkSQLConfTest",
"--master", "local-cluster[2,1,1024]",
+ "--conf", "spark.ui.enabled=false",
+ "--conf", "spark.master.rest.enabled=false",
+ "--driver-java-options", "-Dderby.system.durability=test",
unusedJar.toString)
runSparkSubmit(args)
}
@@ -114,6 +120,9 @@ class HiveSparkSubmitSuite
"--class", SPARK_11009.getClass.getName.stripSuffix("$"),
"--name", "SparkSQLConfTest",
"--master", "local-cluster[2,1,1024]",
+ "--conf", "spark.ui.enabled=false",
+ "--conf", "spark.master.rest.enabled=false",
+ "--driver-java-options", "-Dderby.system.durability=test",
unusedJar.toString)
runSparkSubmit(args)
}
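
The spark-submit invocations in HiveSparkSubmitSuite now share the same three settings: no web UI, no REST submission server, and relaxed Derby durability for the embedded metastore. A hedged sketch of how that common prefix could be factored out inside the suite (this helper does not exist in the commit; runSparkSubmit, unusedJar, and SPARK_9757 are the suite's own members):

    // Hypothetical refactor, not part of the actual change: gathers the flags
    // this commit adds to every forked spark-submit.
    private val quietFastArgs: Seq[String] = Seq(
      "--conf", "spark.ui.enabled=false",            // no web UI port per forked driver
      "--conf", "spark.master.rest.enabled=false",   // no REST submission server
      "--driver-java-options", "-Dderby.system.durability=test"  // no Derby log syncing
    )

    // Example usage inside one of the tests:
    val args = Seq(
      "--class", SPARK_9757.getClass.getName.stripSuffix("$"),
      "--name", "SparkSQLConfTest",
      "--master", "local-cluster[2,1,1024]") ++
      quietFastArgs ++
      Seq(unusedJar.toString)
    runSparkSubmit(args)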