about summary refs log tree commit diff
path: root/mllib
diff options
context:
space:
mode:
authorKazuaki Ishizaki <ishizaki@jp.ibm.com>2015-12-24 13:37:28 +0000
committerSean Owen <sowen@cloudera.com>2015-12-24 13:37:28 +0000
commit392046611837a3a740ff97fa8177ca7c12316fb7 (patch)
tree3c5149701ceaec57d12dff971fc0a34c05669c31 /mllib
parent9e85bb71ad2d7d3a9da0cb8853f3216d37e6ff47 (diff)
downloadspark-392046611837a3a740ff97fa8177ca7c12316fb7.tar.gz
spark-392046611837a3a740ff97fa8177ca7c12316fb7.tar.bz2
spark-392046611837a3a740ff97fa8177ca7c12316fb7.zip
[SPARK-12311][CORE] Restore previous value of "os.arch" property in test suites after forcing to set specific value to "os.arch" property
Restore the original value of the os.arch property after each test. Since some tests forcibly set a specific value for the os.arch property, we need to restore the original value afterwards. Author: Kazuaki Ishizaki <ishizaki@jp.ibm.com> Closes #10289 from kiszk/SPARK-12311.
Diffstat (limited to 'mllib')
-rw-r--r--mllib/src/test/scala/org/apache/spark/ml/source/libsvm/LibSVMRelationSuite.scala7
-rw-r--r--mllib/src/test/scala/org/apache/spark/ml/util/TempDirectory.scala7
-rw-r--r--mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala11
-rw-r--r--mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala15
4 files changed, 26 insertions(+), 14 deletions(-)
diff --git a/mllib/src/test/scala/org/apache/spark/ml/source/libsvm/LibSVMRelationSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/source/libsvm/LibSVMRelationSuite.scala
index 997f574e51..5f4d5f11bd 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/source/libsvm/LibSVMRelationSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/source/libsvm/LibSVMRelationSuite.scala
@@ -46,8 +46,11 @@ class LibSVMRelationSuite extends SparkFunSuite with MLlibTestSparkContext {
}
override def afterAll(): Unit = {
- Utils.deleteRecursively(tempDir)
- super.afterAll()
+ try {
+ Utils.deleteRecursively(tempDir)
+ } finally {
+ super.afterAll()
+ }
}
test("select as sparse vector") {
diff --git a/mllib/src/test/scala/org/apache/spark/ml/util/TempDirectory.scala b/mllib/src/test/scala/org/apache/spark/ml/util/TempDirectory.scala
index c8a0bb1624..8f11bbc8e4 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/util/TempDirectory.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/util/TempDirectory.scala
@@ -39,7 +39,10 @@ trait TempDirectory extends BeforeAndAfterAll { self: Suite =>
}
override def afterAll(): Unit = {
- Utils.deleteRecursively(_tempDir)
- super.afterAll()
+ try {
+ Utils.deleteRecursively(_tempDir)
+ } finally {
+ super.afterAll()
+ }
}
}
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala b/mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala
index 525ab68c79..4f73b0809d 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala
@@ -25,18 +25,21 @@ trait LocalClusterSparkContext extends BeforeAndAfterAll { self: Suite =>
@transient var sc: SparkContext = _
override def beforeAll() {
+ super.beforeAll()
val conf = new SparkConf()
.setMaster("local-cluster[2, 1, 1024]")
.setAppName("test-cluster")
.set("spark.akka.frameSize", "1") // set to 1MB to detect direct serialization of data
sc = new SparkContext(conf)
- super.beforeAll()
}
override def afterAll() {
- if (sc != null) {
- sc.stop()
+ try {
+ if (sc != null) {
+ sc.stop()
+ }
+ } finally {
+ super.afterAll()
}
- super.afterAll()
}
}
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala b/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
index 378139593b..ebcd591465 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
@@ -38,12 +38,15 @@ trait MLlibTestSparkContext extends BeforeAndAfterAll { self: Suite =>
}
override def afterAll() {
- sqlContext = null
- SQLContext.clearActive()
- if (sc != null) {
- sc.stop()
+ try {
+ sqlContext = null
+ SQLContext.clearActive()
+ if (sc != null) {
+ sc.stop()
+ }
+ sc = null
+ } finally {
+ super.afterAll()
}
- sc = null
- super.afterAll()
}
}