author    Xiangrui Meng <meng@databricks.com>  2015-11-12 20:01:13 -0800
committer Xiangrui Meng <meng@databricks.com>  2015-11-12 20:01:13 -0800
commit    e71c07557c39e2f74bd20d2ab3a2fca88aa5dfbb
tree      0ff633ab5751f2b63a7bb568e1bb27d6308e07e7
parent    e4e46b20f6475f8e148d5326f7c88c57850d46a1
[SPARK-11672][ML] flaky spark.ml read/write tests
We set `sqlContext = null` in `afterAll`. However, this does not clear `SQLContext.activeContext`, so `SQLContext.getOrCreate` may return a context backed by the `SparkContext` from a previous test suite, which causes the failures. This PR calls `SQLContext.clearActive()` in both `beforeAll` and `afterAll` so that a stale context from another suite is never reused.

cc: yhuai

Author: Xiangrui Meng <meng@databricks.com>

Closes #9677 from mengxr/SPARK-11672.2.
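To illustrate the failure mode the message describes, here is a minimal sketch (hypothetical app names; `SQLContext.setActive` is used here only to simulate the thread-local state a finished suite can leave behind, and the `getOrCreate` behavior is as described in the message above):

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

// "Suite A" runs and leaves an active SQLContext behind.
val scA = new SparkContext(new SparkConf().setMaster("local[2]").setAppName("suiteA"))
val ctxA = new SQLContext(scA)
SQLContext.setActive(ctxA)  // simulate the active-context state left over from suite A
scA.stop()                  // suite A's afterAll stops its SparkContext...
// ...but SQLContext.activeContext still points at ctxA.

// "Suite B" starts a fresh SparkContext, yet getOrCreate may hand back the
// stale ctxA, whose underlying SparkContext is already stopped.
val scB = new SparkContext(new SparkConf().setMaster("local[2]").setAppName("suiteB"))
val ctxB = SQLContext.getOrCreate(scB)  // may be ctxA, not a context on scB

// The fix in this patch clears the thread-local at suite boundaries:
SQLContext.clearActive()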
 mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java           | 4 ++--
 mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala | 2 +-
 mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala                 | 2 +-
 mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala              | 2 +-
 mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala          | 2 ++
 5 files changed, 7 insertions(+), 5 deletions(-)
diff --git a/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java b/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
index 4f7aeac1ec..c39538014b 100644
--- a/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
@@ -23,7 +23,7 @@ import java.io.IOException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.Ignore;
+import org.junit.Test;
 
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.SQLContext;
@@ -50,7 +50,7 @@ public class JavaDefaultReadWriteSuite {
     Utils.deleteRecursively(tempDir);
   }
 
-  @Ignore // SPARK-11672
+  @Test
   public void testDefaultReadWrite() throws IOException {
     String uid = "my_params";
     MyParams instance = new MyParams(uid);
diff --git a/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
index e4c2f1baa4..51b06b7eb6 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
@@ -872,7 +872,7 @@ class LogisticRegressionSuite
     assert(model1a0.intercept ~== model1b.intercept absTol 1E-3)
   }
 
-  ignore("read/write") { // SPARK-11672
+  test("read/write") {
     // Set some Params to make sure set Params are serialized.
     val lr = new LogisticRegression()
       .setElasticNetParam(0.1)
diff --git a/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
index a66fe03281..9dfa1439cc 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
@@ -68,7 +68,7 @@ class BinarizerSuite extends SparkFunSuite with MLlibTestSparkContext with DefaultReadWriteTest {
     }
   }
 
-  ignore("read/write") { // SPARK-11672
+  test("read/write") {
     val binarizer = new Binarizer()
       .setInputCol("feature")
       .setOutputCol("binarized_feature")
diff --git a/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala b/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
index 44e09c38f9..cac4bd9aa3 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
@@ -105,7 +105,7 @@ object MyParams extends Readable[MyParams] {
 class DefaultReadWriteSuite extends SparkFunSuite with MLlibTestSparkContext
   with DefaultReadWriteTest {
 
-  ignore("default read/write") { // SPARK-11672
+  test("default read/write") {
     val myParams = new MyParams("my_params")
     testDefaultReadWrite(myParams)
   }
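For reference, ScalaTest's ignore(...) has the same signature as test(...), so re-enabling a test is the one-word change seen in these hunks. A standalone sketch (a hypothetical suite, not part of this patch):

import org.scalatest.FunSuite

class ToggleSuite extends FunSuite {
  ignore("temporarily disabled") {  // registered but reported as ignored; body never runs
    assert(1 + 1 == 3)
  }
  test("enabled") {                 // runs normally
    assert(1 + 1 == 2)
  }
}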
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala b/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
index 5d1796ef65..998ee48186 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
@@ -32,11 +32,13 @@ trait MLlibTestSparkContext extends BeforeAndAfterAll { self: Suite =>
       .setMaster("local[2]")
       .setAppName("MLlibUnitTest")
     sc = new SparkContext(conf)
+    SQLContext.clearActive()
     sqlContext = new SQLContext(sc)
   }
 
   override def afterAll() {
     sqlContext = null
+    SQLContext.clearActive()
     if (sc != null) {
       sc.stop()
     }
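After the patch, the shared test trait looks roughly as follows (a sketch reconstructed from the hunks above; the @transient fields and the tail of afterAll are assumed rather than shown in the diff):

import org.scalatest.{BeforeAndAfterAll, Suite}

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

trait MLlibTestSparkContext extends BeforeAndAfterAll { self: Suite =>
  @transient var sc: SparkContext = _
  @transient var sqlContext: SQLContext = _

  override def beforeAll() {
    super.beforeAll()
    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("MLlibUnitTest")
    sc = new SparkContext(conf)
    SQLContext.clearActive()  // don't inherit an active context from another suite
    sqlContext = new SQLContext(sc)
  }

  override def afterAll() {
    sqlContext = null
    SQLContext.clearActive()  // don't leak this suite's context to the next one
    if (sc != null) {
      sc.stop()
    }
    sc = null
    super.afterAll()
  }
}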