From e71c07557c39e2f74bd20d2ab3a2fca88aa5dfbb Mon Sep 17 00:00:00 2001
From: Xiangrui Meng
Date: Thu, 12 Nov 2015 20:01:13 -0800
Subject: [SPARK-11672][ML] flaky spark.ml read/write tests

We set `sqlContext = null` in `afterAll`. However, this doesn't change
`SQLContext.activeContext`, and so `SQLContext.getOrCreate` might use the
`SparkContext` from a previous test suite and hence cause the error. This PR
calls `clearActive` in `beforeAll` and `afterAll` to avoid using an old
context from other test suites.

cc: yhuai

Author: Xiangrui Meng

Closes #9677 from mengxr/SPARK-11672.2.
---
 .../test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java | 4 ++--
 .../org/apache/spark/ml/classification/LogisticRegressionSuite.scala | 2 +-
 mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala | 2 +-
 .../test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala    | 2 +-
 .../scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala     | 2 ++
 5 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java b/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
index 4f7aeac1ec..c39538014b 100644
--- a/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
@@ -23,7 +23,7 @@ import java.io.IOException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.Ignore;
+import org.junit.Test;
 
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.SQLContext;
@@ -50,7 +50,7 @@ public class JavaDefaultReadWriteSuite {
     Utils.deleteRecursively(tempDir);
   }
 
-  @Ignore // SPARK-11672
+  @Test
   public void testDefaultReadWrite() throws IOException {
     String uid = "my_params";
     MyParams instance = new MyParams(uid);
diff --git a/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
index e4c2f1baa4..51b06b7eb6 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
@@ -872,7 +872,7 @@ class LogisticRegressionSuite
     assert(model1a0.intercept ~== model1b.intercept absTol 1E-3)
   }
 
-  ignore("read/write") { // SPARK-11672
+  test("read/write") {
     // Set some Params to make sure set Params are serialized.
     val lr = new LogisticRegression()
       .setElasticNetParam(0.1)
diff --git a/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
index a66fe03281..9dfa1439cc 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
@@ -68,7 +68,7 @@ class BinarizerSuite extends SparkFunSuite with MLlibTestSparkContext with Defau
     }
   }
 
-  ignore("read/write") { // SPARK-11672
+  test("read/write") {
     val binarizer = new Binarizer()
       .setInputCol("feature")
       .setOutputCol("binarized_feature")
diff --git a/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala b/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
index 44e09c38f9..cac4bd9aa3 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
@@ -105,7 +105,7 @@ object MyParams extends Readable[MyParams] {
 class DefaultReadWriteSuite extends SparkFunSuite with MLlibTestSparkContext
   with DefaultReadWriteTest {
 
-  ignore("default read/write") { // SPARK-11672
+  test("default read/write") {
     val myParams = new MyParams("my_params")
     testDefaultReadWrite(myParams)
   }
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala b/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
index 5d1796ef65..998ee48186 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
@@ -32,11 +32,13 @@ trait MLlibTestSparkContext extends BeforeAndAfterAll { self: Suite =>
       .setMaster("local[2]")
       .setAppName("MLlibUnitTest")
     sc = new SparkContext(conf)
+    SQLContext.clearActive()
     sqlContext = new SQLContext(sc)
   }
 
   override def afterAll() {
     sqlContext = null
+    SQLContext.clearActive()
     if (sc != null) {
       sc.stop()
     }
--
cgit v1.2.3
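
For context, here is a minimal standalone sketch of the patched `MLlibTestSparkContext` trait, reconstructed from the hunks above. The `@transient` field declarations, the `sc = null` cleanup, and the `super.*` calls are assumed boilerplate from the surrounding file, not part of this diff:

    import org.scalatest.{BeforeAndAfterAll, Suite}

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.SQLContext

    trait MLlibTestSparkContext extends BeforeAndAfterAll { self: Suite =>
      @transient var sc: SparkContext = _        // assumed declaration
      @transient var sqlContext: SQLContext = _  // assumed declaration

      override def beforeAll() {
        super.beforeAll()
        val conf = new SparkConf()
          .setMaster("local[2]")
          .setAppName("MLlibUnitTest")
        sc = new SparkContext(conf)
        // Added by this patch: drop any active SQLContext left behind by a
        // previous suite, so SQLContext.getOrCreate cannot hand back a
        // context whose SparkContext has already been stopped.
        SQLContext.clearActive()
        sqlContext = new SQLContext(sc)
      }

      override def afterAll() {
        sqlContext = null
        // Added by this patch: nulling the field does not reset
        // SQLContext.activeContext, so clear it explicitly.
        SQLContext.clearActive()
        if (sc != null) {
          sc.stop()
        }
        sc = null  // assumed cleanup from the surrounding file
        super.afterAll()
      }
    }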