From 1a21be15f655b9696ddac80aac629445a465f621 Mon Sep 17 00:00:00 2001
From: Xiangrui Meng
Date: Wed, 11 Nov 2015 15:41:36 -0800
Subject: [SPARK-11672][ML] disable spark.ml read/write tests

Saw several failures on Jenkins, e.g.,
https://amplab.cs.berkeley.edu/jenkins/job/NewSparkPullRequestBuilder/2040/testReport/org.apache.spark.ml.util/JavaDefaultReadWriteSuite/testDefaultReadWrite/.
This is the first failure in the master build:
https://amplab.cs.berkeley.edu/jenkins/job/Spark-Master-SBT/3982/
I cannot reproduce it locally, so I am temporarily disabling the tests and will
look into the issue under the same JIRA. I'm going to merge the PR after
Jenkins passes compile.

Author: Xiangrui Meng

Closes #9641 from mengxr/SPARK-11672.
---
 .../test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java | 4 ++--
 .../org/apache/spark/ml/classification/LogisticRegressionSuite.scala | 2 +-
 mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala | 2 +-
 .../test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala    | 2 +-
 4 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java b/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
index c39538014b..4f7aeac1ec 100644
--- a/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
@@ -23,7 +23,7 @@ import java.io.IOException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.Test;
+import org.junit.Ignore;
 
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.SQLContext;
@@ -50,7 +50,7 @@ public class JavaDefaultReadWriteSuite {
     Utils.deleteRecursively(tempDir);
   }
 
-  @Test
+  @Ignore // SPARK-11672
   public void testDefaultReadWrite() throws IOException {
     String uid = "my_params";
     MyParams instance = new MyParams(uid);
diff --git a/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
index 51b06b7eb6..e4c2f1baa4 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
@@ -872,7 +872,7 @@ class LogisticRegressionSuite
     assert(model1a0.intercept ~== model1b.intercept absTol 1E-3)
   }
 
-  test("read/write") {
+  ignore("read/write") { // SPARK-11672
     // Set some Params to make sure set Params are serialized.
     val lr = new LogisticRegression()
       .setElasticNetParam(0.1)
diff --git a/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
index 9dfa1439cc..a66fe03281 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
@@ -68,7 +68,7 @@ class BinarizerSuite extends SparkFunSuite with MLlibTestSparkContext with Defau
     }
   }
 
-  test("read/write") {
+  ignore("read/write") { // SPARK-11672
     val binarizer = new Binarizer()
       .setInputCol("feature")
       .setOutputCol("binarized_feature")
diff --git a/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala b/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
index cac4bd9aa3..44e09c38f9 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
@@ -105,7 +105,7 @@ object MyParams extends Readable[MyParams] {
 class DefaultReadWriteSuite extends SparkFunSuite with MLlibTestSparkContext
   with DefaultReadWriteTest {
 
-  test("default read/write") {
+  ignore("default read/write") { // SPARK-11672
     val myParams = new MyParams("my_params")
     testDefaultReadWrite(myParams)
   }
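
Note: the patch disables the flaky tests through the standard skip hooks rather than deleting them: ScalaTest's FunSuite offers ignore(...) as a drop-in replacement for test(...), and JUnit 4 offers @Ignore in place of @Test, so the tests stay visible in reports as skipped. The following is a minimal sketch of the ScalaTest pattern only; the suite name and assertion bodies are illustrative and not part of the patch.

    import org.scalatest.FunSuite

    // Hypothetical standalone suite showing the mechanism used in the patch:
    // swapping test(...) for ignore(...) keeps the test registered but skips it.
    class ReadWriteExampleSuite extends FunSuite {

      test("this test still runs") {
        assert(1 + 1 === 2)
      }

      // Reported as ignored by the runner instead of executed; the trailing
      // comment mirrors the patch's convention of citing the tracking JIRA.
      ignore("read/write") { // SPARK-11672
        assert(false, "flaky on Jenkins; disabled until the root cause is found")
      }
    }

Re-enabling a test later is the reverse one-line change (ignore back to test, or @Ignore back to @Test), which is why this approach is preferred over commenting tests out.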