| author | Michael Armbrust <michael@databricks.com> | 2014-06-13 16:09:47 -0700 |
|---|---|---|
| committer | Reynold Xin <rxin@apache.org> | 2014-06-13 16:09:47 -0700 |
| commit | 7e3e9afdb0d89d3d9636e37da6413806d6dc611c (patch) | |
| tree | 6792423accea69c0e7a0b1511874330bafb85fa5 /core | |
| parent | 00b4317099f706295909dbe12ef3e141dd590dd1 (diff) | |
[SQL] Update SparkSQL and ScalaTest in branch-1.0 to match master.
#511 and #863 were left out of branch-1.0 since we were very close to the release. Now that they have been tested a little, I see no reason to leave them out.
Author: Michael Armbrust <michael@databricks.com>
Author: witgo <witgo@qq.com>
Closes #1078 from marmbrus/branch-1.0 and squashes the following commits:
22be674 [witgo] [SPARK-1841]: update scalatest to version 2.1.5
fc8fc79 [Michael Armbrust] Include #1071 as well.
c5d0adf [Michael Armbrust] Update SparkSQL in branch-1.0 to match master.
Diffstat (limited to 'core')
6 files changed, 13 insertions(+), 11 deletions(-)
```diff
diff --git a/core/pom.xml b/core/pom.xml
index bd739e5341..ead33cd14d 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -235,7 +235,7 @@
     </dependency>
     <dependency>
       <groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
+      <artifactId>easymockclassextension</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
diff --git a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
index 5a83100908..dc2db66df6 100644
--- a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
@@ -25,7 +25,7 @@ import scala.language.postfixOps
 import scala.util.Random
 
 import org.scalatest.{BeforeAndAfter, FunSuite}
-import org.scalatest.concurrent.Eventually
+import org.scalatest.concurrent.{PatienceConfiguration, Eventually}
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.time.SpanSugar._
 
@@ -76,7 +76,7 @@ class ContextCleanerSuite extends FunSuite with BeforeAndAfter with LocalSparkCo
     tester.assertCleanup()
 
     // Verify that shuffles can be re-executed after cleaning up
-    assert(rdd.collect().toList === collected)
+    assert(rdd.collect().toList.equals(collected))
   }
 
   test("cleanup broadcast") {
@@ -285,7 +285,7 @@ class CleanerTester(
   sc.cleaner.get.attachListener(cleanerListener)
 
   /** Assert that all the stuff has been cleaned up */
-  def assertCleanup()(implicit waitTimeout: Eventually.Timeout) {
+  def assertCleanup()(implicit waitTimeout: PatienceConfiguration.Timeout) {
     try {
       eventually(waitTimeout, interval(100 millis)) {
         assert(isAllCleanedUp)
diff --git a/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
index 29d428aa7d..47df00050c 100644
--- a/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
@@ -23,11 +23,11 @@ class ShuffleNettySuite extends ShuffleSuite with BeforeAndAfterAll {
 
   // This test suite should run all tests in ShuffleSuite with Netty shuffle mode.
 
-  override def beforeAll(configMap: Map[String, Any]) {
+  override def beforeAll() {
     System.setProperty("spark.shuffle.use.netty", "true")
   }
 
-  override def afterAll(configMap: Map[String, Any]) {
+  override def afterAll() {
     System.setProperty("spark.shuffle.use.netty", "false")
   }
 }
diff --git a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
index fdbed45efe..87bfce3470 100644
--- a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
@@ -275,8 +275,9 @@ class RDDSuite extends FunSuite with SharedSparkContext {
 
     // we can optionally shuffle to keep the upstream parallel
     val coalesced5 = data.coalesce(1, shuffle = true)
-    assert(coalesced5.dependencies.head.rdd.dependencies.head.rdd.asInstanceOf[ShuffledRDD[_, _, _]] !=
-      null)
+    val isEquals = coalesced5.dependencies.head.rdd.dependencies.head.rdd.
+      asInstanceOf[ShuffledRDD[_, _, _]] != null
+    assert(isEquals)
 
     // when shuffling, we can increase the number of partitions
     val coalesced6 = data.coalesce(20, shuffle = true)
diff --git a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
index d172dd1ac8..7e901f8e91 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -23,7 +23,7 @@ import scala.language.reflectiveCalls
 
 import akka.actor._
 import akka.testkit.{ImplicitSender, TestKit, TestActorRef}
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.{BeforeAndAfter, FunSuiteLike}
 
 import org.apache.spark._
 import org.apache.spark.rdd.RDD
@@ -37,7 +37,7 @@ class BuggyDAGEventProcessActor extends Actor {
   }
 }
 
-class DAGSchedulerSuite extends TestKit(ActorSystem("DAGSchedulerSuite")) with FunSuite
+class DAGSchedulerSuite extends TestKit(ActorSystem("DAGSchedulerSuite")) with FunSuiteLike
   with ImplicitSender with BeforeAndAfter with LocalSparkContext {
 
   val conf = new SparkConf
diff --git a/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala b/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala
index 6a5653ed2f..c1c605cdb4 100644
--- a/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala
@@ -105,7 +105,8 @@ class TimeStampedHashMapSuite extends FunSuite {
     map("k1") = strongRef
     map("k2") = "v2"
    map("k3") = "v3"
-    assert(map("k1") === strongRef)
+    val isEquals = map("k1") == strongRef
+    assert(isEquals)
 
     // clear strong reference to "k1"
     strongRef = null
```
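For context on the ScalaTest 2.1.5 upgrade (SPARK-1841) driving most of these test changes, below is a minimal sketch, separate from the patch itself, of the ScalaTest 2.x idioms the suites migrate to: `beforeAll()`/`afterAll()` no longer take a `configMap` parameter, and the implicit timeout for `eventually` is typed as `PatienceConfiguration.Timeout` rather than `Eventually.Timeout`. The suite and helper names (`NettyModeExampleSuite`, `waitFor`) are illustrative only.

```scala
// Sketch of ScalaTest 2.1.5 usage after this upgrade (Scala 2.10 era style).
// Names here are illustrative and not part of the patch.
import scala.language.postfixOps

import org.scalatest.{BeforeAndAfterAll, FunSuite}
import org.scalatest.concurrent.{PatienceConfiguration, Eventually}
import org.scalatest.concurrent.Eventually._
import org.scalatest.time.SpanSugar._

class NettyModeExampleSuite extends FunSuite with BeforeAndAfterAll {

  // ScalaTest 2.x: beforeAll/afterAll are parameterless (no configMap argument).
  override def beforeAll() {
    System.setProperty("spark.shuffle.use.netty", "true")
  }

  override def afterAll() {
    System.setProperty("spark.shuffle.use.netty", "false")
  }

  // ScalaTest 2.x: the implicit timeout for `eventually` is
  // PatienceConfiguration.Timeout (formerly Eventually.Timeout in 1.9.x).
  def waitFor(condition: () => Boolean)(implicit waitTimeout: PatienceConfiguration.Timeout) {
    eventually(waitTimeout, interval(100 millis)) {
      assert(condition())
    }
  }

  test("netty shuffle property is set") {
    // `timeout(...)` and `interval(...)` come from Eventually._
    waitFor(() => sys.props("spark.shuffle.use.netty") == "true")(timeout(10 seconds))
  }
}
```

The `FunSuite` to `FunSuiteLike` switch in `DAGSchedulerSuite` follows from the same upgrade: that suite already extends a class (Akka's `TestKit`), so it mixes in the trait form of the style instead.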