author    Matei Zaharia <matei@eecs.berkeley.edu>  2013-08-31 19:27:07 -0700
committer Matei Zaharia <matei@eecs.berkeley.edu>  2013-09-01 14:13:13 -0700
commit    46eecd110a4017ea0c86cbb1010d0ccd6a5eb2ef (patch)
tree      4a46971b36680bc5ef51be81ada8eb47670f6b22 /core/src/test
parent    a30fac16ca0525f2001b127e5f9518c9680844c9 (diff)
Initial work to rename package to org.apache.spark
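
For downstream code, the rename means every reference to the old top-level spark package moves to org.apache.spark. A minimal before/after sketch (hypothetical user code, not part of this commit; assumes the 0.8-era two-argument SparkContext constructor):

    // Before: import spark.SparkContext; import spark.SparkContext._
    // After this commit:
    import org.apache.spark.SparkContext
    import org.apache.spark.SparkContext._   // brings in the implicit pair-RDD functions

    object RenameExample {
      def main(args: Array[String]) {
        val sc = new SparkContext("local", "RenameExample")
        val counts = sc.parallelize(Seq("a", "b", "a"))
          .map(word => (word, 1))
          .reduceByKey(_ + _)        // available via the SparkContext._ import
        counts.collect().foreach(println)
        sc.stop()
      }
    }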
Diffstat (limited to 'core/src/test')
-rw-r--r--  core/src/test/resources/test_metrics_config.properties | 2
-rw-r--r--  core/src/test/resources/test_metrics_system.properties | 6
-rw-r--r--  core/src/test/scala/org/apache/spark/AccumulatorSuite.scala (renamed from core/src/test/scala/spark/AccumulatorSuite.scala) | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/BroadcastSuite.scala (renamed from core/src/test/scala/spark/BroadcastSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/CheckpointSuite.scala (renamed from core/src/test/scala/spark/CheckpointSuite.scala) | 6
-rw-r--r--  core/src/test/scala/org/apache/spark/ClosureCleanerSuite.scala (renamed from core/src/test/scala/spark/ClosureCleanerSuite.scala) | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/DistributedSuite.scala (renamed from core/src/test/scala/spark/DistributedSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/DriverSuite.scala (renamed from core/src/test/scala/spark/DriverSuite.scala) | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/FailureSuite.scala (renamed from core/src/test/scala/spark/FailureSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/FileServerSuite.scala (renamed from core/src/test/scala/spark/FileServerSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/FileSuite.scala (renamed from core/src/test/scala/spark/FileSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/JavaAPISuite.java (renamed from core/src/test/scala/spark/JavaAPISuite.java) | 20
-rw-r--r--  core/src/test/scala/org/apache/spark/KryoSerializerSuite.scala (renamed from core/src/test/scala/spark/KryoSerializerSuite.scala) | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/LocalSparkContext.scala (renamed from core/src/test/scala/spark/LocalSparkContext.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala (renamed from core/src/test/scala/spark/MapOutputTrackerSuite.scala) | 8
-rw-r--r--  core/src/test/scala/org/apache/spark/PairRDDFunctionsSuite.scala (renamed from core/src/test/scala/spark/PairRDDFunctionsSuite.scala) | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/PartitionPruningRDDSuite.scala (renamed from core/src/test/scala/spark/PartitionPruningRDDSuite.scala) | 6
-rw-r--r--  core/src/test/scala/org/apache/spark/PartitioningSuite.scala (renamed from core/src/test/scala/spark/PartitioningSuite.scala) | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/PipedRDDSuite.scala (renamed from core/src/test/scala/spark/PipedRDDSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/RDDSuite.scala (renamed from core/src/test/scala/spark/RDDSuite.scala) | 6
-rw-r--r--  core/src/test/scala/org/apache/spark/SharedSparkContext.scala (renamed from core/src/test/scala/spark/SharedSparkContext.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala (renamed from core/src/test/scala/spark/ShuffleNettySuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/ShuffleSuite.scala (renamed from core/src/test/scala/spark/ShuffleSuite.scala) | 16
-rw-r--r--  core/src/test/scala/org/apache/spark/SizeEstimatorSuite.scala (renamed from core/src/test/scala/spark/SizeEstimatorSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/SortingSuite.scala (renamed from core/src/test/scala/spark/SortingSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala (renamed from core/src/test/scala/spark/SparkContextInfoSuite.scala) | 6
-rw-r--r--  core/src/test/scala/org/apache/spark/ThreadingSuite.scala (renamed from core/src/test/scala/spark/ThreadingSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/UnpersistSuite.scala (renamed from core/src/test/scala/spark/UnpersistSuite.scala) | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/UtilsSuite.scala (renamed from core/src/test/scala/spark/UtilsSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala (renamed from core/src/test/scala/spark/ZippedPartitionsSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala (renamed from core/src/test/scala/spark/io/CompressionCodecSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala (renamed from core/src/test/scala/spark/metrics/MetricsConfigSuite.scala) | 14
-rw-r--r--  core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala (renamed from core/src/test/scala/spark/metrics/MetricsSystemSuite.scala) | 5
-rw-r--r--  core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala (renamed from core/src/test/scala/spark/rdd/JdbcRDDSuite.scala) | 6
-rw-r--r--  core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala (renamed from core/src/test/scala/spark/rdd/ParallelCollectionSplitSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala (renamed from core/src/test/scala/spark/scheduler/DAGSchedulerSuite.scala) | 28
-rw-r--r--  core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala (renamed from core/src/test/scala/spark/scheduler/JobLoggerSuite.scala) | 6
-rw-r--r--  core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala (renamed from core/src/test/scala/spark/scheduler/SparkListenerSuite.scala) | 6
-rw-r--r--  core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala (renamed from core/src/test/scala/spark/scheduler/TaskContextSuite.scala) | 12
-rw-r--r--  core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterSchedulerSuite.scala (renamed from core/src/test/scala/spark/scheduler/cluster/ClusterSchedulerSuite.scala) | 8
-rw-r--r--  core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterTaskSetManagerSuite.scala (renamed from core/src/test/scala/spark/scheduler/cluster/ClusterTaskSetManagerSuite.scala) | 10
-rw-r--r--  core/src/test/scala/org/apache/spark/scheduler/cluster/FakeTask.scala (renamed from core/src/test/scala/spark/scheduler/cluster/FakeTask.scala) | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/scheduler/local/LocalSchedulerSuite.scala (renamed from core/src/test/scala/spark/scheduler/local/LocalSchedulerSuite.scala) | 8
-rw-r--r--  core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala (renamed from core/src/test/scala/spark/storage/BlockManagerSuite.scala) | 33
-rw-r--r--  core/src/test/scala/org/apache/spark/ui/UISuite.scala (renamed from core/src/test/scala/spark/ui/UISuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/util/DistributionSuite.scala (renamed from core/src/test/scala/spark/util/DistributionSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/util/FakeClock.scala (renamed from core/src/test/scala/spark/util/FakeClock.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala (renamed from core/src/test/scala/spark/util/NextIteratorSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/util/RateLimitedOutputStreamSuite.scala (renamed from core/src/test/scala/spark/util/RateLimitedOutputStreamSuite.scala) | 2
49 files changed, 144 insertions(+), 142 deletions(-)
diff --git a/core/src/test/resources/test_metrics_config.properties b/core/src/test/resources/test_metrics_config.properties
index 2b31ddf2eb..056a158456 100644
--- a/core/src/test/resources/test_metrics_config.properties
+++ b/core/src/test/resources/test_metrics_config.properties
@@ -1,6 +1,6 @@
*.sink.console.period = 10
*.sink.console.unit = seconds
-*.source.jvm.class = spark.metrics.source.JvmSource
+*.source.jvm.class = org.apache.spark.metrics.source.JvmSource
master.sink.console.period = 20
master.sink.console.unit = minutes
diff --git a/core/src/test/resources/test_metrics_system.properties b/core/src/test/resources/test_metrics_system.properties
index d5479f0298..6f5ecea93a 100644
--- a/core/src/test/resources/test_metrics_system.properties
+++ b/core/src/test/resources/test_metrics_system.properties
@@ -1,7 +1,7 @@
*.sink.console.period = 10
*.sink.console.unit = seconds
-test.sink.console.class = spark.metrics.sink.ConsoleSink
-test.sink.dummy.class = spark.metrics.sink.DummySink
-test.source.dummy.class = spark.metrics.source.DummySource
+test.sink.console.class = org.apache.spark.metrics.sink.ConsoleSink
+test.sink.dummy.class = org.apache.spark.metrics.sink.DummySink
+test.source.dummy.class = org.apache.spark.metrics.source.DummySource
test.sink.console.period = 20
test.sink.console.unit = minutes
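
These properties files must be edited by hand because the metrics system resolves sink and source classes reflectively from the fully qualified names in the config, so the compiler cannot catch a stale "spark.metrics..." string. A minimal sketch of that failure mode (simplified illustration, not the actual MetricsSystem code; assumes Spark is on the classpath):

    import java.util.Properties

    val props = new Properties()
    props.setProperty("test.sink.console.class", "org.apache.spark.metrics.sink.ConsoleSink")

    // Class.forName only succeeds against the new package name; the old
    // "spark.metrics.sink.ConsoleSink" would throw ClassNotFoundException.
    val sinkClass = Class.forName(props.getProperty("test.sink.console.class"))
    println(sinkClass.getName)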
diff --git a/core/src/test/scala/spark/AccumulatorSuite.scala b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
index 0af175f316..4434f3b87c 100644
--- a/core/src/test/scala/spark/AccumulatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
@@ -23,7 +23,7 @@ import collection.mutable
import java.util.Random
import scala.math.exp
import scala.math.signum
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
class AccumulatorSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
diff --git a/core/src/test/scala/spark/BroadcastSuite.scala b/core/src/test/scala/org/apache/spark/BroadcastSuite.scala
index 785721ece8..b3a53d928b 100644
--- a/core/src/test/scala/spark/BroadcastSuite.scala
+++ b/core/src/test/scala/org/apache/spark/BroadcastSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
diff --git a/core/src/test/scala/spark/CheckpointSuite.scala b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
index 966dede2be..23b14f4245 100644
--- a/core/src/test/scala/spark/CheckpointSuite.scala
+++ b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
import java.io.File
-import spark.rdd._
-import spark.SparkContext._
+import org.apache.spark.rdd._
+import org.apache.spark.SparkContext._
import storage.StorageLevel
class CheckpointSuite extends FunSuite with LocalSparkContext with Logging {
diff --git a/core/src/test/scala/spark/ClosureCleanerSuite.scala b/core/src/test/scala/org/apache/spark/ClosureCleanerSuite.scala
index 7d2831e19c..8494899b98 100644
--- a/core/src/test/scala/spark/ClosureCleanerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ClosureCleanerSuite.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.io.NotSerializableException
import org.scalatest.FunSuite
-import spark.LocalSparkContext._
+import org.apache.spark.LocalSparkContext._
import SparkContext._
class ClosureCleanerSuite extends FunSuite {
diff --git a/core/src/test/scala/spark/DistributedSuite.scala b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
index e11efe459c..7a856d4081 100644
--- a/core/src/test/scala/spark/DistributedSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import network.ConnectionManagerId
import org.scalatest.FunSuite
diff --git a/core/src/test/scala/spark/DriverSuite.scala b/core/src/test/scala/org/apache/spark/DriverSuite.scala
index 553c0309f6..b08aad1a6f 100644
--- a/core/src/test/scala/spark/DriverSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DriverSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.io.File
@@ -34,7 +34,7 @@ class DriverSuite extends FunSuite with Timeouts {
val masters = Table(("master"), ("local"), ("local-cluster[2,1,512]"))
forAll(masters) { (master: String) =>
failAfter(30 seconds) {
- Utils.execute(Seq("./spark-class", "spark.DriverWithoutCleanup", master),
+ Utils.execute(Seq("./spark-class", "org.apache.spark.DriverWithoutCleanup", master),
new File(System.getenv("SPARK_HOME")))
}
}
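
DriverSuite is another place the compiler cannot help: it spawns a fresh JVM through the spark-class script and passes the driver class name as a plain string. Roughly the equivalent of the Utils.execute call, sketched with java.lang.ProcessBuilder (assumes SPARK_HOME is set; not the actual Utils implementation):

    import java.io.File

    val pb = new ProcessBuilder("./spark-class", "org.apache.spark.DriverWithoutCleanup", "local")
    pb.directory(new File(System.getenv("SPARK_HOME")))
    pb.inheritIO()                       // forward the child JVM's output
    val exitCode = pb.start().waitFor()
    assert(exitCode == 0, "driver JVM failed")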
diff --git a/core/src/test/scala/spark/FailureSuite.scala b/core/src/test/scala/org/apache/spark/FailureSuite.scala
index 5b133cdd6e..ee89a7a387 100644
--- a/core/src/test/scala/spark/FailureSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FailureSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
diff --git a/core/src/test/scala/spark/FileServerSuite.scala b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
index 242ae971f8..35d1d41af1 100644
--- a/core/src/test/scala/spark/FileServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import com.google.common.io.Files
import org.scalatest.FunSuite
diff --git a/core/src/test/scala/spark/FileSuite.scala b/core/src/test/scala/org/apache/spark/FileSuite.scala
index 1e2c257c4b..7b82a4cdd9 100644
--- a/core/src/test/scala/spark/FileSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.io.{FileWriter, PrintWriter, File}
diff --git a/core/src/test/scala/spark/JavaAPISuite.java b/core/src/test/scala/org/apache/spark/JavaAPISuite.java
index c337c49268..8a869c9005 100644
--- a/core/src/test/scala/spark/JavaAPISuite.java
+++ b/core/src/test/scala/org/apache/spark/JavaAPISuite.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark;
+package org.apache.spark;
import java.io.File;
import java.io.IOException;
@@ -38,15 +38,15 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
-import spark.api.java.JavaDoubleRDD;
-import spark.api.java.JavaPairRDD;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.*;
-import spark.partial.BoundedDouble;
-import spark.partial.PartialResult;
-import spark.storage.StorageLevel;
-import spark.util.StatCounter;
+import org.apache.spark.api.java.JavaDoubleRDD;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.*;
+import org.apache.spark.partial.BoundedDouble;
+import org.apache.spark.partial.PartialResult;
+import org.apache.spark.storage.StorageLevel;
+import org.apache.spark.util.StatCounter;
// The test suite itself is Serializable so that anonymous Function implementations can be
diff --git a/core/src/test/scala/spark/KryoSerializerSuite.scala b/core/src/test/scala/org/apache/spark/KryoSerializerSuite.scala
index 7568a0bf65..d7b23c93fe 100644
--- a/core/src/test/scala/spark/KryoSerializerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/KryoSerializerSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import scala.collection.mutable
@@ -167,7 +167,7 @@ class KryoSerializerSuite extends FunSuite with SharedSparkContext {
}
override def beforeAll() {
- System.setProperty("spark.serializer", "spark.KryoSerializer")
+ System.setProperty("spark.serializer", "org.apache.spark.KryoSerializer")
System.setProperty("spark.kryo.registrator", classOf[MyRegistrator].getName)
super.beforeAll()
}
diff --git a/core/src/test/scala/spark/LocalSparkContext.scala b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
index ddc212d290..6ec124da9c 100644
--- a/core/src/test/scala/spark/LocalSparkContext.scala
+++ b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.Suite
import org.scalatest.BeforeAndAfterEach
diff --git a/core/src/test/scala/spark/MapOutputTrackerSuite.scala b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
index c21f3331d0..6013320eaa 100644
--- a/core/src/test/scala/spark/MapOutputTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
import akka.actor._
-import spark.scheduler.MapStatus
-import spark.storage.BlockManagerId
-import spark.util.AkkaUtils
+import org.apache.spark.scheduler.MapStatus
+import org.apache.spark.storage.BlockManagerId
+import org.apache.spark.util.AkkaUtils
class MapOutputTrackerSuite extends FunSuite with LocalSparkContext {
diff --git a/core/src/test/scala/spark/PairRDDFunctionsSuite.scala b/core/src/test/scala/org/apache/spark/PairRDDFunctionsSuite.scala
index 328b3b5497..f79752b34e 100644
--- a/core/src/test/scala/spark/PairRDDFunctionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PairRDDFunctionsSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashSet
@@ -23,7 +23,7 @@ import scala.collection.mutable.HashSet
import org.scalatest.FunSuite
import com.google.common.io.Files
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
class PairRDDFunctionsSuite extends FunSuite with SharedSparkContext {
diff --git a/core/src/test/scala/spark/PartitionPruningRDDSuite.scala b/core/src/test/scala/org/apache/spark/PartitionPruningRDDSuite.scala
index 88352b639f..adbe805916 100644
--- a/core/src/test/scala/spark/PartitionPruningRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PartitionPruningRDDSuite.scala
@@ -1,8 +1,8 @@
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
-import spark.SparkContext._
-import spark.rdd.PartitionPruningRDD
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.PartitionPruningRDD
class PartitionPruningRDDSuite extends FunSuite with SharedSparkContext {
diff --git a/core/src/test/scala/spark/PartitioningSuite.scala b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
index b1e0b2b4d0..7669cf6fb1 100644
--- a/core/src/test/scala/spark/PartitioningSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
import scala.collection.mutable.ArrayBuffer
import SparkContext._
-import spark.util.StatCounter
+import org.apache.spark.util.StatCounter
import scala.math.abs
class PartitioningSuite extends FunSuite with SharedSparkContext {
diff --git a/core/src/test/scala/spark/PipedRDDSuite.scala b/core/src/test/scala/org/apache/spark/PipedRDDSuite.scala
index 35c04710a3..2e851d892d 100644
--- a/core/src/test/scala/spark/PipedRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PipedRDDSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
import SparkContext._
diff --git a/core/src/test/scala/spark/RDDSuite.scala b/core/src/test/scala/org/apache/spark/RDDSuite.scala
index e306952bbd..342ba8adb2 100644
--- a/core/src/test/scala/spark/RDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/RDDSuite.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import scala.collection.mutable.HashMap
import org.scalatest.FunSuite
import org.scalatest.concurrent.Timeouts._
import org.scalatest.time.{Span, Millis}
-import spark.SparkContext._
-import spark.rdd._
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd._
import scala.collection.parallel.mutable
class RDDSuite extends FunSuite with SharedSparkContext {
diff --git a/core/src/test/scala/spark/SharedSparkContext.scala b/core/src/test/scala/org/apache/spark/SharedSparkContext.scala
index 70c24515be..97cbca09bf 100644
--- a/core/src/test/scala/spark/SharedSparkContext.scala
+++ b/core/src/test/scala/org/apache/spark/SharedSparkContext.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.Suite
import org.scalatest.BeforeAndAfterAll
diff --git a/core/src/test/scala/spark/ShuffleNettySuite.scala b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
index 6bad6c1d13..e121b162ad 100644
--- a/core/src/test/scala/spark/ShuffleNettySuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.BeforeAndAfterAll
diff --git a/core/src/test/scala/spark/ShuffleSuite.scala b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
index 8745689c70..357175e89e 100644
--- a/core/src/test/scala/spark/ShuffleSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
-import spark.SparkContext._
-import spark.ShuffleSuite.NonJavaSerializableClass
-import spark.rdd.{SubtractedRDD, CoGroupedRDD, OrderedRDDFunctions, ShuffledRDD}
-import spark.util.MutablePair
+import org.apache.spark.SparkContext._
+import org.apache.spark.ShuffleSuite.NonJavaSerializableClass
+import org.apache.spark.rdd.{SubtractedRDD, CoGroupedRDD, OrderedRDDFunctions, ShuffledRDD}
+import org.apache.spark.util.MutablePair
class ShuffleSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
@@ -54,7 +54,7 @@ class ShuffleSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
// If the Kryo serializer is not used correctly, the shuffle would fail because the
// default Java serializer cannot handle the non serializable class.
val c = new ShuffledRDD[Int, NonJavaSerializableClass, (Int, NonJavaSerializableClass)](
- b, new HashPartitioner(NUM_BLOCKS)).setSerializer(classOf[spark.KryoSerializer].getName)
+ b, new HashPartitioner(NUM_BLOCKS)).setSerializer(classOf[KryoSerializer].getName)
val shuffleId = c.dependencies.head.asInstanceOf[ShuffleDependency[Int, Int]].shuffleId
assert(c.count === 10)
@@ -76,7 +76,7 @@ class ShuffleSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
// If the Kryo serializer is not used correctly, the shuffle would fail because the
// default Java serializer cannot handle the non serializable class.
val c = new ShuffledRDD[Int, NonJavaSerializableClass, (Int, NonJavaSerializableClass)](
- b, new HashPartitioner(3)).setSerializer(classOf[spark.KryoSerializer].getName)
+ b, new HashPartitioner(3)).setSerializer(classOf[KryoSerializer].getName)
assert(c.count === 10)
}
@@ -92,7 +92,7 @@ class ShuffleSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
// NOTE: The default Java serializer doesn't create zero-sized blocks.
// So, use Kryo
val c = new ShuffledRDD[Int, Int, (Int, Int)](b, new HashPartitioner(10))
- .setSerializer(classOf[spark.KryoSerializer].getName)
+ .setSerializer(classOf[KryoSerializer].getName)
val shuffleId = c.dependencies.head.asInstanceOf[ShuffleDependency[Int, Int]].shuffleId
assert(c.count === 4)
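
Note the contrast with the properties files above: this change works because classOf[KryoSerializer].getName is resolved by the compiler against the (new) import, so it yields the renamed fully qualified name automatically. A small sketch:

    import org.apache.spark.KryoSerializer   // the post-rename location used by this commit

    // Resolved at compile time, so it tracks package renames for free,
    // unlike string literals such as "spark.KryoSerializer".
    val name = classOf[KryoSerializer].getName
    assert(name == "org.apache.spark.KryoSerializer")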
diff --git a/core/src/test/scala/spark/SizeEstimatorSuite.scala b/core/src/test/scala/org/apache/spark/SizeEstimatorSuite.scala
index 1ef812dfbd..214ac74898 100644
--- a/core/src/test/scala/spark/SizeEstimatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SizeEstimatorSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfterAll
diff --git a/core/src/test/scala/spark/SortingSuite.scala b/core/src/test/scala/org/apache/spark/SortingSuite.scala
index b933c4aab8..f4fa9511dd 100644
--- a/core/src/test/scala/spark/SortingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SortingSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfter
diff --git a/core/src/test/scala/spark/SparkContextInfoSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
index 6d50bf5e1b..939fe51801 100644
--- a/core/src/test/scala/spark/SparkContextInfoSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
class SparkContextInfoSuite extends FunSuite with LocalSparkContext {
test("getPersistentRDDs only returns RDDs that are marked as cached") {
@@ -57,4 +57,4 @@ class SparkContextInfoSuite extends FunSuite with LocalSparkContext {
rdd.collect()
assert(sc.getRDDStorageInfo.size === 1)
}
-} \ No newline at end of file
+}
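
For context, a sketch of the introspection calls this suite exercises (hypothetical standalone usage; the suite itself gets sc from LocalSparkContext):

    import org.apache.spark.SparkContext

    val sc = new SparkContext("local", "info-example")
    val rdd = sc.makeRDD(1 to 100).cache()   // marked as cached, but not yet materialized
    rdd.collect()                            // materializes the cache
    assert(sc.getPersistentRDDs.size == 1)
    assert(sc.getRDDStorageInfo.size == 1)
    sc.stop()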
diff --git a/core/src/test/scala/spark/ThreadingSuite.scala b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
index f2acd0bd3c..69383ddfb8 100644
--- a/core/src/test/scala/spark/ThreadingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.util.concurrent.Semaphore
import java.util.concurrent.atomic.AtomicBoolean
diff --git a/core/src/test/scala/spark/UnpersistSuite.scala b/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
index 93977d16f4..46a2da1724 100644
--- a/core/src/test/scala/spark/UnpersistSuite.scala
+++ b/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
import org.scalatest.concurrent.Timeouts._
import org.scalatest.time.{Span, Millis}
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
class UnpersistSuite extends FunSuite with LocalSparkContext {
test("unpersist RDD") {
diff --git a/core/src/test/scala/spark/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/UtilsSuite.scala
index 98a6c1a1c9..3a908720a8 100644
--- a/core/src/test/scala/spark/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/UtilsSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import com.google.common.base.Charsets
import com.google.common.io.Files
diff --git a/core/src/test/scala/spark/ZippedPartitionsSuite.scala b/core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala
index bb5d379273..618b9c113b 100644
--- a/core/src/test/scala/spark/ZippedPartitionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import scala.collection.immutable.NumericRange
diff --git a/core/src/test/scala/spark/io/CompressionCodecSuite.scala b/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
index 1ba82fe2b9..fd6f69041a 100644
--- a/core/src/test/scala/spark/io/CompressionCodecSuite.scala
+++ b/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.io
+package org.apache.spark.io
import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
diff --git a/core/src/test/scala/spark/metrics/MetricsConfigSuite.scala b/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
index b0213b62d9..58c94a162d 100644
--- a/core/src/test/scala/spark/metrics/MetricsConfigSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.metrics
+package org.apache.spark.metrics
import org.scalatest.{BeforeAndAfter, FunSuite}
@@ -35,7 +35,7 @@ class MetricsConfigSuite extends FunSuite with BeforeAndAfter {
val property = conf.getInstance("random")
assert(property.size() === 3)
- assert(property.getProperty("sink.servlet.class") === "spark.metrics.sink.MetricsServlet")
+ assert(property.getProperty("sink.servlet.class") === "org.apache.spark.metrics.sink.MetricsServlet")
assert(property.getProperty("sink.servlet.uri") === "/metrics/json")
assert(property.getProperty("sink.servlet.sample") === "false")
}
@@ -48,8 +48,8 @@ class MetricsConfigSuite extends FunSuite with BeforeAndAfter {
assert(masterProp.size() === 6)
assert(masterProp.getProperty("sink.console.period") === "20")
assert(masterProp.getProperty("sink.console.unit") === "minutes")
- assert(masterProp.getProperty("source.jvm.class") === "spark.metrics.source.JvmSource")
- assert(masterProp.getProperty("sink.servlet.class") === "spark.metrics.sink.MetricsServlet")
+ assert(masterProp.getProperty("source.jvm.class") === "org.apache.spark.metrics.source.JvmSource")
+ assert(masterProp.getProperty("sink.servlet.class") === "org.apache.spark.metrics.sink.MetricsServlet")
assert(masterProp.getProperty("sink.servlet.uri") === "/metrics/master/json")
assert(masterProp.getProperty("sink.servlet.sample") === "false")
@@ -57,8 +57,8 @@ class MetricsConfigSuite extends FunSuite with BeforeAndAfter {
assert(workerProp.size() === 6)
assert(workerProp.getProperty("sink.console.period") === "10")
assert(workerProp.getProperty("sink.console.unit") === "seconds")
- assert(workerProp.getProperty("source.jvm.class") === "spark.metrics.source.JvmSource")
- assert(workerProp.getProperty("sink.servlet.class") === "spark.metrics.sink.MetricsServlet")
+ assert(workerProp.getProperty("source.jvm.class") === "org.apache.spark.metrics.source.JvmSource")
+ assert(workerProp.getProperty("sink.servlet.class") === "org.apache.spark.metrics.sink.MetricsServlet")
assert(workerProp.getProperty("sink.servlet.uri") === "/metrics/json")
assert(workerProp.getProperty("sink.servlet.sample") === "false")
}
@@ -73,7 +73,7 @@ class MetricsConfigSuite extends FunSuite with BeforeAndAfter {
val masterProp = conf.getInstance("master")
val sourceProps = conf.subProperties(masterProp, MetricsSystem.SOURCE_REGEX)
assert(sourceProps.size === 1)
- assert(sourceProps("jvm").getProperty("class") === "spark.metrics.source.JvmSource")
+ assert(sourceProps("jvm").getProperty("class") === "org.apache.spark.metrics.source.JvmSource")
val sinkProps = conf.subProperties(masterProp, MetricsSystem.SINK_REGEX)
assert(sinkProps.size === 2)
diff --git a/core/src/test/scala/spark/metrics/MetricsSystemSuite.scala b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
index dc65ac6994..7181333adf 100644
--- a/core/src/test/scala/spark/metrics/MetricsSystemSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
@@ -15,9 +15,10 @@
* limitations under the License.
*/
-package spark.metrics
+package org.apache.spark.metrics
import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.apache.spark.deploy.master.MasterSource
class MetricsSystemSuite extends FunSuite with BeforeAndAfter {
var filePath: String = _
@@ -46,7 +47,7 @@ class MetricsSystemSuite extends FunSuite with BeforeAndAfter {
assert(sinks.length === 1)
assert(!metricsSystem.getServletHandlers.isEmpty)
- val source = new spark.deploy.master.MasterSource(null)
+ val source = new MasterSource(null)
metricsSystem.registerSource(source)
assert(sources.length === 1)
}
diff --git a/core/src/test/scala/spark/rdd/JdbcRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala
index dc8ca941c1..3d39a31252 100644
--- a/core/src/test/scala/spark/rdd/JdbcRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.{ BeforeAndAfter, FunSuite }
-import spark.SparkContext._
-import spark.rdd.JdbcRDD
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.JdbcRDD
import java.sql._
class JdbcRDDSuite extends FunSuite with BeforeAndAfter with LocalSparkContext {
diff --git a/core/src/test/scala/spark/rdd/ParallelCollectionSplitSuite.scala b/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala
index d1276d541f..a80afdee7e 100644
--- a/core/src/test/scala/spark/rdd/ParallelCollectionSplitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
import scala.collection.immutable.NumericRange
diff --git a/core/src/test/scala/spark/scheduler/DAGSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
index 3b4a0d52fc..94df282b28 100644
--- a/core/src/test/scala/spark/scheduler/DAGSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -15,26 +15,26 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import scala.collection.mutable.{Map, HashMap}
import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfter
-import spark.LocalSparkContext
-import spark.MapOutputTracker
-import spark.RDD
-import spark.SparkContext
-import spark.Partition
-import spark.TaskContext
-import spark.{Dependency, ShuffleDependency, OneToOneDependency}
-import spark.{FetchFailed, Success, TaskEndReason}
-import spark.storage.{BlockManagerId, BlockManagerMaster}
-
-import spark.scheduler.cluster.Pool
-import spark.scheduler.cluster.SchedulingMode
-import spark.scheduler.cluster.SchedulingMode.SchedulingMode
+import org.apache.spark.LocalSparkContext
+import org.apache.spark.MapOutputTracker
+import org.apache.spark.RDD
+import org.apache.spark.SparkContext
+import org.apache.spark.Partition
+import org.apache.spark.TaskContext
+import org.apache.spark.{Dependency, ShuffleDependency, OneToOneDependency}
+import org.apache.spark.{FetchFailed, Success, TaskEndReason}
+import org.apache.spark.storage.{BlockManagerId, BlockManagerMaster}
+
+import org.apache.spark.scheduler.cluster.Pool
+import org.apache.spark.scheduler.cluster.SchedulingMode
+import org.apache.spark.scheduler.cluster.SchedulingMode.SchedulingMode
/**
* Tests for DAGScheduler. These tests directly call the event processing functions in DAGScheduler
diff --git a/core/src/test/scala/spark/scheduler/JobLoggerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala
index bb9e715f95..f5b3e97222 100644
--- a/core/src/test/scala/spark/scheduler/JobLoggerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import java.util.Properties
import java.util.concurrent.LinkedBlockingQueue
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
import scala.collection.mutable
-import spark._
-import spark.SparkContext._
+import org.apache.spark._
+import org.apache.spark.SparkContext._
class JobLoggerSuite extends FunSuite with LocalSparkContext with ShouldMatchers {
diff --git a/core/src/test/scala/spark/scheduler/SparkListenerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
index 392d67d67b..aac7c207cb 100644
--- a/core/src/test/scala/spark/scheduler/SparkListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import org.scalatest.FunSuite
-import spark.{SparkContext, LocalSparkContext}
+import org.apache.spark.{SparkContext, LocalSparkContext}
import scala.collection.mutable
import org.scalatest.matchers.ShouldMatchers
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
/**
*
diff --git a/core/src/test/scala/spark/scheduler/TaskContextSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
index 95a6eee2fc..0347cc02d7 100644
--- a/core/src/test/scala/spark/scheduler/TaskContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfter
-import spark.TaskContext
-import spark.RDD
-import spark.SparkContext
-import spark.Partition
-import spark.LocalSparkContext
+import org.apache.spark.TaskContext
+import org.apache.spark.RDD
+import org.apache.spark.SparkContext
+import org.apache.spark.Partition
+import org.apache.spark.LocalSparkContext
class TaskContextSuite extends FunSuite with BeforeAndAfter with LocalSparkContext {
diff --git a/core/src/test/scala/spark/scheduler/cluster/ClusterSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterSchedulerSuite.scala
index abfdabf5fe..92ad9f09b2 100644
--- a/core/src/test/scala/spark/scheduler/cluster/ClusterSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterSchedulerSuite.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfter
-import spark._
-import spark.scheduler._
-import spark.scheduler.cluster._
+import org.apache.spark._
+import org.apache.spark.scheduler._
+import org.apache.spark.scheduler.cluster._
import scala.collection.mutable.ArrayBuffer
import java.util.Properties
diff --git a/core/src/test/scala/spark/scheduler/cluster/ClusterTaskSetManagerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterTaskSetManagerSuite.scala
index 5a0b949ef5..a4f63baf3d 100644
--- a/core/src/test/scala/spark/scheduler/cluster/ClusterTaskSetManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterTaskSetManagerSuite.scala
@@ -15,18 +15,18 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable
import org.scalatest.FunSuite
-import spark._
-import spark.scheduler._
-import spark.executor.TaskMetrics
+import org.apache.spark._
+import org.apache.spark.scheduler._
+import org.apache.spark.executor.TaskMetrics
import java.nio.ByteBuffer
-import spark.util.FakeClock
+import org.apache.spark.util.FakeClock
/**
* A mock ClusterScheduler implementation that just remembers information about tasks started and
diff --git a/core/src/test/scala/spark/scheduler/cluster/FakeTask.scala b/core/src/test/scala/org/apache/spark/scheduler/cluster/FakeTask.scala
index de9e66be20..2f12aaed18 100644
--- a/core/src/test/scala/spark/scheduler/cluster/FakeTask.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/cluster/FakeTask.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
-import spark.scheduler.{TaskLocation, Task}
+import org.apache.spark.scheduler.{TaskLocation, Task}
class FakeTask(stageId: Int, prefLocs: Seq[TaskLocation] = Nil) extends Task[Int](stageId) {
override def run(attemptId: Long): Int = 0
diff --git a/core/src/test/scala/spark/scheduler/local/LocalSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/local/LocalSchedulerSuite.scala
index d28ee47fa3..111340a65c 100644
--- a/core/src/test/scala/spark/scheduler/local/LocalSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/local/LocalSchedulerSuite.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.scheduler.local
+package org.apache.spark.scheduler.local
import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfter
-import spark._
-import spark.scheduler._
-import spark.scheduler.cluster._
+import org.apache.spark._
+import org.apache.spark.scheduler._
+import org.apache.spark.scheduler.cluster._
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.{ConcurrentMap, HashMap}
import java.util.concurrent.Semaphore
diff --git a/core/src/test/scala/spark/storage/BlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
index b719d65342..88ba10f2f2 100644
--- a/core/src/test/scala/spark/storage/BlockManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
import java.nio.ByteBuffer
@@ -29,11 +29,12 @@ import org.scalatest.concurrent.Timeouts._
import org.scalatest.matchers.ShouldMatchers._
import org.scalatest.time.SpanSugar._
-import spark.JavaSerializer
-import spark.KryoSerializer
-import spark.SizeEstimator
-import spark.util.AkkaUtils
-import spark.util.ByteBufferInputStream
+import org.apache.spark.JavaSerializer
+import org.apache.spark.KryoSerializer
+import org.apache.spark.SizeEstimator
+import org.apache.spark.Utils
+import org.apache.spark.util.AkkaUtils
+import org.apache.spark.util.ByteBufferInputStream
class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodTester {
@@ -56,7 +57,7 @@ class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodTester {
System.setProperty("spark.hostPort", "localhost:" + boundPort)
master = new BlockManagerMaster(
- actorSystem.actorOf(Props(new spark.storage.BlockManagerMasterActor(true))))
+ actorSystem.actorOf(Props(new BlockManagerMasterActor(true))))
// Set the arch to 64-bit and compressedOops to true to get a deterministic test-case
oldArch = System.setProperty("os.arch", "amd64")
@@ -65,7 +66,7 @@ class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodTester {
val initialize = PrivateMethod[Unit]('initialize)
SizeEstimator invokePrivate initialize()
// Set some value ...
- System.setProperty("spark.hostPort", spark.Utils.localHostName() + ":" + 1111)
+ System.setProperty("spark.hostPort", Utils.localHostName() + ":" + 1111)
}
after {
@@ -105,10 +106,10 @@ class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodTester {
assert(level2 === level1, "level2 is not same as level1")
assert(level2.eq(level1), "level2 is not the same object as level1")
assert(level3 != level1, "level3 is same as level1")
- val bytes1 = spark.Utils.serialize(level1)
- val level1_ = spark.Utils.deserialize[StorageLevel](bytes1)
- val bytes2 = spark.Utils.serialize(level2)
- val level2_ = spark.Utils.deserialize[StorageLevel](bytes2)
+ val bytes1 = Utils.serialize(level1)
+ val level1_ = Utils.deserialize[StorageLevel](bytes1)
+ val bytes2 = Utils.serialize(level2)
+ val level2_ = Utils.deserialize[StorageLevel](bytes2)
assert(level1_ === level1, "Deserialized level1 not same as original level1")
assert(level1_.eq(level1), "Deserialized level1 not the same object as original level2")
assert(level2_ === level2, "Deserialized level2 not same as original level2")
@@ -122,10 +123,10 @@ class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodTester {
assert(id2 === id1, "id2 is not same as id1")
assert(id2.eq(id1), "id2 is not the same object as id1")
assert(id3 != id1, "id3 is same as id1")
- val bytes1 = spark.Utils.serialize(id1)
- val id1_ = spark.Utils.deserialize[BlockManagerId](bytes1)
- val bytes2 = spark.Utils.serialize(id2)
- val id2_ = spark.Utils.deserialize[BlockManagerId](bytes2)
+ val bytes1 = Utils.serialize(id1)
+ val id1_ = Utils.deserialize[BlockManagerId](bytes1)
+ val bytes2 = Utils.serialize(id2)
+ val id2_ = Utils.deserialize[BlockManagerId](bytes2)
assert(id1_ === id1, "Deserialized id1 is not same as original id1")
assert(id1_.eq(id1), "Deserialized id1 is not the same object as original id1")
assert(id2_ === id2, "Deserialized id2 is not same as original id2")
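
The .eq assertions in these hunks rely on StorageLevel and BlockManagerId deduplicating themselves on deserialization (via a cached-instance lookup in readResolve, if memory serves); Utils.serialize/deserialize are thin wrappers over Java serialization. A sketch of the round-trip being asserted:

    // Inside org.apache.spark (Utils was private[spark] at the time):
    import org.apache.spark.Utils
    import org.apache.spark.storage.StorageLevel

    val level = StorageLevel.MEMORY_ONLY
    val restored = Utils.deserialize[StorageLevel](Utils.serialize(level))

    assert(restored == level)   // structural equality
    assert(restored.eq(level))  // same cached object after deserialization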
diff --git a/core/src/test/scala/spark/ui/UISuite.scala b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
index 735a794396..3321fb5eb7 100644
--- a/core/src/test/scala/spark/ui/UISuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.ui
+package org.apache.spark.ui
import scala.util.{Failure, Success, Try}
import java.net.ServerSocket
diff --git a/core/src/test/scala/spark/util/DistributionSuite.scala b/core/src/test/scala/org/apache/spark/util/DistributionSuite.scala
index 6578b55e82..63642461e4 100644
--- a/core/src/test/scala/spark/util/DistributionSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/DistributionSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
diff --git a/core/src/test/scala/spark/util/FakeClock.scala b/core/src/test/scala/org/apache/spark/util/FakeClock.scala
index 236706317e..0a45917b08 100644
--- a/core/src/test/scala/spark/util/FakeClock.scala
+++ b/core/src/test/scala/org/apache/spark/util/FakeClock.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
class FakeClock extends Clock {
private var time = 0L
diff --git a/core/src/test/scala/spark/util/NextIteratorSuite.scala b/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
index fdbd43d941..45867463a5 100644
--- a/core/src/test/scala/spark/util/NextIteratorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
diff --git a/core/src/test/scala/spark/util/RateLimitedOutputStreamSuite.scala b/core/src/test/scala/org/apache/spark/util/RateLimitedOutputStreamSuite.scala
index 4c0044202f..a9dd0b1a5b 100644
--- a/core/src/test/scala/spark/util/RateLimitedOutputStreamSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/RateLimitedOutputStreamSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
import org.scalatest.FunSuite
import java.io.ByteArrayOutputStream