author     Marcelo Vanzin <vanzin@cloudera.com>  2016-01-05 19:07:39 -0800
committer  Marcelo Vanzin <vanzin@cloudera.com>  2016-01-05 19:07:39 -0800
commit     b3ba1be3b77e42120145252b2730a56f1d55fd21 (patch)
tree       06efab911dd05b5e4ec72d278f090fda9feaf4bd /streaming
parent     7a375bb87a8df56d9dde0c484e725e5c497a9876 (diff)
[SPARK-3873][TESTS] Import ordering fixes.
Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #10582 from vanzin/SPARK-3873-tests.
Diffstat (limited to 'streaming')
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/DStreamScopeSuite.scala  2
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/FailureSuite.scala  2
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala  16
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/MapWithStateSuite.scala  4
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/MasterFailureTest.scala  18
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockHandlerSuite.scala  5
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/ReceiverInputDStreamSuite.scala  2
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/ReceiverSuite.scala  2
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/StreamingListenerSuite.scala  12
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala  6
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/WindowOperationsSuite.scala  2
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/rdd/MapWithStateRDDSuite.scala  2
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/rdd/WriteAheadLogBackedBlockRDDSuite.scala  2
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/receiver/BlockGeneratorSuite.scala  4
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/scheduler/InputInfoTrackerSuite.scala  2
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala  11
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogUtilsSuite.scala  2
17 files changed, 46 insertions, 48 deletions
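The patch is mechanical: each hunk below reorders import statements to match the grouping convention used across the Spark codebase. As a sketch inferred from the hunks themselves (not quoted from a Spark style document), the convention is: java imports first, then scala, then third-party libraries, then org.apache.spark, each group alphabetized and separated by a blank line. All imports in this illustration appear somewhere in the patch:

    import java.io.{BufferedWriter, File, OutputStreamWriter}
    import java.util.concurrent.atomic.AtomicInteger

    import scala.collection.mutable.ArrayBuffer
    import scala.util.Random

    import com.google.common.io.Files
    import org.apache.hadoop.fs.Path
    import org.scalatest.BeforeAndAfter

    import org.apache.spark.{SparkConf, SparkFunSuite}
    import org.apache.spark.streaming.dstream.DStream
    import org.apache.spark.util.Utils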
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/DStreamScopeSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/DStreamScopeSuite.scala
index 4c12ecc399..94f1bcebc3 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/DStreamScopeSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/DStreamScopeSuite.scala
@@ -21,11 +21,11 @@ import scala.collection.mutable.ArrayBuffer
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll}
+import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
import org.apache.spark.rdd.{RDD, RDDOperationScope}
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.ui.UIUtils
import org.apache.spark.util.ManualClock
-import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
/**
* Tests whether scope information is passed from DStream operations to RDDs correctly.
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/FailureSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/FailureSuite.scala
index e82c2fa4e7..6a0b0a1d47 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/FailureSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/FailureSuite.scala
@@ -21,7 +21,7 @@ import java.io.File
import org.scalatest.BeforeAndAfter
-import org.apache.spark.{SparkFunSuite, Logging}
+import org.apache.spark.{Logging, SparkFunSuite}
import org.apache.spark.util.Utils
/**
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala
index 3a3176b91b..2e231601c3 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala
@@ -17,30 +17,30 @@
package org.apache.spark.streaming
-import java.io.{File, BufferedWriter, OutputStreamWriter}
-import java.net.{Socket, SocketException, ServerSocket}
+import java.io.{BufferedWriter, File, OutputStreamWriter}
+import java.net.{ServerSocket, Socket, SocketException}
import java.nio.charset.Charset
-import java.util.concurrent.{CountDownLatch, Executors, TimeUnit, ArrayBlockingQueue}
+import java.util.concurrent.{ArrayBlockingQueue, CountDownLatch, Executors, TimeUnit}
import java.util.concurrent.atomic.AtomicInteger
-import scala.collection.mutable.{SynchronizedBuffer, ArrayBuffer, SynchronizedQueue}
+import scala.collection.mutable.{ArrayBuffer, SynchronizedBuffer, SynchronizedQueue}
import scala.language.postfixOps
import com.google.common.io.Files
-import org.apache.hadoop.io.{Text, LongWritable}
-import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.apache.hadoop.fs.Path
+import org.apache.hadoop.io.{LongWritable, Text}
+import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.scalatest.BeforeAndAfter
import org.scalatest.concurrent.Eventually._
import org.apache.spark.Logging
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
-import org.apache.spark.streaming.scheduler.{StreamingListenerBatchCompleted, StreamingListener}
-import org.apache.spark.util.{ManualClock, Utils}
import org.apache.spark.streaming.dstream.{InputDStream, ReceiverInputDStream}
import org.apache.spark.streaming.rdd.WriteAheadLogBackedBlockRDD
import org.apache.spark.streaming.receiver.Receiver
+import org.apache.spark.streaming.scheduler.{StreamingListener, StreamingListenerBatchCompleted}
+import org.apache.spark.util.{ManualClock, Utils}
class InputStreamsSuite extends TestSuiteBase with BeforeAndAfter {
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/MapWithStateSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/MapWithStateSuite.scala
index 62d75a9e0e..2984fd2b29 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/MapWithStateSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/MapWithStateSuite.scala
@@ -22,12 +22,12 @@ import java.io.File
import scala.collection.mutable.{ArrayBuffer, SynchronizedBuffer}
import scala.reflect.ClassTag
-import org.scalatest.PrivateMethodTester._
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll}
+import org.scalatest.PrivateMethodTester._
+import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
import org.apache.spark.streaming.dstream.{DStream, InternalMapWithStateDStream, MapWithStateDStream, MapWithStateDStreamImpl}
import org.apache.spark.util.{ManualClock, Utils}
-import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
class MapWithStateSuite extends SparkFunSuite
with DStreamCheckpointTester with BeforeAndAfterAll with BeforeAndAfter {
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/MasterFailureTest.scala b/streaming/src/test/scala/org/apache/spark/streaming/MasterFailureTest.scala
index 0e64b57e0f..4e56dfbd42 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/MasterFailureTest.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/MasterFailureTest.scala
@@ -17,23 +17,21 @@
package org.apache.spark.streaming
-import org.apache.spark.Logging
-import org.apache.spark.streaming.dstream.DStream
-import org.apache.spark.util.Utils
-
-import scala.util.Random
-import scala.collection.mutable.ArrayBuffer
-import scala.reflect.ClassTag
-
import java.io.{File, IOException}
import java.nio.charset.Charset
import java.util.UUID
-import com.google.common.io.Files
+import scala.collection.mutable.ArrayBuffer
+import scala.reflect.ClassTag
+import scala.util.Random
-import org.apache.hadoop.fs.Path
+import com.google.common.io.Files
import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.fs.Path
+import org.apache.spark.Logging
+import org.apache.spark.streaming.dstream.DStream
+import org.apache.spark.util.Utils
private[streaming]
object MasterFailureTest extends Logging {
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockHandlerSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockHandlerSuite.scala
index c17fb72381..dd16fc3eca 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockHandlerSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockHandlerSuite.scala
@@ -39,8 +39,6 @@ import org.apache.spark.storage._
import org.apache.spark.streaming.receiver._
import org.apache.spark.streaming.util._
import org.apache.spark.util.{ManualClock, Utils}
-import WriteAheadLogBasedBlockHandler._
-import WriteAheadLogSuite._
class ReceivedBlockHandlerSuite
extends SparkFunSuite
@@ -48,6 +46,9 @@ class ReceivedBlockHandlerSuite
with Matchers
with Logging {
+ import WriteAheadLogBasedBlockHandler._
+ import WriteAheadLogSuite._
+
val conf = new SparkConf()
.set("spark.streaming.receiver.writeAheadLog.rollingIntervalSecs", "1")
.set("spark.app.id", "streaming-test")
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/ReceiverInputDStreamSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/ReceiverInputDStreamSuite.scala
index e6d8fbd4d7..a4871b460e 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/ReceiverInputDStreamSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/ReceiverInputDStreamSuite.scala
@@ -21,6 +21,7 @@ import scala.util.Random
import org.scalatest.BeforeAndAfterAll
+import org.apache.spark.{SparkConf, SparkEnv}
import org.apache.spark.rdd.BlockRDD
import org.apache.spark.storage.{StorageLevel, StreamBlockId}
import org.apache.spark.streaming.dstream.ReceiverInputDStream
@@ -28,7 +29,6 @@ import org.apache.spark.streaming.rdd.WriteAheadLogBackedBlockRDD
import org.apache.spark.streaming.receiver.{BlockManagerBasedStoreResult, Receiver, WriteAheadLogBasedStoreResult}
import org.apache.spark.streaming.scheduler.ReceivedBlockInfo
import org.apache.spark.streaming.util.{WriteAheadLogRecordHandle, WriteAheadLogUtils}
-import org.apache.spark.{SparkConf, SparkEnv}
class ReceiverInputDStreamSuite extends TestSuiteBase with BeforeAndAfterAll {
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/ReceiverSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/ReceiverSuite.scala
index 01279b34f7..917232c9cd 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/ReceiverSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/ReceiverSuite.scala
@@ -24,8 +24,8 @@ import java.util.concurrent.Semaphore
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
-import org.scalatest.concurrent.Timeouts
import org.scalatest.concurrent.Eventually._
+import org.scalatest.concurrent.Timeouts
import org.scalatest.time.SpanSugar._
import org.apache.spark.SparkConf
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/StreamingListenerSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/StreamingListenerSuite.scala
index 04cd5bdc26..628a508207 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/StreamingListenerSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/StreamingListenerSuite.scala
@@ -18,20 +18,20 @@
package org.apache.spark.streaming
import scala.collection.mutable.{ArrayBuffer, HashMap, SynchronizedBuffer, SynchronizedMap}
-import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
+import scala.concurrent.Future
+
+import org.scalatest.Matchers
+import org.scalatest.concurrent.Eventually._
+import org.scalatest.time.SpanSugar._
+import org.apache.spark.Logging
import org.apache.spark.SparkException
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.receiver.Receiver
import org.apache.spark.streaming.scheduler._
-import org.scalatest.Matchers
-import org.scalatest.concurrent.Eventually._
-import org.scalatest.time.SpanSugar._
-import org.apache.spark.Logging
-
class StreamingListenerSuite extends TestSuiteBase with Matchers {
val input = (1 to 4).map(Seq(_)).toSeq
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala b/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala
index be0f4636a6..54eff2b214 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala
@@ -17,7 +17,7 @@
package org.apache.spark.streaming
-import java.io.{ObjectInputStream, IOException}
+import java.io.{IOException, ObjectInputStream}
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.SynchronizedBuffer
@@ -25,13 +25,13 @@ import scala.language.implicitConversions
import scala.reflect.ClassTag
import org.scalatest.BeforeAndAfter
-import org.scalatest.time.{Span, Seconds => ScalaTestSeconds}
import org.scalatest.concurrent.Eventually.timeout
import org.scalatest.concurrent.PatienceConfiguration
+import org.scalatest.time.{Seconds => ScalaTestSeconds, Span}
import org.apache.spark.{Logging, SparkConf, SparkFunSuite}
import org.apache.spark.rdd.RDD
-import org.apache.spark.streaming.dstream.{DStream, InputDStream, ForEachDStream}
+import org.apache.spark.streaming.dstream.{DStream, ForEachDStream, InputDStream}
import org.apache.spark.streaming.scheduler._
import org.apache.spark.util.{ManualClock, Utils}
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/WindowOperationsSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/WindowOperationsSuite.scala
index c39ad05f41..c7d085ec07 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/WindowOperationsSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/WindowOperationsSuite.scala
@@ -17,8 +17,8 @@
package org.apache.spark.streaming
-import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming.dstream.DStream
class WindowOperationsSuite extends TestSuiteBase {
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/rdd/MapWithStateRDDSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/rdd/MapWithStateRDDSuite.scala
index 1640b9e6b7..5b13fd6ad6 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/rdd/MapWithStateRDDSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/rdd/MapWithStateRDDSuite.scala
@@ -26,8 +26,8 @@ import org.scalatest.BeforeAndAfterAll
import org.apache.spark._
import org.apache.spark.rdd.RDD
-import org.apache.spark.streaming.util.OpenHashMapBasedStateMap
import org.apache.spark.streaming.{State, Time}
+import org.apache.spark.streaming.util.OpenHashMapBasedStateMap
import org.apache.spark.util.Utils
class MapWithStateRDDSuite extends SparkFunSuite with RDDCheckpointTester with BeforeAndAfterAll {
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/rdd/WriteAheadLogBackedBlockRDDSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/rdd/WriteAheadLogBackedBlockRDDSuite.scala
index 43833c4361..79ac833c18 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/rdd/WriteAheadLogBackedBlockRDDSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/rdd/WriteAheadLogBackedBlockRDDSuite.scala
@@ -23,10 +23,10 @@ import scala.util.Random
import org.apache.hadoop.conf.Configuration
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
+import org.apache.spark.{SparkConf, SparkContext, SparkException, SparkFunSuite}
import org.apache.spark.storage.{BlockId, BlockManager, StorageLevel, StreamBlockId}
import org.apache.spark.streaming.util.{FileBasedWriteAheadLogSegment, FileBasedWriteAheadLogWriter}
import org.apache.spark.util.Utils
-import org.apache.spark.{SparkConf, SparkContext, SparkException, SparkFunSuite}
class WriteAheadLogBackedBlockRDDSuite
extends SparkFunSuite with BeforeAndAfterAll with BeforeAndAfterEach {
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/receiver/BlockGeneratorSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/receiver/BlockGeneratorSuite.scala
index 92ad9fe52b..f5ec0ff60a 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/receiver/BlockGeneratorSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/receiver/BlockGeneratorSuite.scala
@@ -22,13 +22,13 @@ import scala.language.reflectiveCalls
import org.scalatest.BeforeAndAfter
import org.scalatest.Matchers._
-import org.scalatest.concurrent.Timeouts._
import org.scalatest.concurrent.Eventually._
+import org.scalatest.concurrent.Timeouts._
import org.scalatest.time.SpanSugar._
+import org.apache.spark.{SparkConf, SparkException, SparkFunSuite}
import org.apache.spark.storage.StreamBlockId
import org.apache.spark.util.ManualClock
-import org.apache.spark.{SparkException, SparkConf, SparkFunSuite}
class BlockGeneratorSuite extends SparkFunSuite with BeforeAndAfter {
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/scheduler/InputInfoTrackerSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/scheduler/InputInfoTrackerSuite.scala
index f5248acf71..a7e365649d 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/scheduler/InputInfoTrackerSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/scheduler/InputInfoTrackerSuite.scala
@@ -20,7 +20,7 @@ package org.apache.spark.streaming.scheduler
import org.scalatest.BeforeAndAfter
import org.apache.spark.{SparkConf, SparkFunSuite}
-import org.apache.spark.streaming.{Time, Duration, StreamingContext}
+import org.apache.spark.streaming.{Duration, StreamingContext, Time}
class InputInfoTrackerSuite extends SparkFunSuite with BeforeAndAfter {
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala
index a670c7d638..b5d6a24ce8 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala
@@ -19,8 +19,8 @@ package org.apache.spark.streaming.util
import java.io._
import java.nio.ByteBuffer
import java.util.{Iterator => JIterator}
+import java.util.concurrent.{CountDownLatch, RejectedExecutionException, ThreadPoolExecutor, TimeUnit}
import java.util.concurrent.atomic.AtomicInteger
-import java.util.concurrent.{RejectedExecutionException, TimeUnit, CountDownLatch, ThreadPoolExecutor}
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
@@ -31,17 +31,16 @@ import scala.language.{implicitConversions, postfixOps}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.mockito.ArgumentCaptor
-import org.mockito.Matchers.{eq => meq}
-import org.mockito.Matchers._
+import org.mockito.Matchers.{eq => meq, _}
import org.mockito.Mockito._
+import org.scalatest.{BeforeAndAfter, BeforeAndAfterEach, PrivateMethodTester}
import org.scalatest.concurrent.Eventually
import org.scalatest.concurrent.Eventually._
-import org.scalatest.{PrivateMethodTester, BeforeAndAfterEach, BeforeAndAfter}
import org.scalatest.mock.MockitoSugar
-import org.apache.spark.streaming.scheduler._
-import org.apache.spark.util.{CompletionIterator, ThreadUtils, ManualClock, Utils}
import org.apache.spark.{SparkConf, SparkFunSuite}
+import org.apache.spark.streaming.scheduler._
+import org.apache.spark.util.{CompletionIterator, ManualClock, ThreadUtils, Utils}
/** Common tests for WriteAheadLogs that we would like to test with different configurations. */
abstract class CommonWriteAheadLogTests(
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogUtilsSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogUtilsSuite.scala
index bfc5b0cf60..2a41177a5e 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogUtilsSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogUtilsSuite.scala
@@ -24,7 +24,7 @@ import scala.reflect.ClassTag
import org.apache.hadoop.conf.Configuration
-import org.apache.spark.{SparkException, SparkConf, SparkFunSuite}
+import org.apache.spark.{SparkConf, SparkException, SparkFunSuite}
import org.apache.spark.util.Utils
class WriteAheadLogUtilsSuite extends SparkFunSuite {