author     NirmalReddy <nirmal.reddy@imaginea.com>   2014-02-18 14:44:36 -0800
committer  Aaron Davidson <aaron@databricks.com>     2014-02-18 14:44:36 -0800
commit     ccb327a49a7323efd98a33223c438a670bba7cec (patch)
tree       a783ac1025e79666dda960db6241900b55644ec4
parent     f74ae0ebcee59b70a56d34bdf63e3d1b38e2bd59 (diff)
Optimized imports
Optimized imports and arranged according to scala style guide @
https://cwiki.apache.org/confluence/display/SPARK/Spark+Code+Style+Guide#SparkCodeStyleGuide-Imports

Author: NirmalReddy <nirmal.reddy@imaginea.com>
Author: NirmalReddy <nirmal_reddy2000@yahoo.com>

Closes #613 from NirmalReddy/opt-imports and squashes the following commits:

578b4f5 [NirmalReddy] imported java.lang.Double as JDouble
a2cbcc5 [NirmalReddy] addressed the comments
776d664 [NirmalReddy] Optimized imports in core
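For reference, a minimal sketch (not taken from the patch itself) of the import layout the linked style guide asks for and that this commit applies across core: java/javax imports first, then scala, then third-party libraries, then org.apache.spark, one blank line between groups, entries alphabetized, and selectors sorted inside braces. The specific classes below are only illustrative.

// Illustrative ordering only; grouping per the Spark Code Style Guide.
import java.io.File
import java.util.{Properties, UUID}

import scala.collection.mutable.{ArrayBuffer, HashMap}
import scala.reflect.ClassTag

import com.google.common.io.Files

import org.apache.spark.rdd.RDD
import org.apache.spark.util.Utils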
-rw-r--r--core/src/main/java/org/apache/spark/network/netty/FileClient.java4
-rw-r--r--core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java4
-rw-r--r--core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/Accumulators.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/CacheManager.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/FutureAction.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/HttpFileServer.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/HttpServer.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/MapOutputTracker.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/SerializableWritable.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/ShuffleFetcher.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/SparkConf.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/SparkContext.scala15
-rw-r--r--core/src/main/scala/org/apache/spark/SparkEnv.scala9
-rw-r--r--core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala44
-rw-r--r--core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java1
-rw-r--r--core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java1
-rw-r--r--core/src/main/scala/org/apache/spark/api/java/function/Function.java5
-rw-r--r--core/src/main/scala/org/apache/spark/api/java/function/Function2.java4
-rw-r--r--core/src/main/scala/org/apache/spark/api/java/function/Function3.java5
-rw-r--r--core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java4
-rw-r--r--core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java4
-rw-r--r--core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/Client.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala6
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala6
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala7
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/master/Master.scala5
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/master/SparkZooKeeperSession.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala3
-rwxr-xr-xcore/src/main/scala/org/apache/spark/deploy/worker/Worker.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala5
-rw-r--r--core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/io/CompressionCodec.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala6
-rw-r--r--core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala8
-rw-r--r--core/src/main/scala/org/apache/spark/network/BufferMessage.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/network/Connection.scala11
-rw-r--r--core/src/main/scala/org/apache/spark/network/ConnectionManager.scala13
-rw-r--r--core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala9
-rw-r--r--core/src/main/scala/org/apache/spark/network/Message.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/network/MessageChunk.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/network/ReceiverTest.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/network/SenderTest.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala7
-rw-r--r--core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala5
-rw-r--r--core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala5
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala5
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala7
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala7
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala10
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala7
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala5
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/PartitionwiseSampledRDD.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala5
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/RDD.scala15
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/RDDCheckpointData.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala8
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala9
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala6
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala6
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala6
-rw-r--r--core/src/main/scala/org/apache/spark/rdd/ZippedWithIndexRDD.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala14
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/Schedulable.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/SchedulerBackend.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala5
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/Task.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/TaskDescription.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/TaskInfo.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/TaskLocality.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala7
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/TaskResultGetter.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala7
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala7
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/local/LocalBackend.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala8
-rw-r--r--core/src/main/scala/org/apache/spark/serializer/Serializer.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala5
-rw-r--r--core/src/main/scala/org/apache/spark/storage/BlockManager.scala13
-rw-r--r--core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/storage/BlockMessage.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/storage/BlockStore.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/storage/DiskStore.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/storage/MemoryStore.scala7
-rw-r--r--core/src/main/scala/org/apache/spark/storage/StoragePerfTester.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/storage/StorageUtils.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala7
-rw-r--r--core/src/main/scala/org/apache/spark/ui/JettyUtils.scala10
-rw-r--r--core/src/main/scala/org/apache/spark/ui/SparkUI.scala6
-rw-r--r--core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala5
-rw-r--r--core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala14
-rw-r--r--core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala6
-rw-r--r--core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala5
-rw-r--r--core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/util/AkkaUtils.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala3
-rw-r--r--core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/util/MutablePair.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/util/SerializableHyperLogLog.scala5
-rw-r--r--core/src/main/scala/org/apache/spark/util/SizeEstimator.scala11
-rw-r--r--core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala5
-rw-r--r--core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala4
-rw-r--r--core/src/main/scala/org/apache/spark/util/Utils.scala12
-rw-r--r--core/src/main/scala/org/apache/spark/util/Vector.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/util/collection/BitSet.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala1
-rw-r--r--core/src/main/scala/org/apache/spark/util/random/XORShiftRandom.scala1
-rw-r--r--core/src/test/scala/org/apache/spark/AccumulatorSuite.scala7
-rw-r--r--core/src/test/scala/org/apache/spark/CheckpointSuite.scala7
-rw-r--r--core/src/test/scala/org/apache/spark/DistributedSuite.scala9
-rw-r--r--core/src/test/scala/org/apache/spark/DriverSuite.scala1
-rw-r--r--core/src/test/scala/org/apache/spark/FailureSuite.scala2
-rw-r--r--core/src/test/scala/org/apache/spark/FileServerSuite.scala3
-rw-r--r--core/src/test/scala/org/apache/spark/FileSuite.scala9
-rw-r--r--core/src/test/scala/org/apache/spark/JavaAPISuite.java5
-rw-r--r--core/src/test/scala/org/apache/spark/JobCancellationSuite.scala3
-rw-r--r--core/src/test/scala/org/apache/spark/LocalSparkContext.scala7
-rw-r--r--core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala5
-rw-r--r--core/src/test/scala/org/apache/spark/PartitioningSuite.scala3
-rw-r--r--core/src/test/scala/org/apache/spark/PipedRDDSuite.scala1
-rw-r--r--core/src/test/scala/org/apache/spark/SharedSparkContext.scala2
-rw-r--r--core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala1
-rw-r--r--core/src/test/scala/org/apache/spark/ShuffleSuite.scala5
-rw-r--r--core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala1
-rw-r--r--core/src/test/scala/org/apache/spark/ThreadingSuite.scala3
-rw-r--r--core/src/test/scala/org/apache/spark/UnpersistSuite.scala3
-rw-r--r--core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala9
-rw-r--r--core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala5
-rw-r--r--core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala2
-rw-r--r--core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala6
-rw-r--r--core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala2
-rw-r--r--core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala5
-rw-r--r--core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala2
-rw-r--r--core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala3
-rw-r--r--core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala1
-rw-r--r--core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala6
-rw-r--r--core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala7
-rw-r--r--core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala3
-rw-r--r--core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala4
-rw-r--r--core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala2
-rw-r--r--core/src/test/scala/org/apache/spark/rdd/PartitionwiseSampledRDDSuite.scala1
-rw-r--r--core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala8
-rw-r--r--core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala1
-rw-r--r--core/src/test/scala/org/apache/spark/scheduler/ClusterSchedulerSuite.scala8
-rw-r--r--core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala3
-rw-r--r--core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala6
-rw-r--r--core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala7
-rw-r--r--core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala2
-rw-r--r--core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala3
-rw-r--r--core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala4
-rw-r--r--core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala7
-rw-r--r--core/src/test/scala/org/apache/spark/storage/DiskBlockManagerSuite.scala3
-rw-r--r--core/src/test/scala/org/apache/spark/ui/UISuite.scala6
-rw-r--r--core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala4
-rw-r--r--core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite.scala4
-rw-r--r--core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala6
-rw-r--r--core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala3
-rw-r--r--core/src/test/scala/org/apache/spark/util/UtilsSuite.scala10
-rw-r--r--core/src/test/scala/org/apache/spark/util/collection/AppendOnlyMapSuite.scala3
-rw-r--r--core/src/test/scala/org/apache/spark/util/collection/BitSetSuite.scala1
-rw-r--r--core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala2
-rw-r--r--core/src/test/scala/org/apache/spark/util/collection/OpenHashSetSuite.scala1
-rw-r--r--core/src/test/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMapSuite.scala2
-rw-r--r--core/src/test/scala/org/apache/spark/util/random/RandomSamplerSuite.scala6
-rw-r--r--core/src/test/scala/org/apache/spark/util/random/XORShiftRandomSuite.scala1
246 files changed, 446 insertions(+), 552 deletions(-)
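One recurring pattern in the diff below (see JavaDoubleRDD.scala) is renaming java.lang.Double to JDouble at the import site so the boxed Java type no longer shadows scala.Double. A standalone, hypothetical sketch of that aliasing, with made-up names for illustration:

import java.lang.{Double => JDouble}

object BoxingSketch {
  // The unqualified Double stays scala.Double; the boxed Java class is JDouble.
  def box(x: Double): JDouble = JDouble.valueOf(x)
  def unbox(x: JDouble): Double = x.doubleValue
}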
diff --git a/core/src/main/java/org/apache/spark/network/netty/FileClient.java b/core/src/main/java/org/apache/spark/network/netty/FileClient.java
index d2d778b756..0d31894d6e 100644
--- a/core/src/main/java/org/apache/spark/network/netty/FileClient.java
+++ b/core/src/main/java/org/apache/spark/network/netty/FileClient.java
@@ -17,6 +17,8 @@
package org.apache.spark.network.netty;
+import java.util.concurrent.TimeUnit;
+
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelOption;
@@ -27,8 +29,6 @@ import io.netty.channel.socket.oio.OioSocketChannel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.util.concurrent.TimeUnit;
-
class FileClient {
private static final Logger LOG = LoggerFactory.getLogger(FileClient.class.getName());
diff --git a/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java b/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java
index 3ac045f944..c0133e19c7 100644
--- a/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java
+++ b/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java
@@ -23,11 +23,11 @@ import java.io.FileInputStream;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.channel.DefaultFileRegion;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.spark.storage.BlockId;
import org.apache.spark.storage.FileSegment;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
class FileServerHandler extends SimpleChannelInboundHandler<String> {
diff --git a/core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala b/core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala
index 32429f01ac..1fca5729c6 100644
--- a/core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala
+++ b/core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala
@@ -17,7 +17,8 @@
package org.apache.hadoop.mapreduce
-import java.lang.{Integer => JInteger, Boolean => JBoolean}
+import java.lang.{Boolean => JBoolean, Integer => JInteger}
+
import org.apache.hadoop.conf.Configuration
private[apache]
diff --git a/core/src/main/scala/org/apache/spark/Accumulators.scala b/core/src/main/scala/org/apache/spark/Accumulators.scala
index df01b2e942..73dd471ab1 100644
--- a/core/src/main/scala/org/apache/spark/Accumulators.scala
+++ b/core/src/main/scala/org/apache/spark/Accumulators.scala
@@ -19,8 +19,9 @@ package org.apache.spark
import java.io.{ObjectInputStream, Serializable}
-import scala.collection.mutable.Map
import scala.collection.generic.Growable
+import scala.collection.mutable.Map
+
import org.apache.spark.serializer.JavaSerializer
/**
diff --git a/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala b/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala
index d9ed572da6..754b46a4c7 100644
--- a/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala
+++ b/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala
@@ -20,12 +20,11 @@ package org.apache.spark
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashMap
-import org.apache.spark.executor.{ShuffleReadMetrics, TaskMetrics}
+import org.apache.spark.executor.ShuffleReadMetrics
import org.apache.spark.serializer.Serializer
import org.apache.spark.storage.{BlockId, BlockManagerId, ShuffleBlockId}
import org.apache.spark.util.CompletionIterator
-
private[spark] class BlockStoreShuffleFetcher extends ShuffleFetcher with Logging {
override def fetch[T](
diff --git a/core/src/main/scala/org/apache/spark/CacheManager.scala b/core/src/main/scala/org/apache/spark/CacheManager.scala
index b38af2497d..1daabecf23 100644
--- a/core/src/main/scala/org/apache/spark/CacheManager.scala
+++ b/core/src/main/scala/org/apache/spark/CacheManager.scala
@@ -18,9 +18,9 @@
package org.apache.spark
import scala.collection.mutable.{ArrayBuffer, HashSet}
-import org.apache.spark.storage.{BlockId, BlockManager, StorageLevel, RDDBlockId}
-import org.apache.spark.rdd.RDD
+import org.apache.spark.rdd.RDD
+import org.apache.spark.storage.{BlockManager, RDDBlockId, StorageLevel}
/** Spark class responsible for passing RDDs split contents to the BlockManager and making
sure a node doesn't load two copies of an RDD at once.
diff --git a/core/src/main/scala/org/apache/spark/FutureAction.scala b/core/src/main/scala/org/apache/spark/FutureAction.scala
index d7d10285da..f2decd14ef 100644
--- a/core/src/main/scala/org/apache/spark/FutureAction.scala
+++ b/core/src/main/scala/org/apache/spark/FutureAction.scala
@@ -21,10 +21,8 @@ import scala.concurrent._
import scala.concurrent.duration.Duration
import scala.util.Try
-import org.apache.spark.scheduler.{JobSucceeded, JobWaiter}
-import org.apache.spark.scheduler.JobFailed
import org.apache.spark.rdd.RDD
-
+import org.apache.spark.scheduler.{JobFailed, JobSucceeded, JobWaiter}
/**
* A future for the result of an action to support cancellation. This is an extension of the
diff --git a/core/src/main/scala/org/apache/spark/HttpFileServer.scala b/core/src/main/scala/org/apache/spark/HttpFileServer.scala
index a885898ad4..d3264a4bb3 100644
--- a/core/src/main/scala/org/apache/spark/HttpFileServer.scala
+++ b/core/src/main/scala/org/apache/spark/HttpFileServer.scala
@@ -17,8 +17,10 @@
package org.apache.spark
-import java.io.{File}
+import java.io.File
+
import com.google.common.io.Files
+
import org.apache.spark.util.Utils
private[spark] class HttpFileServer extends Logging {
diff --git a/core/src/main/scala/org/apache/spark/HttpServer.scala b/core/src/main/scala/org/apache/spark/HttpServer.scala
index 69a738dc44..759e68ee0c 100644
--- a/core/src/main/scala/org/apache/spark/HttpServer.scala
+++ b/core/src/main/scala/org/apache/spark/HttpServer.scala
@@ -18,7 +18,6 @@
package org.apache.spark
import java.io.File
-import java.net.InetAddress
import org.eclipse.jetty.server.Server
import org.eclipse.jetty.server.bio.SocketConnector
@@ -26,6 +25,7 @@ import org.eclipse.jetty.server.handler.DefaultHandler
import org.eclipse.jetty.server.handler.HandlerList
import org.eclipse.jetty.server.handler.ResourceHandler
import org.eclipse.jetty.util.thread.QueuedThreadPool
+
import org.apache.spark.util.Utils
/**
diff --git a/core/src/main/scala/org/apache/spark/MapOutputTracker.scala b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
index 8d6db0fca2..5968973132 100644
--- a/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
+++ b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
@@ -22,7 +22,6 @@ import java.util.zip.{GZIPInputStream, GZIPOutputStream}
import scala.collection.mutable.HashSet
import scala.concurrent.Await
-import scala.concurrent.duration._
import akka.actor._
import akka.pattern.ask
diff --git a/core/src/main/scala/org/apache/spark/SerializableWritable.scala b/core/src/main/scala/org/apache/spark/SerializableWritable.scala
index fdd4c24e23..dff665cae6 100644
--- a/core/src/main/scala/org/apache/spark/SerializableWritable.scala
+++ b/core/src/main/scala/org/apache/spark/SerializableWritable.scala
@@ -19,9 +19,9 @@ package org.apache.spark
import java.io._
+import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.ObjectWritable
import org.apache.hadoop.io.Writable
-import org.apache.hadoop.conf.Configuration
class SerializableWritable[T <: Writable](@transient var t: T) extends Serializable {
def value = t
diff --git a/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala b/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala
index a85aa50a9b..e8f756c408 100644
--- a/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala
+++ b/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala
@@ -17,10 +17,8 @@
package org.apache.spark
-import org.apache.spark.executor.TaskMetrics
import org.apache.spark.serializer.Serializer
-
private[spark] abstract class ShuffleFetcher {
/**
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 45d19bcbfa..b947feb891 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -20,8 +20,6 @@ package org.apache.spark
import scala.collection.JavaConverters._
import scala.collection.mutable.HashMap
-import java.io.{ObjectInputStream, ObjectOutputStream, IOException}
-
/**
* Configuration for a Spark application. Used to set various Spark parameters as key-value pairs.
*
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 5a6d06b66e..a24f07e9a6 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -19,21 +19,18 @@ package org.apache.spark
import java.io._
import java.net.URI
-import java.util.{UUID, Properties}
+import java.util.{Properties, UUID}
import java.util.concurrent.atomic.AtomicInteger
import scala.collection.{Map, Set}
import scala.collection.generic.Growable
-
import scala.collection.mutable.{ArrayBuffer, HashMap}
import scala.reflect.{ClassTag, classTag}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
-import org.apache.hadoop.io.{ArrayWritable, BooleanWritable, BytesWritable, DoubleWritable,
- FloatWritable, IntWritable, LongWritable, NullWritable, Text, Writable}
-import org.apache.hadoop.mapred.{FileInputFormat, InputFormat, JobConf, SequenceFileInputFormat,
- TextInputFormat}
+import org.apache.hadoop.io.{ArrayWritable, BooleanWritable, BytesWritable, DoubleWritable, FloatWritable, IntWritable, LongWritable, NullWritable, Text, Writable}
+import org.apache.hadoop.mapred.{FileInputFormat, InputFormat, JobConf, SequenceFileInputFormat, TextInputFormat}
import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat, Job => NewHadoopJob}
import org.apache.hadoop.mapreduce.lib.input.{FileInputFormat => NewFileInputFormat}
import org.apache.mesos.MesosNativeLibrary
@@ -42,14 +39,12 @@ import org.apache.spark.deploy.{LocalSparkCluster, SparkHadoopUtil}
import org.apache.spark.partial.{ApproximateEvaluator, PartialResult}
import org.apache.spark.rdd._
import org.apache.spark.scheduler._
-import org.apache.spark.scheduler.cluster.{CoarseGrainedSchedulerBackend,
- SparkDeploySchedulerBackend, SimrSchedulerBackend}
+import org.apache.spark.scheduler.cluster.{CoarseGrainedSchedulerBackend, SparkDeploySchedulerBackend, SimrSchedulerBackend}
import org.apache.spark.scheduler.cluster.mesos.{CoarseMesosSchedulerBackend, MesosSchedulerBackend}
import org.apache.spark.scheduler.local.LocalBackend
import org.apache.spark.storage.{BlockManagerSource, RDDInfo, StorageStatus, StorageUtils}
import org.apache.spark.ui.SparkUI
-import org.apache.spark.util.{Utils, TimeStampedHashMap, MetadataCleaner, MetadataCleanerType,
- ClosureCleaner}
+import org.apache.spark.util.{ClosureCleaner, MetadataCleaner, MetadataCleanerType, TimeStampedHashMap, Utils}
/**
* Main entry point for Spark functionality. A SparkContext represents the connection to a Spark
diff --git a/core/src/main/scala/org/apache/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala
index 6ae020f6a2..7ac65828f6 100644
--- a/core/src/main/scala/org/apache/spark/SparkEnv.scala
+++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -21,16 +21,15 @@ import scala.collection.mutable
import scala.concurrent.Await
import akka.actor._
+import com.google.common.collect.MapMaker
+import org.apache.spark.api.python.PythonWorkerFactory
import org.apache.spark.broadcast.BroadcastManager
import org.apache.spark.metrics.MetricsSystem
-import org.apache.spark.storage.{BlockManagerMasterActor, BlockManager, BlockManagerMaster}
+import org.apache.spark.storage.{BlockManager, BlockManagerMaster, BlockManagerMasterActor}
import org.apache.spark.network.ConnectionManager
import org.apache.spark.serializer.{Serializer, SerializerManager}
-import org.apache.spark.util.{Utils, AkkaUtils}
-import org.apache.spark.api.python.PythonWorkerFactory
-
-import com.google.common.collect.MapMaker
+import org.apache.spark.util.{AkkaUtils, Utils}
/**
* Holds all the runtime environment objects for a running Spark instance (either master or worker),
diff --git a/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala b/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
index 4e63117a51..d404459a8e 100644
--- a/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
+++ b/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
@@ -18,8 +18,8 @@
package org.apache.hadoop.mapred
import java.io.IOException
-import java.text.SimpleDateFormat
import java.text.NumberFormat
+import java.text.SimpleDateFormat
import java.util.Date
import org.apache.hadoop.fs.FileSystem
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
index 33737e1960..071044463d 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
@@ -17,27 +17,25 @@
package org.apache.spark.api.java
+import java.lang.{Double => JDouble}
+
import scala.reflect.ClassTag
-import org.apache.spark.rdd.RDD
+import org.apache.spark.Partitioner
import org.apache.spark.SparkContext.doubleRDDToDoubleRDDFunctions
import org.apache.spark.api.java.function.{Function => JFunction}
-import org.apache.spark.util.StatCounter
import org.apache.spark.partial.{BoundedDouble, PartialResult}
+import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
+import org.apache.spark.util.StatCounter
-import java.lang.Double
-import org.apache.spark.Partitioner
-
-import scala.collection.JavaConverters._
-
-class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, JavaDoubleRDD] {
+class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[JDouble, JavaDoubleRDD] {
- override val classTag: ClassTag[Double] = implicitly[ClassTag[Double]]
+ override val classTag: ClassTag[JDouble] = implicitly[ClassTag[JDouble]]
- override val rdd: RDD[Double] = srdd.map(x => Double.valueOf(x))
+ override val rdd: RDD[JDouble] = srdd.map(x => JDouble.valueOf(x))
- override def wrapRDD(rdd: RDD[Double]): JavaDoubleRDD =
+ override def wrapRDD(rdd: RDD[JDouble]): JavaDoubleRDD =
new JavaDoubleRDD(rdd.map(_.doubleValue))
// Common RDD functions
@@ -67,7 +65,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
def unpersist(blocking: Boolean): JavaDoubleRDD = fromRDD(srdd.unpersist(blocking))
// first() has to be overriden here in order for its return type to be Double instead of Object.
- override def first(): Double = srdd.first()
+ override def first(): JDouble = srdd.first()
// Transformations (return a new RDD)
@@ -84,7 +82,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
/**
* Return a new RDD containing only the elements that satisfy a predicate.
*/
- def filter(f: JFunction[Double, java.lang.Boolean]): JavaDoubleRDD =
+ def filter(f: JFunction[JDouble, java.lang.Boolean]): JavaDoubleRDD =
fromRDD(srdd.filter(x => f(x).booleanValue()))
/**
@@ -133,7 +131,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
/**
* Return a sampled subset of this RDD.
*/
- def sample(withReplacement: Boolean, fraction: Double, seed: Int): JavaDoubleRDD =
+ def sample(withReplacement: Boolean, fraction: JDouble, seed: Int): JavaDoubleRDD =
fromRDD(srdd.sample(withReplacement, fraction, seed))
/**
@@ -145,7 +143,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
// Double RDD functions
/** Add up the elements in this RDD. */
- def sum(): Double = srdd.sum()
+ def sum(): JDouble = srdd.sum()
/**
* Return a [[org.apache.spark.util.StatCounter]] object that captures the mean, variance and
@@ -154,35 +152,35 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
def stats(): StatCounter = srdd.stats()
/** Compute the mean of this RDD's elements. */
- def mean(): Double = srdd.mean()
+ def mean(): JDouble = srdd.mean()
/** Compute the variance of this RDD's elements. */
- def variance(): Double = srdd.variance()
+ def variance(): JDouble = srdd.variance()
/** Compute the standard deviation of this RDD's elements. */
- def stdev(): Double = srdd.stdev()
+ def stdev(): JDouble = srdd.stdev()
/**
* Compute the sample standard deviation of this RDD's elements (which corrects for bias in
* estimating the standard deviation by dividing by N-1 instead of N).
*/
- def sampleStdev(): Double = srdd.sampleStdev()
+ def sampleStdev(): JDouble = srdd.sampleStdev()
/**
* Compute the sample variance of this RDD's elements (which corrects for bias in
* estimating the standard variance by dividing by N-1 instead of N).
*/
- def sampleVariance(): Double = srdd.sampleVariance()
+ def sampleVariance(): JDouble = srdd.sampleVariance()
/** Return the approximate mean of the elements in this RDD. */
- def meanApprox(timeout: Long, confidence: Double): PartialResult[BoundedDouble] =
+ def meanApprox(timeout: Long, confidence: JDouble): PartialResult[BoundedDouble] =
srdd.meanApprox(timeout, confidence)
/** (Experimental) Approximate operation to return the mean within a timeout. */
def meanApprox(timeout: Long): PartialResult[BoundedDouble] = srdd.meanApprox(timeout)
/** (Experimental) Approximate operation to return the sum within a timeout. */
- def sumApprox(timeout: Long, confidence: Double): PartialResult[BoundedDouble] =
+ def sumApprox(timeout: Long, confidence: JDouble): PartialResult[BoundedDouble] =
srdd.sumApprox(timeout, confidence)
/** (Experimental) Approximate operation to return the sum within a timeout. */
@@ -222,7 +220,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
srdd.histogram(buckets, false)
}
- def histogram(buckets: Array[Double], evenBuckets: Boolean): Array[Long] = {
+ def histogram(buckets: Array[JDouble], evenBuckets: Boolean): Array[Long] = {
srdd.histogram(buckets.map(_.toDouble), evenBuckets)
}
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
index 7b73057953..0055c98844 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
@@ -20,8 +20,8 @@ package org.apache.spark.api.java
import scala.reflect.ClassTag
import org.apache.spark._
-import org.apache.spark.rdd.RDD
import org.apache.spark.api.java.function.{Function => JFunction}
+import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
class JavaRDD[T](val rdd: RDD[T])(implicit val classTag: ClassTag[T])
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java
index 3e85052cd0..30e6a52474 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java
@@ -17,7 +17,6 @@
package org.apache.spark.api.java.function;
-
import java.io.Serializable;
/**
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java
index 5e9b8c48b8..490da255bc 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java
@@ -17,7 +17,6 @@
package org.apache.spark.api.java.function;
-
import java.io.Serializable;
/**
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/Function.java b/core/src/main/scala/org/apache/spark/api/java/function/Function.java
index 537439ef53..e0fcd460c8 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/Function.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/Function.java
@@ -17,11 +17,10 @@
package org.apache.spark.api.java.function;
-import scala.reflect.ClassTag;
-import scala.reflect.ClassTag$;
-
import java.io.Serializable;
+import scala.reflect.ClassTag;
+import scala.reflect.ClassTag$;
/**
* Base class for functions whose return types do not create special RDDs. PairFunction and
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/Function2.java b/core/src/main/scala/org/apache/spark/api/java/function/Function2.java
index a2d1214fb4..16d7379462 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/Function2.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/Function2.java
@@ -17,11 +17,11 @@
package org.apache.spark.api.java.function;
+import java.io.Serializable;
+
import scala.reflect.ClassTag;
import scala.reflect.ClassTag$;
-import java.io.Serializable;
-
/**
* A two-argument function that takes arguments of type T1 and T2 and returns an R.
*/
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/Function3.java b/core/src/main/scala/org/apache/spark/api/java/function/Function3.java
index fb1deceab5..096eb71f95 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/Function3.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/Function3.java
@@ -17,11 +17,10 @@
package org.apache.spark.api.java.function;
+import java.io.Serializable;
+
import scala.reflect.ClassTag;
import scala.reflect.ClassTag$;
-import scala.runtime.AbstractFunction2;
-
-import java.io.Serializable;
/**
* A three-argument function that takes arguments of type T1, T2 and T3 and returns an R.
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java
index ca485b3cc2..c72b98c28a 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java
@@ -17,12 +17,12 @@
package org.apache.spark.api.java.function;
+import java.io.Serializable;
+
import scala.Tuple2;
import scala.reflect.ClassTag;
import scala.reflect.ClassTag$;
-import java.io.Serializable;
-
/**
* A function that returns zero or more key-value pair records from each input record. The
* key-value pairs are represented as scala.Tuple2 objects.
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java
index cbe2306026..84b9136d98 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java
@@ -17,12 +17,12 @@
package org.apache.spark.api.java.function;
+import java.io.Serializable;
+
import scala.Tuple2;
import scala.reflect.ClassTag;
import scala.reflect.ClassTag$;
-import java.io.Serializable;
-
/**
* A function that returns key-value pairs (Tuple2<K, V>), and can be used to construct PairRDDs.
*/
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala b/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala
index 35eca62ecd..95bec5030b 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala
@@ -17,8 +17,6 @@
package org.apache.spark.api.python
-import java.util.Arrays
-
import org.apache.spark.Partitioner
import org.apache.spark.util.Utils
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
index 33667a998e..e4d0285710 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
@@ -24,9 +24,9 @@ import java.util.{List => JList, ArrayList => JArrayList, Map => JMap, Collectio
import scala.collection.JavaConversions._
import scala.reflect.ClassTag
+import org.apache.spark._
import org.apache.spark.api.java.{JavaSparkContext, JavaPairRDD, JavaRDD}
import org.apache.spark.broadcast.Broadcast
-import org.apache.spark._
import org.apache.spark.rdd.RDD
import org.apache.spark.util.Utils
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
index f291266fcf..a5f0f3d5e7 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
@@ -17,8 +17,8 @@
package org.apache.spark.api.python
-import java.io.{OutputStreamWriter, File, DataInputStream, IOException}
-import java.net.{ServerSocket, Socket, SocketException, InetAddress}
+import java.io.{DataInputStream, File, IOException, OutputStreamWriter}
+import java.net.{InetAddress, ServerSocket, Socket, SocketException}
import scala.collection.JavaConversions._
diff --git a/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala
index 39ee0dbb92..20207c2613 100644
--- a/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala
@@ -24,7 +24,7 @@ import java.util.concurrent.TimeUnit
import it.unimi.dsi.fastutil.io.FastBufferedInputStream
import it.unimi.dsi.fastutil.io.FastBufferedOutputStream
-import org.apache.spark.{SparkConf, HttpServer, Logging, SparkEnv}
+import org.apache.spark.{HttpServer, Logging, SparkConf, SparkEnv}
import org.apache.spark.io.CompressionCodec
import org.apache.spark.storage.{BroadcastBlockId, StorageLevel}
import org.apache.spark.util.{MetadataCleaner, MetadataCleanerType, TimeStampedHashSet, Utils}
diff --git a/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala
index ec997255d5..22d783c859 100644
--- a/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala
@@ -26,7 +26,6 @@ import org.apache.spark._
import org.apache.spark.storage.{BroadcastBlockId, BroadcastHelperBlockId, StorageLevel}
import org.apache.spark.util.Utils
-
private[spark] class TorrentBroadcast[T](@transient var value_ : T, isLocal: Boolean, id: Long)
extends Broadcast[T](id) with Logging with Serializable {
diff --git a/core/src/main/scala/org/apache/spark/deploy/Client.scala b/core/src/main/scala/org/apache/spark/deploy/Client.scala
index 9987e2300c..eb5676b51d 100644
--- a/core/src/main/scala/org/apache/spark/deploy/Client.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/Client.scala
@@ -23,13 +23,13 @@ import scala.concurrent._
import akka.actor._
import akka.pattern.ask
+import akka.remote.{AssociationErrorEvent, DisassociatedEvent, RemotingLifecycleEvent}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{Logging, SparkConf}
import org.apache.spark.deploy.DeployMessages._
import org.apache.spark.deploy.master.{DriverState, Master}
import org.apache.spark.util.{AkkaUtils, Utils}
-import akka.remote.{AssociationErrorEvent, DisassociatedEvent, RemotingLifecycleEvent}
/**
* Proxy that relays messages to the driver.
diff --git a/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala b/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
index 7de7c4864e..190b331cfe 100644
--- a/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
@@ -21,10 +21,10 @@ import java.io._
import java.net.URL
import java.util.concurrent.TimeoutException
+import scala.collection.mutable.ListBuffer
import scala.concurrent.{Await, future, promise}
-import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
-import scala.collection.mutable.ListBuffer
+import scala.concurrent.duration._
import scala.sys.process._
import net.liftweb.json.JsonParser
diff --git a/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala b/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
index 33e69371b8..318beb5db5 100644
--- a/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
@@ -20,10 +20,9 @@ package org.apache.spark.deploy
import net.liftweb.json.JsonDSL._
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
-import org.apache.spark.deploy.master.{ApplicationInfo, WorkerInfo, DriverInfo}
+import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
import org.apache.spark.deploy.worker.ExecutorRunner
-
private[spark] object JsonProtocol {
def writeWorkerInfo(obj: WorkerInfo) = {
("id" -> obj.id) ~
diff --git a/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala b/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
index 488843a32c..a73b459c3c 100644
--- a/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
@@ -17,14 +17,14 @@
package org.apache.spark.deploy
+import scala.collection.mutable.ArrayBuffer
+
import akka.actor.ActorSystem
+import org.apache.spark.{Logging, SparkConf}
import org.apache.spark.deploy.worker.Worker
import org.apache.spark.deploy.master.Master
import org.apache.spark.util.Utils
-import org.apache.spark.{SparkConf, Logging}
-
-import scala.collection.mutable.ArrayBuffer
/**
* Testing class that creates a Spark standalone process in-cluster (that is, running the
diff --git a/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala b/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
index 8017932032..1550c3eb42 100644
--- a/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
@@ -17,9 +17,9 @@
package org.apache.spark.deploy.client
-import org.apache.spark.util.{Utils, AkkaUtils}
-import org.apache.spark.{SparkConf, SparkContext, Logging}
-import org.apache.spark.deploy.{Command, ApplicationDescription}
+import org.apache.spark.{Logging, SparkConf}
+import org.apache.spark.deploy.{ApplicationDescription, Command}
+import org.apache.spark.util.{AkkaUtils, Utils}
private[spark] object TestClient {
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
index 3e26379166..e8867bc169 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
@@ -17,11 +17,14 @@
package org.apache.spark.deploy.master
-import org.apache.spark.deploy.ApplicationDescription
import java.util.Date
-import akka.actor.ActorRef
+
import scala.collection.mutable
+import akka.actor.ActorRef
+
+import org.apache.spark.deploy.ApplicationDescription
+
private[spark] class ApplicationInfo(
val startTime: Long,
val id: String,
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala b/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala
index 74bb9ebf1d..aa85aa060d 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala
@@ -20,6 +20,7 @@ package org.apache.spark.deploy.master
import java.io._
import akka.serialization.Serialization
+
import org.apache.spark.Logging
/**
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
index e44f90c141..51794ce40c 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
@@ -30,15 +30,14 @@ import akka.pattern.ask
import akka.remote.{DisassociatedEvent, RemotingLifecycleEvent}
import akka.serialization.SerializationExtension
-
-import org.apache.spark.{SparkConf, Logging, SparkException}
+import org.apache.spark.{Logging, SparkConf, SparkException}
import org.apache.spark.deploy.{ApplicationDescription, DriverDescription, ExecutorState}
import org.apache.spark.deploy.DeployMessages._
+import org.apache.spark.deploy.master.DriverState.DriverState
import org.apache.spark.deploy.master.MasterMessages._
import org.apache.spark.deploy.master.ui.MasterWebUI
import org.apache.spark.metrics.MetricsSystem
import org.apache.spark.util.{AkkaUtils, Utils}
-import org.apache.spark.deploy.master.DriverState.DriverState
private[spark] class Master(host: String, port: Int, webUiPort: Int) extends Actor with Logging {
import context.dispatcher // to use Akka's scheduler.schedule()
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala b/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
index e7f3224091..a87781fb93 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
@@ -17,8 +17,8 @@
package org.apache.spark.deploy.master
-import org.apache.spark.util.{Utils, IntParam}
import org.apache.spark.SparkConf
+import org.apache.spark.util.{IntParam, Utils}
/**
* Command-line parser for the master.
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/SparkZooKeeperSession.scala b/core/src/main/scala/org/apache/spark/deploy/master/SparkZooKeeperSession.scala
index 999090ad74..57758055b1 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/SparkZooKeeperSession.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/SparkZooKeeperSession.scala
@@ -23,7 +23,7 @@ import org.apache.zookeeper._
import org.apache.zookeeper.Watcher.Event.KeeperState
import org.apache.zookeeper.data.Stat
-import org.apache.spark.{SparkConf, Logging}
+import org.apache.spark.{Logging, SparkConf}
/**
* Provides a Scala-side interface to the standard ZooKeeper client, with the addition of retry
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala
index 77c23fb9fb..47b8f67f8a 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala
@@ -21,7 +21,7 @@ import akka.actor.ActorRef
import org.apache.zookeeper._
import org.apache.zookeeper.Watcher.Event.EventType
-import org.apache.spark.{SparkConf, Logging}
+import org.apache.spark.{Logging, SparkConf}
import org.apache.spark.deploy.master.MasterMessages._
private[spark] class ZooKeeperLeaderElectionAgent(val masterActor: ActorRef,
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala
index 10816a1f43..48b2fc06a9 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala
@@ -17,10 +17,10 @@
package org.apache.spark.deploy.master
-import org.apache.spark.{SparkConf, Logging}
+import akka.serialization.Serialization
import org.apache.zookeeper._
-import akka.serialization.Serialization
+import org.apache.spark.{Logging, SparkConf}
class ZooKeeperPersistenceEngine(serialization: Serialization, conf: SparkConf)
extends PersistenceEngine
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
index f29a6ad2e7..5cc4adbe44 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
@@ -17,11 +17,12 @@
package org.apache.spark.deploy.master.ui
+import javax.servlet.http.HttpServletRequest
+
import scala.concurrent.Await
import scala.xml.Node
import akka.pattern.ask
-import javax.servlet.http.HttpServletRequest
import net.liftweb.json.JsonAST.JValue
import org.apache.spark.deploy.JsonProtocol
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
index 04f9a22a25..01c8f9065e 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
@@ -17,12 +17,12 @@
package org.apache.spark.deploy.master.ui
+import javax.servlet.http.HttpServletRequest
+
import scala.concurrent.Await
-import scala.concurrent.duration._
import scala.xml.Node
import akka.pattern.ask
-import javax.servlet.http.HttpServletRequest
import net.liftweb.json.JsonAST.JValue
import org.apache.spark.deploy.{DeployWebUI, JsonProtocol}
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
index 05c4df891e..5ab13e7aa6 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
@@ -18,6 +18,7 @@
package org.apache.spark.deploy.master.ui
import javax.servlet.http.HttpServletRequest
+
import org.eclipse.jetty.server.{Handler, Server}
import org.apache.spark.Logging
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
index 2ceccc703d..0c761dfc93 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
@@ -17,7 +17,7 @@
package org.apache.spark.deploy.worker
-import java.io.{File, FileOutputStream, IOException, InputStream}
+import java.io.{File, FileOutputStream, InputStream, IOException}
import java.lang.System._
import org.apache.spark.Logging
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
index 18885d7ca6..2edd921066 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
@@ -20,12 +20,11 @@ package org.apache.spark.deploy.worker
import java.io._
import akka.actor.ActorRef
-
import com.google.common.base.Charsets
import com.google.common.io.Files
import org.apache.spark.Logging
-import org.apache.spark.deploy.{ExecutorState, ApplicationDescription, Command}
+import org.apache.spark.deploy.{ApplicationDescription, Command, ExecutorState}
import org.apache.spark.deploy.DeployMessages.ExecutorStateChanged
/**
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
index f4ee0e2343..7b0b7861b7 100755
--- a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
@@ -31,7 +31,6 @@ import org.apache.spark.{Logging, SparkConf, SparkException}
import org.apache.spark.deploy.{ExecutorDescription, ExecutorState}
import org.apache.spark.deploy.DeployMessages._
import org.apache.spark.deploy.master.{DriverState, Master}
-import org.apache.spark.deploy.master.DriverState.DriverState
import org.apache.spark.deploy.worker.ui.WorkerWebUI
import org.apache.spark.metrics.MetricsSystem
import org.apache.spark.util.{AkkaUtils, Utils}
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
index 3ed528e6b3..d35d5be73f 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
@@ -17,9 +17,10 @@
package org.apache.spark.deploy.worker
-import org.apache.spark.util.{Utils, IntParam, MemoryParam}
import java.lang.management.ManagementFactory
+import org.apache.spark.util.{IntParam, MemoryParam, Utils}
+
/**
* Command-line parser for the master.
*/
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
index 86688e4424..bdf126f93a 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
@@ -18,11 +18,11 @@
package org.apache.spark.deploy.worker.ui
import java.io.File
-
import javax.servlet.http.HttpServletRequest
+
import org.eclipse.jetty.server.{Handler, Server}
-import org.apache.spark.{Logging, SparkConf}
+import org.apache.spark.Logging
import org.apache.spark.deploy.worker.Worker
import org.apache.spark.ui.{JettyUtils, UIUtils}
import org.apache.spark.ui.JettyUtils._
diff --git a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
index 45b43b403d..0aae569b17 100644
--- a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
@@ -22,7 +22,7 @@ import java.nio.ByteBuffer
import akka.actor._
import akka.remote._
-import org.apache.spark.{SparkConf, SparkContext, Logging}
+import org.apache.spark.{Logging, SparkConf}
import org.apache.spark.TaskState.TaskState
import org.apache.spark.deploy.worker.WorkerWatcher
import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._
diff --git a/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala
index ad7dd34c76..3d34960653 100644
--- a/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala
@@ -18,6 +18,7 @@
package org.apache.spark.executor
import java.nio.ByteBuffer
+
import org.apache.spark.TaskState.TaskState
/**
diff --git a/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala b/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala
index c2e973e173..127f5e90f3 100644
--- a/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala
+++ b/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala
@@ -17,12 +17,11 @@
package org.apache.spark.executor
-import com.codahale.metrics.{Gauge, MetricRegistry}
+import scala.collection.JavaConversions._
+import com.codahale.metrics.{Gauge, MetricRegistry}
import org.apache.hadoop.fs.FileSystem
-import scala.collection.JavaConversions._
-
import org.apache.spark.metrics.source.Source
class ExecutorSource(val executor: Executor, executorId: String) extends Source {
diff --git a/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
index b56d8c9912..6fc702fdb1 100644
--- a/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
@@ -20,8 +20,7 @@ package org.apache.spark.executor
import java.nio.ByteBuffer
import com.google.protobuf.ByteString
-
-import org.apache.mesos.{Executor => MesosExecutor, MesosExecutorDriver, MesosNativeLibrary, ExecutorDriver}
+import org.apache.mesos.{Executor => MesosExecutor, ExecutorDriver, MesosExecutorDriver, MesosNativeLibrary}
import org.apache.mesos.Protos.{TaskStatus => MesosTaskStatus, _}
import org.apache.spark.Logging
@@ -29,7 +28,6 @@ import org.apache.spark.TaskState
import org.apache.spark.TaskState.TaskState
import org.apache.spark.util.Utils
-
private[spark] class MesosExecutorBackend
extends MesosExecutor
with ExecutorBackend
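Within a single braced import the members are alphabetized by their original names, so a renamed member keeps the position of the name it had before the rename. A sketch of that rule as applied in the MesosExecutorBackend hunk above (before/after shown as comments for illustration only):

    // before: braced members in arbitrary order
    import org.apache.mesos.{Executor => MesosExecutor, MesosExecutorDriver, MesosNativeLibrary, ExecutorDriver}
    // after: sorted by original name, so Executor => MesosExecutor still leads the list
    import org.apache.mesos.{Executor => MesosExecutor, ExecutorDriver, MesosExecutorDriver, MesosNativeLibrary}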
diff --git a/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala b/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
index 5980177320..848b5c439b 100644
--- a/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
+++ b/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
@@ -20,10 +20,9 @@ package org.apache.spark.io
import java.io.{InputStream, OutputStream}
import com.ning.compress.lzf.{LZFInputStream, LZFOutputStream}
-
import org.xerial.snappy.{SnappyInputStream, SnappyOutputStream}
-import org.apache.spark.{SparkEnv, SparkConf}
+import org.apache.spark.SparkConf
/**
* CompressionCodec allows the customization of choosing different compression implementations
diff --git a/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala
index e54ac0b332..6883a54494 100644
--- a/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala
@@ -17,8 +17,8 @@
package org.apache.spark.metrics
+import java.io.{FileInputStream, InputStream}
import java.util.Properties
-import java.io.{File, FileInputStream, InputStream, IOException}
import scala.collection.mutable
import scala.util.matching.Regex
diff --git a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
index de233e416a..966c092124 100644
--- a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
@@ -17,14 +17,14 @@
package org.apache.spark.metrics
-import com.codahale.metrics.{Metric, MetricFilter, MetricRegistry}
-
import java.util.Properties
import java.util.concurrent.TimeUnit
import scala.collection.mutable
-import org.apache.spark.{SparkConf, Logging}
+import com.codahale.metrics.{Metric, MetricFilter, MetricRegistry}
+
+import org.apache.spark.{Logging, SparkConf}
import org.apache.spark.metrics.sink.{MetricsServlet, Sink}
import org.apache.spark.metrics.source.Source
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
index bce257d6e6..98fa1dbd7c 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
@@ -17,11 +17,11 @@
package org.apache.spark.metrics.sink
-import com.codahale.metrics.{ConsoleReporter, MetricRegistry}
-
import java.util.Properties
import java.util.concurrent.TimeUnit
+import com.codahale.metrics.{ConsoleReporter, MetricRegistry}
+
import org.apache.spark.metrics.MetricsSystem
class ConsoleSink(val property: Properties, val registry: MetricRegistry) extends Sink {
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
index 3d1a06a395..40f64768e6 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
@@ -17,12 +17,12 @@
package org.apache.spark.metrics.sink
-import com.codahale.metrics.{CsvReporter, MetricRegistry}
-
import java.io.File
import java.util.{Locale, Properties}
import java.util.concurrent.TimeUnit
+import com.codahale.metrics.{CsvReporter, MetricRegistry}
+
import org.apache.spark.metrics.MetricsSystem
class CsvSink(val property: Properties, val registry: MetricRegistry) extends Sink {
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala
index b924907070..410ca0704b 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala
@@ -20,8 +20,8 @@ package org.apache.spark.metrics.sink
import java.util.Properties
import java.util.concurrent.TimeUnit
-import com.codahale.metrics.ganglia.GangliaReporter
import com.codahale.metrics.MetricRegistry
+import com.codahale.metrics.ganglia.GangliaReporter
import info.ganglia.gmetric4j.gmetric.GMetric
import org.apache.spark.metrics.MetricsSystem
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
index cdcfec8ca7..e09be00142 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
@@ -17,12 +17,12 @@
package org.apache.spark.metrics.sink
+import java.net.InetSocketAddress
import java.util.Properties
import java.util.concurrent.TimeUnit
-import java.net.InetSocketAddress
import com.codahale.metrics.MetricRegistry
-import com.codahale.metrics.graphite.{GraphiteReporter, Graphite}
+import com.codahale.metrics.graphite.{Graphite, GraphiteReporter}
import org.apache.spark.metrics.MetricsSystem
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala
index 621d086d41..b5cf210af2 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala
@@ -17,10 +17,10 @@
package org.apache.spark.metrics.sink
-import com.codahale.metrics.{JmxReporter, MetricRegistry}
-
import java.util.Properties
+import com.codahale.metrics.{JmxReporter, MetricRegistry}
+
class JmxSink(val property: Properties, val registry: MetricRegistry) extends Sink {
val reporter: JmxReporter = JmxReporter.forRegistry(registry).build()
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala b/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
index 99357fede6..3cdfe26d40 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
@@ -17,15 +17,13 @@
package org.apache.spark.metrics.sink
-import com.codahale.metrics.MetricRegistry
-import com.codahale.metrics.json.MetricsModule
-
-import com.fasterxml.jackson.databind.ObjectMapper
-
import java.util.Properties
import java.util.concurrent.TimeUnit
import javax.servlet.http.HttpServletRequest
+import com.codahale.metrics.MetricRegistry
+import com.codahale.metrics.json.MetricsModule
+import com.fasterxml.jackson.databind.ObjectMapper
import org.eclipse.jetty.server.Handler
import org.apache.spark.ui.JettyUtils
diff --git a/core/src/main/scala/org/apache/spark/network/BufferMessage.scala b/core/src/main/scala/org/apache/spark/network/BufferMessage.scala
index fb4c65909a..d3c09b1606 100644
--- a/core/src/main/scala/org/apache/spark/network/BufferMessage.scala
+++ b/core/src/main/scala/org/apache/spark/network/BufferMessage.scala
@@ -23,7 +23,6 @@ import scala.collection.mutable.ArrayBuffer
import org.apache.spark.storage.BlockManager
-
private[spark]
class BufferMessage(id_ : Int, val buffers: ArrayBuffer[ByteBuffer], var ackId: Int)
extends Message(Message.BUFFER_MESSAGE, id_) {
diff --git a/core/src/main/scala/org/apache/spark/network/Connection.scala b/core/src/main/scala/org/apache/spark/network/Connection.scala
index ae2007e41b..f2e3c1a14e 100644
--- a/core/src/main/scala/org/apache/spark/network/Connection.scala
+++ b/core/src/main/scala/org/apache/spark/network/Connection.scala
@@ -17,16 +17,13 @@
package org.apache.spark.network
-import org.apache.spark._
-
-import scala.collection.mutable.{HashMap, Queue, ArrayBuffer}
-
-import java.io._
+import java.net._
import java.nio._
import java.nio.channels._
-import java.nio.channels.spi._
-import java.net._
+import scala.collection.mutable.{ArrayBuffer, HashMap, Queue}
+
+import org.apache.spark._
private[spark]
abstract class Connection(val channel: SocketChannel, val selector: Selector,
diff --git a/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
index a78d6ac70f..3dd82bee0b 100644
--- a/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
@@ -17,24 +17,21 @@
package org.apache.spark.network
-import org.apache.spark._
-
+import java.net._
import java.nio._
import java.nio.channels._
import java.nio.channels.spi._
-import java.net._
import java.util.concurrent.{LinkedBlockingDeque, TimeUnit, ThreadPoolExecutor}
-import scala.collection.mutable.HashSet
+import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashMap
+import scala.collection.mutable.HashSet
import scala.collection.mutable.SynchronizedMap
import scala.collection.mutable.SynchronizedQueue
-import scala.collection.mutable.ArrayBuffer
-
-import scala.concurrent.{Await, Promise, ExecutionContext, Future}
-import scala.concurrent.duration.Duration
+import scala.concurrent.{Await, ExecutionContext, Future, Promise}
import scala.concurrent.duration._
+import org.apache.spark._
import org.apache.spark.util.Utils
private[spark] class ConnectionManager(port: Int, conf: SparkConf) extends Logging {
diff --git a/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala
index 50dd9bc2d1..b82edb6850 100644
--- a/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala
@@ -21,7 +21,6 @@ import java.net.InetSocketAddress
import org.apache.spark.util.Utils
-
private[spark] case class ConnectionManagerId(host: String, port: Int) {
// DEBUG code
Utils.checkHost(host)
diff --git a/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
index 8e5c5296cb..35f64134b0 100644
--- a/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
@@ -17,16 +17,13 @@
package org.apache.spark.network
-import org.apache.spark._
-import org.apache.spark.SparkContext._
-
-import scala.io.Source
-
import java.nio.ByteBuffer
-import java.net.InetAddress
import scala.concurrent.Await
import scala.concurrent.duration._
+import scala.io.Source
+
+import org.apache.spark._
private[spark] object ConnectionManagerTest extends Logging{
def main(args: Array[String]) {
diff --git a/core/src/main/scala/org/apache/spark/network/Message.scala b/core/src/main/scala/org/apache/spark/network/Message.scala
index 2612884bdb..20fe676618 100644
--- a/core/src/main/scala/org/apache/spark/network/Message.scala
+++ b/core/src/main/scala/org/apache/spark/network/Message.scala
@@ -17,12 +17,11 @@
package org.apache.spark.network
-import java.nio.ByteBuffer
import java.net.InetSocketAddress
+import java.nio.ByteBuffer
import scala.collection.mutable.ArrayBuffer
-
private[spark] abstract class Message(val typ: Long, val id: Int) {
var senderAddress: InetSocketAddress = null
var started = false
diff --git a/core/src/main/scala/org/apache/spark/network/MessageChunk.scala b/core/src/main/scala/org/apache/spark/network/MessageChunk.scala
index e0fe57b80d..d0f986a12b 100644
--- a/core/src/main/scala/org/apache/spark/network/MessageChunk.scala
+++ b/core/src/main/scala/org/apache/spark/network/MessageChunk.scala
@@ -21,7 +21,6 @@ import java.nio.ByteBuffer
import scala.collection.mutable.ArrayBuffer
-
private[network]
class MessageChunk(val header: MessageChunkHeader, val buffer: ByteBuffer) {
diff --git a/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala b/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala
index 235fbc39b3..9bcbc6141a 100644
--- a/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala
+++ b/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala
@@ -21,7 +21,6 @@ import java.net.InetAddress
import java.net.InetSocketAddress
import java.nio.ByteBuffer
-
private[spark] class MessageChunkHeader(
val typ: Long,
val id: Int,
diff --git a/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala b/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
index 1c9d6030d6..9976255c7e 100644
--- a/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
@@ -18,7 +18,7 @@
package org.apache.spark.network
import java.nio.ByteBuffer
-import java.net.InetAddress
+
import org.apache.spark.SparkConf
private[spark] object ReceiverTest {
diff --git a/core/src/main/scala/org/apache/spark/network/SenderTest.scala b/core/src/main/scala/org/apache/spark/network/SenderTest.scala
index 162d49bf61..646f8425d9 100644
--- a/core/src/main/scala/org/apache/spark/network/SenderTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/SenderTest.scala
@@ -18,7 +18,7 @@
package org.apache.spark.network
import java.nio.ByteBuffer
-import java.net.InetAddress
+
import org.apache.spark.SparkConf
private[spark] object SenderTest {
diff --git a/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala b/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala
index 1b9fa1e53a..f9082ffb91 100644
--- a/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala
+++ b/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala
@@ -20,7 +20,7 @@ package org.apache.spark.network.netty
import io.netty.buffer._
import org.apache.spark.Logging
-import org.apache.spark.storage.{TestBlockId, BlockId}
+import org.apache.spark.storage.{BlockId, TestBlockId}
private[spark] class FileHeader (
val fileLen: Int,
diff --git a/core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala b/core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala
index d87157e12c..e7b2855e1e 100644
--- a/core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala
+++ b/core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala
@@ -19,17 +19,16 @@ package org.apache.spark.network.netty
import java.util.concurrent.Executors
+import scala.collection.JavaConverters._
+
import io.netty.buffer.ByteBuf
import io.netty.channel.ChannelHandlerContext
import io.netty.util.CharsetUtil
-import org.apache.spark.{SparkContext, SparkConf, Logging}
+import org.apache.spark.{Logging, SparkConf}
import org.apache.spark.network.ConnectionManagerId
-
-import scala.collection.JavaConverters._
import org.apache.spark.storage.BlockId
-
private[spark] class ShuffleCopier(conf: SparkConf) extends Logging {
def getBlock(host: String, port: Int, blockId: BlockId,
diff --git a/core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala b/core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala
index 44204a8c46..7ef7aecc6a 100644
--- a/core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala
+++ b/core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala
@@ -23,7 +23,6 @@ import org.apache.spark.Logging
import org.apache.spark.util.Utils
import org.apache.spark.storage.{BlockId, FileSegment}
-
private[spark] class ShuffleSender(portIn: Int, val pResolver: PathResolver) extends Logging {
val server = new FileServer(pResolver, portIn)
diff --git a/core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala b/core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala
index 423ff67a5f..d25452daf7 100644
--- a/core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala
+++ b/core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala
@@ -18,8 +18,8 @@
package org.apache.spark.partial
import org.apache.spark._
-import org.apache.spark.scheduler.JobListener
import org.apache.spark.rdd.RDD
+import org.apache.spark.scheduler.JobListener
/**
* A JobListener for an approximate single-result action, such as count() or non-parallel reduce().
diff --git a/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala
index e519e3a548..40b70baabc 100644
--- a/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala
@@ -18,14 +18,12 @@
package org.apache.spark.partial
import java.util.{HashMap => JHashMap}
-import java.util.{Map => JMap}
+import scala.collection.JavaConversions.mapAsScalaMap
import scala.collection.Map
import scala.collection.mutable.HashMap
-import scala.collection.JavaConversions.mapAsScalaMap
import cern.jet.stat.Probability
-
import it.unimi.dsi.fastutil.objects.{Object2LongOpenHashMap => OLMap}
/**
diff --git a/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala
index cf8a5680b6..b5111891ed 100644
--- a/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala
@@ -18,11 +18,10 @@
package org.apache.spark.partial
import java.util.{HashMap => JHashMap}
-import java.util.{Map => JMap}
-import scala.collection.mutable.HashMap
-import scala.collection.Map
import scala.collection.JavaConversions.mapAsScalaMap
+import scala.collection.Map
+import scala.collection.mutable.HashMap
import org.apache.spark.util.StatCounter
diff --git a/core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala
index 8225a5d933..442fb86227 100644
--- a/core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala
@@ -18,11 +18,10 @@
package org.apache.spark.partial
import java.util.{HashMap => JHashMap}
-import java.util.{Map => JMap}
-import scala.collection.mutable.HashMap
-import scala.collection.Map
import scala.collection.JavaConversions.mapAsScalaMap
+import scala.collection.Map
+import scala.collection.mutable.HashMap
import org.apache.spark.util.StatCounter
diff --git a/core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala b/core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala
index 424354ae16..e6c4a6d379 100644
--- a/core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala
@@ -19,7 +19,7 @@ package org.apache.spark.rdd
import scala.reflect.ClassTag
-import org.apache.spark.{SparkContext, SparkEnv, Partition, TaskContext}
+import org.apache.spark.{Partition, SparkContext, SparkEnv, TaskContext}
import org.apache.spark.storage.{BlockId, BlockManager}
private[spark] class BlockRDDPartition(val blockId: BlockId, idx: Int) extends Partition {
diff --git a/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala
index 87b950ba43..4908711d17 100644
--- a/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala
@@ -17,10 +17,11 @@
package org.apache.spark.rdd
-import java.io.{ObjectOutputStream, IOException}
+import java.io.{IOException, ObjectOutputStream}
+
import scala.reflect.ClassTag
-import org.apache.spark._
+import org.apache.spark._
private[spark]
class CartesianPartition(
diff --git a/core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala
index 8f9d1d5a84..888af541cf 100644
--- a/core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala
@@ -18,12 +18,15 @@
package org.apache.spark.rdd
import java.io.IOException
+
import scala.reflect.ClassTag
+
+import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.fs.Path
+
import org.apache.spark._
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.deploy.SparkHadoopUtil
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.Path
private[spark] class CheckpointRDDPartition(val index: Int) extends Partition {}
diff --git a/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala
index 0e47f2e022..699a10c96c 100644
--- a/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala
@@ -17,7 +17,7 @@
package org.apache.spark.rdd
-import java.io.{ObjectOutputStream, IOException}
+import java.io.{IOException, ObjectOutputStream}
import scala.collection.mutable.ArrayBuffer
diff --git a/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala
index dc345b2df0..4e82b51313 100644
--- a/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala
@@ -17,13 +17,14 @@
package org.apache.spark.rdd
-import org.apache.spark._
-import java.io.{ObjectOutputStream, IOException}
+import java.io.{IOException, ObjectOutputStream}
+
import scala.collection.mutable
-import scala.Some
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
+import org.apache.spark._
+
/**
* Class that captures a coalesced RDD by essentially keeping track of parent partitions
* @param index of this coalesced partition
diff --git a/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
index 20713b4249..a7b6b3b514 100644
--- a/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
@@ -17,14 +17,12 @@
package org.apache.spark.rdd
+import org.apache.spark.{TaskContext, Logging}
import org.apache.spark.partial.BoundedDouble
import org.apache.spark.partial.MeanEvaluator
import org.apache.spark.partial.PartialResult
import org.apache.spark.partial.SumEvaluator
import org.apache.spark.util.StatCounter
-import org.apache.spark.{TaskContext, Logging}
-
-import scala.collection.immutable.NumericRange
/**
* Extra functions available on RDDs of Doubles through an implicit conversion.
diff --git a/core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala b/core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala
index e74c83b90b..9e41b3d1e2 100644
--- a/core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala
@@ -17,9 +17,10 @@
package org.apache.spark.rdd
-import org.apache.spark.{OneToOneDependency, Partition, TaskContext}
import scala.reflect.ClassTag
+import org.apache.spark.{Partition, TaskContext}
+
private[spark] class FilteredRDD[T: ClassTag](
prev: RDD[T],
f: T => Boolean)
diff --git a/core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala
index 4d1878fc14..d8f87d4e36 100644
--- a/core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala
@@ -17,9 +17,9 @@
package org.apache.spark.rdd
-import org.apache.spark.{Partition, TaskContext}
import scala.reflect.ClassTag
+import org.apache.spark.{Partition, TaskContext}
private[spark]
class FlatMappedRDD[U: ClassTag, T: ClassTag](
diff --git a/core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala b/core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala
index 82000bac09..7c9023f62d 100644
--- a/core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala
@@ -17,8 +17,7 @@
package org.apache.spark.rdd
-import org.apache.spark.{TaskContext, Partition}
-
+import org.apache.spark.{Partition, TaskContext}
private[spark]
class FlatMappedValuesRDD[K, V, U](prev: RDD[_ <: Product2[K, V]], f: V => TraversableOnce[U])
diff --git a/core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala
index 1a694475f6..f6463fa715 100644
--- a/core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala
@@ -17,9 +17,10 @@
package org.apache.spark.rdd
-import org.apache.spark.{Partition, TaskContext}
import scala.reflect.ClassTag
+import org.apache.spark.{Partition, TaskContext}
+
private[spark] class GlommedRDD[T: ClassTag](prev: RDD[T])
extends RDD[Array[T]](prev) {
diff --git a/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
index ad74d4636f..a374fc4a87 100644
--- a/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
@@ -19,7 +19,7 @@ package org.apache.spark.rdd
import java.io.EOFException
-import org.apache.hadoop.conf.{Configuration, Configurable}
+import org.apache.hadoop.conf.{Configurable, Configuration}
import org.apache.hadoop.mapred.InputFormat
import org.apache.hadoop.mapred.InputSplit
import org.apache.hadoop.mapred.JobConf
@@ -32,7 +32,6 @@ import org.apache.spark.broadcast.Broadcast
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.util.NextIterator
-
/**
* A Spark split class that wraps around a Hadoop InputSplit.
*/
diff --git a/core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala b/core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala
index db15baf503..4883fb8288 100644
--- a/core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala
@@ -17,9 +17,10 @@
package org.apache.spark.rdd
-import org.apache.spark.{Partition, TaskContext}
import scala.reflect.ClassTag
+import org.apache.spark.{Partition, TaskContext}
+
private[spark] class MapPartitionsRDD[U: ClassTag, T: ClassTag](
prev: RDD[T],
f: (TaskContext, Int, Iterator[T]) => Iterator[U], // (TaskContext, partition index, iterator)
diff --git a/core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala b/core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala
index d33c1af581..2bc47eb9fc 100644
--- a/core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala
@@ -17,8 +17,7 @@
package org.apache.spark.rdd
-
-import org.apache.spark.{TaskContext, Partition}
+import org.apache.spark.{Partition, TaskContext}
private[spark]
class MappedValuesRDD[K, V, U](prev: RDD[_ <: Product2[K, V]], f: V => U)
diff --git a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
index 10d519e697..15bec39659 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
@@ -22,12 +22,13 @@ import java.text.SimpleDateFormat
import java.util.Date
import java.util.{HashMap => JHashMap}
+import scala.collection.JavaConversions._
import scala.collection.Map
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
-import scala.collection.JavaConversions._
-import scala.reflect.{ClassTag, classTag}
+import scala.reflect.ClassTag
+import com.clearspring.analytics.stream.cardinality.HyperLogLog
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.SequenceFile.CompressionType
@@ -38,15 +39,14 @@ import org.apache.hadoop.mapreduce.{Job => NewAPIHadoopJob}
import org.apache.hadoop.mapreduce.{RecordWriter => NewRecordWriter}
import org.apache.hadoop.mapreduce.lib.output.{FileOutputFormat => NewFileOutputFormat}
-import com.clearspring.analytics.stream.cardinality.HyperLogLog
-
// SparkHadoopWriter and SparkHadoopMapReduceUtil are actually source files defined in Spark.
import org.apache.hadoop.mapred.SparkHadoopWriter
import org.apache.hadoop.mapreduce.SparkHadoopMapReduceUtil
+
import org.apache.spark._
+import org.apache.spark.Partitioner.defaultPartitioner
import org.apache.spark.SparkContext._
import org.apache.spark.partial.{BoundedDouble, PartialResult}
-import org.apache.spark.Partitioner.defaultPartitioner
import org.apache.spark.util.SerializableHyperLogLog
/**
diff --git a/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala
index f270c1ac21..5f03d7d650 100644
--- a/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala
@@ -17,14 +17,15 @@
package org.apache.spark.rdd
+import java.io._
+
+import scala.Serializable
+import scala.collection.Map
import scala.collection.immutable.NumericRange
import scala.collection.mutable.ArrayBuffer
-import scala.collection.Map
import scala.reflect.ClassTag
import org.apache.spark._
-import java.io._
-import scala.Serializable
import org.apache.spark.serializer.JavaSerializer
import org.apache.spark.util.Utils
diff --git a/core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala
index ea8885b36e..b0440ca7f3 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala
@@ -19,8 +19,7 @@ package org.apache.spark.rdd
import scala.reflect.ClassTag
-import org.apache.spark.{NarrowDependency, SparkEnv, Partition, TaskContext}
-
+import org.apache.spark.{NarrowDependency, Partition, TaskContext}
class PartitionPruningRDDPartition(idx: Int, val parentSplit: Partition) extends Partition {
override val index = idx
diff --git a/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala
index f4364329a3..a84357b384 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala
@@ -17,10 +17,11 @@
package org.apache.spark.rdd
+import java.io.{IOException, ObjectOutputStream}
+
import scala.reflect.ClassTag
-import java.io.{ObjectOutputStream, IOException}
-import org.apache.spark.{TaskContext, OneToOneDependency, SparkContext, Partition}
+import org.apache.spark.{OneToOneDependency, Partition, SparkContext, TaskContext}
/**
* Class representing partitions of PartitionerAwareUnionRDD, which maintains the list of
diff --git a/core/src/main/scala/org/apache/spark/rdd/PartitionwiseSampledRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PartitionwiseSampledRDD.scala
index a74309d861..ce4c0d382b 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PartitionwiseSampledRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PartitionwiseSampledRDD.scala
@@ -21,7 +21,7 @@ import java.util.Random
import scala.reflect.ClassTag
-import org.apache.spark.{TaskContext, Partition}
+import org.apache.spark.{Partition, TaskContext}
import org.apache.spark.util.random.RandomSampler
private[spark]
diff --git a/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala
index 8ef919c4b5..abd4414e81 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala
@@ -20,14 +20,13 @@ package org.apache.spark.rdd
import java.io.PrintWriter
import java.util.StringTokenizer
-import scala.collection.Map
import scala.collection.JavaConversions._
+import scala.collection.Map
import scala.collection.mutable.ArrayBuffer
import scala.io.Source
import scala.reflect.ClassTag
-import org.apache.spark.{SparkEnv, Partition, TaskContext}
-
+import org.apache.spark.{Partition, SparkEnv, TaskContext}
/**
* An RDD that pipes the contents of each parent partition through an external command
diff --git a/core/src/main/scala/org/apache/spark/rdd/RDD.scala b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
index d4fc28f551..50320f4035 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
@@ -22,30 +22,27 @@ import java.util.Random
import scala.collection.Map
import scala.collection.JavaConversions.mapAsScalaMap
import scala.collection.mutable.ArrayBuffer
-
import scala.reflect.{classTag, ClassTag}
+import com.clearspring.analytics.stream.cardinality.HyperLogLog
+import it.unimi.dsi.fastutil.objects.{Object2LongOpenHashMap => OLMap}
import org.apache.hadoop.io.BytesWritable
import org.apache.hadoop.io.compress.CompressionCodec
import org.apache.hadoop.io.NullWritable
import org.apache.hadoop.io.Text
import org.apache.hadoop.mapred.TextOutputFormat
-import it.unimi.dsi.fastutil.objects.{Object2LongOpenHashMap => OLMap}
-import com.clearspring.analytics.stream.cardinality.HyperLogLog
-
+import org.apache.spark._
import org.apache.spark.Partitioner._
+import org.apache.spark.SparkContext._
import org.apache.spark.api.java.JavaRDD
import org.apache.spark.partial.BoundedDouble
import org.apache.spark.partial.CountEvaluator
import org.apache.spark.partial.GroupedCountEvaluator
import org.apache.spark.partial.PartialResult
import org.apache.spark.storage.StorageLevel
-import org.apache.spark.util.{Utils, BoundedPriorityQueue, SerializableHyperLogLog}
-
-import org.apache.spark.SparkContext._
-import org.apache.spark._
-import org.apache.spark.util.random.{PoissonSampler, BernoulliSampler}
+import org.apache.spark.util.{BoundedPriorityQueue, SerializableHyperLogLog, Utils}
+import org.apache.spark.util.random.{BernoulliSampler, PoissonSampler}
/**
* A Resilient Distributed Dataset (RDD), the basic abstraction in Spark. Represents an immutable,
diff --git a/core/src/main/scala/org/apache/spark/rdd/RDDCheckpointData.scala b/core/src/main/scala/org/apache/spark/rdd/RDDCheckpointData.scala
index 73e8769c09..953f0555e5 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDDCheckpointData.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDDCheckpointData.scala
@@ -20,9 +20,8 @@ package org.apache.spark.rdd
import scala.reflect.ClassTag
import org.apache.hadoop.fs.Path
-import org.apache.hadoop.conf.Configuration
-import org.apache.spark.{SerializableWritable, Partition, SparkException, Logging}
+import org.apache.spark.{Logging, Partition, SerializableWritable, SparkException}
import org.apache.spark.scheduler.{ResultTask, ShuffleMapTask}
/**
diff --git a/core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala b/core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala
index 08534b6f1d..b50307cfa4 100644
--- a/core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala
@@ -17,9 +17,10 @@
package org.apache.spark.rdd
-import scala.reflect.ClassTag
import java.util.Random
+import scala.reflect.ClassTag
+
import cern.jet.random.Poisson
import cern.jet.random.engine.DRand
diff --git a/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala
index c9b4c768a9..7df9a2960d 100644
--- a/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala
@@ -16,15 +16,15 @@
*/
package org.apache.spark.rdd
-import scala.reflect.{ ClassTag, classTag}
+import scala.reflect.{ClassTag, classTag}
+import org.apache.hadoop.io.Writable
+import org.apache.hadoop.io.compress.CompressionCodec
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.mapred.SequenceFileOutputFormat
-import org.apache.hadoop.io.compress.CompressionCodec
-import org.apache.hadoop.io.Writable
-import org.apache.spark.SparkContext._
import org.apache.spark.Logging
+import org.apache.spark.SparkContext._
/**
* Extra functions available on RDDs of (key, value) pairs to create a Hadoop SequenceFile,
diff --git a/core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala
index 0ccb309d0d..0bbda25a90 100644
--- a/core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala
@@ -19,8 +19,7 @@ package org.apache.spark.rdd
import scala.reflect.ClassTag
-import org.apache.spark.{Dependency, Partition, Partitioner, ShuffleDependency,
- SparkEnv, TaskContext}
+import org.apache.spark.{Dependency, Partition, Partitioner, ShuffleDependency, SparkEnv, TaskContext}
private[spark] class ShuffledRDDPartition(val idx: Int) extends Partition {
override val index = idx
diff --git a/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala
index 4f90c7d3d6..5fe9f363db 100644
--- a/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala
@@ -23,14 +23,13 @@ import scala.collection.JavaConversions._
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
-import org.apache.spark.Partitioner
import org.apache.spark.Dependency
-import org.apache.spark.TaskContext
+import org.apache.spark.OneToOneDependency
import org.apache.spark.Partition
-import org.apache.spark.SparkEnv
+import org.apache.spark.Partitioner
import org.apache.spark.ShuffleDependency
-import org.apache.spark.OneToOneDependency
-
+import org.apache.spark.SparkEnv
+import org.apache.spark.TaskContext
/**
* An optimized version of cogroup for set difference/subtraction.
diff --git a/core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala b/core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala
index 08a41ac558..a447030752 100644
--- a/core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala
@@ -17,12 +17,12 @@
package org.apache.spark.rdd
+import java.io.{IOException, ObjectOutputStream}
+
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
-import org.apache.spark.{Dependency, RangeDependency, SparkContext, Partition, TaskContext}
-
-import java.io.{ObjectOutputStream, IOException}
+import org.apache.spark.{Dependency, Partition, RangeDependency, SparkContext, TaskContext}
private[spark] class UnionPartition[T: ClassTag](idx: Int, rdd: RDD[T], splitIndex: Int)
extends Partition {
diff --git a/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala
index 83be3c6eb4..b56643444a 100644
--- a/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala
@@ -17,10 +17,12 @@
package org.apache.spark.rdd
-import org.apache.spark.{OneToOneDependency, SparkContext, Partition, TaskContext}
-import java.io.{ObjectOutputStream, IOException}
+import java.io.{IOException, ObjectOutputStream}
+
import scala.reflect.ClassTag
+import org.apache.spark.{OneToOneDependency, Partition, SparkContext, TaskContext}
+
private[spark] class ZippedPartitionsPartition(
idx: Int,
@transient rdds: Seq[RDD[_]],
diff --git a/core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala
index fb5b070c18..2119e76f0e 100644
--- a/core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala
@@ -17,12 +17,12 @@
package org.apache.spark.rdd
-import org.apache.spark.{OneToOneDependency, SparkContext, Partition, TaskContext}
-
-import java.io.{ObjectOutputStream, IOException}
+import java.io.{IOException, ObjectOutputStream}
import scala.reflect.ClassTag
+import org.apache.spark.{OneToOneDependency, Partition, SparkContext, TaskContext}
+
private[spark] class ZippedPartition[T: ClassTag, U: ClassTag](
idx: Int,
@transient rdd1: RDD[T],
diff --git a/core/src/main/scala/org/apache/spark/rdd/ZippedWithIndexRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ZippedWithIndexRDD.scala
index 38dc114d80..e2c301603b 100644
--- a/core/src/main/scala/org/apache/spark/rdd/ZippedWithIndexRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ZippedWithIndexRDD.scala
@@ -19,7 +19,7 @@ package org.apache.spark.rdd
import scala.reflect.ClassTag
-import org.apache.spark.{TaskContext, Partition}
+import org.apache.spark.{Partition, TaskContext}
import org.apache.spark.util.Utils
private[spark]
diff --git a/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala b/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala
index 0b04607d01..9257f48559 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala
@@ -17,10 +17,10 @@
package org.apache.spark.scheduler
-import org.apache.spark.TaskContext
-
import java.util.Properties
+import org.apache.spark.TaskContext
+
/**
* Tracks information about an active job in the DAGScheduler.
*/
diff --git a/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
index 80211541a6..729f518b89 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
@@ -28,9 +28,9 @@ import scala.reflect.ClassTag
import akka.actor._
import org.apache.spark._
-import org.apache.spark.rdd.RDD
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.partial.{ApproximateActionListener, ApproximateEvaluator, PartialResult}
+import org.apache.spark.rdd.RDD
import org.apache.spark.storage.{BlockId, BlockManager, BlockManagerMaster, RDDBlockId}
import org.apache.spark.util.{MetadataCleaner, MetadataCleanerType, TimeStampedHashMap}
diff --git a/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
index add1187613..39cd98e2d7 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
@@ -22,8 +22,8 @@ import java.util.Properties
import scala.collection.mutable.Map
import org.apache.spark._
-import org.apache.spark.rdd.RDD
import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.rdd.RDD
/**
* Types of events that can be handled by the DAGScheduler. The DAGScheduler uses an event queue
diff --git a/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala
index 7b5c0e29ad..b52fe2410a 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala
@@ -19,8 +19,8 @@ package org.apache.spark.scheduler
import com.codahale.metrics.{Gauge,MetricRegistry}
-import org.apache.spark.metrics.source.Source
import org.apache.spark.SparkContext
+import org.apache.spark.metrics.source.Source
private[spark] class DAGSchedulerSource(val dagScheduler: DAGScheduler, sc: SparkContext)
extends Source {
diff --git a/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala b/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala
index 23447f1bbf..5555585c8b 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala
@@ -17,17 +17,17 @@
package org.apache.spark.scheduler
-import org.apache.spark.{Logging, SparkEnv}
-import org.apache.spark.deploy.SparkHadoopUtil
+import scala.collection.JavaConversions._
import scala.collection.immutable.Set
+import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
+
+import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.mapred.{FileInputFormat, JobConf}
-import org.apache.hadoop.security.UserGroupInformation
-import org.apache.hadoop.util.ReflectionUtils
import org.apache.hadoop.mapreduce.Job
-import org.apache.hadoop.conf.Configuration
-import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
-import scala.collection.JavaConversions._
+import org.apache.hadoop.util.ReflectionUtils
+import org.apache.spark.Logging
+import org.apache.spark.deploy.SparkHadoopUtil
/**
* Parses and holds information about inputFormat (and files) specified as a parameter.
diff --git a/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala b/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
index b909b66a5d..9d75d7c4ad 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
@@ -17,7 +17,7 @@
package org.apache.spark.scheduler
-import java.io.{IOException, File, FileNotFoundException, PrintWriter}
+import java.io.{File, FileNotFoundException, IOException, PrintWriter}
import java.text.SimpleDateFormat
import java.util.{Date, Properties}
import java.util.concurrent.LinkedBlockingQueue
@@ -25,8 +25,8 @@ import java.util.concurrent.LinkedBlockingQueue
import scala.collection.mutable.{HashMap, HashSet, ListBuffer}
import org.apache.spark._
-import org.apache.spark.rdd.RDD
import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
/**
diff --git a/core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala b/core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala
index 1c61687f28..d3f63ff92a 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala
@@ -17,8 +17,9 @@
package org.apache.spark.scheduler
+import java.io.{Externalizable, ObjectInput, ObjectOutput}
+
import org.apache.spark.storage.BlockManagerId
-import java.io.{ObjectOutput, ObjectInput, Externalizable}
/**
* Result returned by a ShuffleMapTask to a scheduler. Includes the block manager address that the
diff --git a/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala b/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
index 77b1682b3e..3fc6cc9850 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
@@ -23,7 +23,7 @@ import java.util.zip.{GZIPInputStream, GZIPOutputStream}
import org.apache.spark._
import org.apache.spark.rdd.RDD
import org.apache.spark.rdd.RDDCheckpointData
-import org.apache.spark.util.{MetadataCleanerType, MetadataCleaner, TimeStampedHashMap}
+import org.apache.spark.util.{MetadataCleaner, MetadataCleanerType, TimeStampedHashMap}
private[spark] object ResultTask {
diff --git a/core/src/main/scala/org/apache/spark/scheduler/Schedulable.scala b/core/src/main/scala/org/apache/spark/scheduler/Schedulable.scala
index d573e125a3..ed24eb6a54 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/Schedulable.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/Schedulable.scala
@@ -17,9 +17,10 @@
package org.apache.spark.scheduler
+import scala.collection.mutable.ArrayBuffer
+
import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
-import scala.collection.mutable.ArrayBuffer
/**
* An interface for schedulable entities.
* there are two type of Schedulable entities(Pools and TaskSetManagers)
diff --git a/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala b/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala
index a546193d5b..e4eced383c 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala
@@ -20,10 +20,10 @@ package org.apache.spark.scheduler
import java.io.{FileInputStream, InputStream}
import java.util.{NoSuchElementException, Properties}
-import org.apache.spark.{SparkConf, Logging}
-
import scala.xml.XML
+import org.apache.spark.{Logging, SparkConf}
+
/**
* An interface to build Schedulable tree
* buildPools: build the tree nodes(pools)
diff --git a/core/src/main/scala/org/apache/spark/scheduler/SchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/SchedulerBackend.scala
index 02bdbba825..eefc8c232b 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/SchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/SchedulerBackend.scala
@@ -17,8 +17,6 @@
package org.apache.spark.scheduler
-import org.apache.spark.SparkContext
-
/**
* A backend interface for scheduling systems that allows plugging in different ones under
* ClusterScheduler. We assume a Mesos-like model where the application gets resource offers as
diff --git a/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala b/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala
index a37ead5632..77789031f4 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala
@@ -24,11 +24,10 @@ import scala.collection.mutable.HashMap
import org.apache.spark._
import org.apache.spark.executor.ShuffleWriteMetrics
-import org.apache.spark.storage._
-import org.apache.spark.util.{MetadataCleanerType, TimeStampedHashMap, MetadataCleaner}
import org.apache.spark.rdd.RDD
import org.apache.spark.rdd.RDDCheckpointData
-
+import org.apache.spark.storage._
+import org.apache.spark.util.{MetadataCleaner, MetadataCleanerType, TimeStampedHashMap}
private[spark] object ShuffleMapTask {
diff --git a/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala b/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
index 129153c732..9590c03f10 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
@@ -18,9 +18,10 @@
package org.apache.spark.scheduler
import java.util.Properties
-import org.apache.spark.util.{Utils, Distribution}
+
import org.apache.spark.{Logging, TaskEndReason}
import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.util.{Distribution, Utils}
sealed trait SparkListenerEvents
diff --git a/core/src/main/scala/org/apache/spark/scheduler/Task.scala b/core/src/main/scala/org/apache/spark/scheduler/Task.scala
index 69b42e86ea..b85b4a50cd 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/Task.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/Task.scala
@@ -29,7 +29,6 @@ import org.apache.spark.executor.TaskMetrics
import org.apache.spark.serializer.SerializerInstance
import org.apache.spark.util.ByteBufferInputStream
-
/**
* A unit of execution. We have two kinds of Task's in Spark:
* - [[org.apache.spark.scheduler.ShuffleMapTask]]
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskDescription.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskDescription.scala
index 5190d234d4..1481d70db4 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskDescription.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskDescription.scala
@@ -18,6 +18,7 @@
package org.apache.spark.scheduler
import java.nio.ByteBuffer
+
import org.apache.spark.util.SerializableBuffer
private[spark] class TaskDescription(
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskInfo.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskInfo.scala
index 91c27d7b8e..6183b125de 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskInfo.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskInfo.scala
@@ -17,8 +17,6 @@
package org.apache.spark.scheduler
-import org.apache.spark.util.Utils
-
/**
* Information about a running task attempt inside a TaskSet.
*/
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskLocality.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskLocality.scala
index 35de13c385..ea3229b75b 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskLocality.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskLocality.scala
@@ -17,7 +17,6 @@
package org.apache.spark.scheduler
-
private[spark] object TaskLocality extends Enumeration {
// process local is expected to be used ONLY within tasksetmanager for now.
val PROCESS_LOCAL, NODE_LOCAL, RACK_LOCAL, ANY = Value
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala
index 5724ec9d1b..d49d8fb887 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala
@@ -18,13 +18,14 @@
package org.apache.spark.scheduler
import java.io._
+import java.nio.ByteBuffer
import scala.collection.mutable.Map
+
+import org.apache.spark.SparkEnv
import org.apache.spark.executor.TaskMetrics
-import org.apache.spark.{SparkEnv}
-import java.nio.ByteBuffer
-import org.apache.spark.util.Utils
import org.apache.spark.storage.BlockId
+import org.apache.spark.util.Utils
// Task result. Also contains updates to accumulator variables.
private[spark] sealed trait TaskResult[T]
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskResultGetter.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskResultGetter.scala
index bdec08e968..cb4ad4ae93 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskResultGetter.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskResultGetter.scala
@@ -18,7 +18,6 @@
package org.apache.spark.scheduler
import java.nio.ByteBuffer
-import java.util.concurrent.{LinkedBlockingDeque, ThreadFactory, ThreadPoolExecutor, TimeUnit}
import org.apache.spark._
import org.apache.spark.TaskState.TaskState
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
index 5b525155e9..8df37c247d 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
@@ -18,13 +18,13 @@
package org.apache.spark.scheduler
import java.nio.ByteBuffer
-import java.util.concurrent.atomic.AtomicLong
import java.util.{TimerTask, Timer}
+import java.util.concurrent.atomic.AtomicLong
+import scala.concurrent.duration._
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashMap
import scala.collection.mutable.HashSet
-import scala.concurrent.duration._
import org.apache.spark._
import org.apache.spark.TaskState.TaskState
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
index 21b2ff1682..1a4b7e599c 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
@@ -26,13 +26,11 @@ import scala.collection.mutable.HashSet
import scala.math.max
import scala.math.min
-import org.apache.spark.{ExceptionFailure, ExecutorLostFailure, FetchFailed, Logging, Resubmitted,
- SparkEnv, Success, TaskEndReason, TaskKilled, TaskResultLost, TaskState}
+import org.apache.spark.{ExceptionFailure, ExecutorLostFailure, FetchFailed, Logging, Resubmitted, SparkEnv, Success, TaskEndReason, TaskKilled, TaskResultLost, TaskState}
import org.apache.spark.TaskState.TaskState
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.util.{Clock, SystemClock}
-
/**
* Schedules the tasks within a single TaskSet in the ClusterScheduler. This class keeps track of
* each task, retries tasks if they fail (up to a limited number of times), and
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala
index 53316dae2a..4a9a1659d8 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala
@@ -21,8 +21,7 @@ import java.nio.ByteBuffer
import org.apache.spark.TaskState.TaskState
import org.apache.spark.scheduler.TaskDescription
-import org.apache.spark.util.{Utils, SerializableBuffer}
-
+import org.apache.spark.util.{SerializableBuffer, Utils}
private[spark] sealed trait CoarseGrainedClusterMessage extends Serializable
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
index 78204103a9..379e02eb9a 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
@@ -27,10 +27,8 @@ import akka.actor._
import akka.pattern.ask
import akka.remote.{DisassociatedEvent, RemotingLifecycleEvent}
-import org.apache.spark.{SparkException, Logging, TaskState}
import org.apache.spark.{Logging, SparkException, TaskState}
-import org.apache.spark.scheduler.{TaskSchedulerImpl, SchedulerBackend, SlaveLost, TaskDescription,
- WorkerOffer}
+import org.apache.spark.scheduler.{SchedulerBackend, SlaveLost, TaskDescription, TaskSchedulerImpl, WorkerOffer}
import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._
import org.apache.spark.util.{AkkaUtils, Utils}
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
index 04f35cca08..ee4b65e312 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
@@ -17,11 +17,9 @@
package org.apache.spark.scheduler.cluster
-import scala.collection.mutable.HashMap
-
import org.apache.spark.{Logging, SparkContext}
-import org.apache.spark.deploy.client.{AppClient, AppClientListener}
import org.apache.spark.deploy.{Command, ApplicationDescription}
+import org.apache.spark.deploy.client.{AppClient, AppClientListener}
import org.apache.spark.scheduler.{ExecutorExited, ExecutorLossReason, SlaveLost, TaskSchedulerImpl}
import org.apache.spark.util.Utils
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala
index 4401f6df47..28b019d9fd 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala
@@ -18,18 +18,17 @@
package org.apache.spark.scheduler.cluster.mesos
import java.io.File
-import java.util.{ArrayList => JArrayList, List => JList}
+import java.util.{List => JList}
import java.util.Collections
-import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
import scala.collection.JavaConversions._
+import scala.collection.mutable.{HashMap, HashSet}
-import com.google.protobuf.ByteString
import org.apache.mesos.{Scheduler => MScheduler}
import org.apache.mesos._
import org.apache.mesos.Protos.{TaskInfo => MesosTaskInfo, TaskState => MesosTaskState, _}
-import org.apache.spark.{SparkException, Logging, SparkContext, TaskState}
+import org.apache.spark.{Logging, SparkContext, SparkException}
import org.apache.spark.scheduler.TaskSchedulerImpl
import org.apache.spark.scheduler.cluster.CoarseGrainedSchedulerBackend
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala
index fef291eea0..c576beb0c0 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala
@@ -21,17 +21,16 @@ import java.io.File
import java.util.{ArrayList => JArrayList, List => JList}
import java.util.Collections
-import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
import scala.collection.JavaConversions._
+import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
import com.google.protobuf.ByteString
import org.apache.mesos.{Scheduler => MScheduler}
import org.apache.mesos._
import org.apache.mesos.Protos.{TaskInfo => MesosTaskInfo, TaskState => MesosTaskState, _}
-import org.apache.spark.{Logging, SparkException, SparkContext, TaskState}
-import org.apache.spark.scheduler.{ExecutorExited, ExecutorLossReason, SchedulerBackend, SlaveLost,
- TaskDescription, TaskSchedulerImpl, WorkerOffer}
+import org.apache.spark.{Logging, SparkContext, SparkException, TaskState}
+import org.apache.spark.scheduler.{ExecutorExited, ExecutorLossReason, SchedulerBackend, SlaveLost, TaskDescription, TaskSchedulerImpl, WorkerOffer}
import org.apache.spark.util.Utils
/**
diff --git a/core/src/main/scala/org/apache/spark/scheduler/local/LocalBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/local/LocalBackend.scala
index 897d47a9ad..50f7e79e97 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/local/LocalBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/local/LocalBackend.scala
@@ -21,7 +21,7 @@ import java.nio.ByteBuffer
import akka.actor.{Actor, ActorRef, Props}
-import org.apache.spark.{Logging, SparkContext, SparkEnv, TaskState}
+import org.apache.spark.{Logging, SparkEnv, TaskState}
import org.apache.spark.TaskState.TaskState
import org.apache.spark.executor.{Executor, ExecutorBackend}
import org.apache.spark.scheduler.{SchedulerBackend, TaskSchedulerImpl, WorkerOffer}
diff --git a/core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala b/core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala
index 5d3d43623d..33c1705ad7 100644
--- a/core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala
@@ -20,8 +20,8 @@ package org.apache.spark.serializer
import java.io._
import java.nio.ByteBuffer
-import org.apache.spark.util.ByteBufferInputStream
import org.apache.spark.SparkConf
+import org.apache.spark.util.ByteBufferInputStream
private[spark] class JavaSerializationStream(out: OutputStream) extends SerializationStream {
val objOut = new ObjectOutputStream(out)
diff --git a/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala b/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
index 2d0b255385..920490f9d0 100644
--- a/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
@@ -17,13 +17,13 @@
package org.apache.spark.serializer
-import java.nio.ByteBuffer
import java.io.{EOFException, InputStream, OutputStream}
+import java.nio.ByteBuffer
-import com.esotericsoftware.kryo.serializers.{JavaSerializer => KryoJavaSerializer}
-import com.esotericsoftware.kryo.{KryoException, Kryo}
+import com.esotericsoftware.kryo.{Kryo, KryoException}
import com.esotericsoftware.kryo.io.{Input => KryoInput, Output => KryoOutput}
-import com.twitter.chill.{EmptyScalaKryoInstantiator, AllScalaRegistrar}
+import com.esotericsoftware.kryo.serializers.{JavaSerializer => KryoJavaSerializer}
+import com.twitter.chill.{AllScalaRegistrar, EmptyScalaKryoInstantiator}
import org.apache.spark._
import org.apache.spark.broadcast.HttpBroadcast
diff --git a/core/src/main/scala/org/apache/spark/serializer/Serializer.scala b/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
index a38a2b59db..16677ab54b 100644
--- a/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
@@ -22,8 +22,7 @@ import java.nio.ByteBuffer
import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream
-import org.apache.spark.util.{NextIterator, ByteBufferInputStream}
-
+import org.apache.spark.util.{ByteBufferInputStream, NextIterator}
/**
* A serializer. Because some serialization libraries are not thread safe, this class is used to
diff --git a/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala b/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala
index 36a37af4f8..65ac0155f4 100644
--- a/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala
@@ -18,8 +18,8 @@
package org.apache.spark.serializer
import java.util.concurrent.ConcurrentHashMap
-import org.apache.spark.SparkConf
+import org.apache.spark.SparkConf
/**
* A service that returns a serializer object given the serializer's class name. If a previous
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala b/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala
index aa62ab5aba..925022e7fe 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala
@@ -17,7 +17,6 @@
package org.apache.spark.storage
-import java.nio.ByteBuffer
import java.util.concurrent.LinkedBlockingQueue
import scala.collection.mutable.ArrayBuffer
@@ -26,15 +25,13 @@ import scala.collection.mutable.Queue
import io.netty.buffer.ByteBuf
-import org.apache.spark.Logging
-import org.apache.spark.SparkException
+import org.apache.spark.{Logging, SparkException}
import org.apache.spark.network.BufferMessage
import org.apache.spark.network.ConnectionManagerId
import org.apache.spark.network.netty.ShuffleCopier
import org.apache.spark.serializer.Serializer
import org.apache.spark.util.Utils
-
/**
* A block fetcher iterator interface. There are two implementations:
*
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
index 780a3a15dd..a734ddc1ef 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
@@ -20,24 +20,21 @@ package org.apache.spark.storage
import java.io.{File, InputStream, OutputStream}
import java.nio.{ByteBuffer, MappedByteBuffer}
-import scala.collection.mutable.{HashMap, ArrayBuffer}
-import scala.util.Random
-
-import akka.actor.{ActorSystem, Cancellable, Props}
+import scala.collection.mutable.{ArrayBuffer, HashMap}
import scala.concurrent.{Await, Future}
-import scala.concurrent.duration.Duration
import scala.concurrent.duration._
+import scala.util.Random
+import akka.actor.{ActorSystem, Cancellable, Props}
import it.unimi.dsi.fastutil.io.{FastBufferedOutputStream, FastByteArrayOutputStream}
+import sun.nio.ch.DirectBuffer
-import org.apache.spark.{SparkConf, Logging, SparkEnv, SparkException}
+import org.apache.spark.{Logging, SparkConf, SparkEnv, SparkException}
import org.apache.spark.io.CompressionCodec
import org.apache.spark.network._
import org.apache.spark.serializer.Serializer
import org.apache.spark.util._
-import sun.nio.ch.DirectBuffer
-
private[spark] class BlockManager(
executorId: String,
actorSystem: ActorSystem,
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala
index 74207f59af..98cd6e68fa 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala
@@ -19,6 +19,7 @@ package org.apache.spark.storage
import java.io.{Externalizable, IOException, ObjectInput, ObjectOutput}
import java.util.concurrent.ConcurrentHashMap
+
import org.apache.spark.util.Utils
/**
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
index c54e4f2664..e531467ccc 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
@@ -23,7 +23,7 @@ import scala.concurrent.ExecutionContext.Implicits.global
import akka.actor._
import akka.pattern.ask
-import org.apache.spark.{SparkConf, Logging, SparkException}
+import org.apache.spark.{Logging, SparkConf, SparkException}
import org.apache.spark.storage.BlockManagerMessages._
import org.apache.spark.util.AkkaUtils
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala
index 893418fb8c..a999d76a32 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala
@@ -27,7 +27,7 @@ import scala.concurrent.duration._
import akka.actor.{Actor, ActorRef, Cancellable}
import akka.pattern.ask
-import org.apache.spark.{SparkConf, Logging, SparkException}
+import org.apache.spark.{Logging, SparkConf, SparkException}
import org.apache.spark.storage.BlockManagerMessages._
import org.apache.spark.util.{AkkaUtils, Utils}
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala
index 45f51da288..bbb9529b5a 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala
@@ -21,7 +21,6 @@ import java.io.{Externalizable, ObjectInput, ObjectOutput}
import akka.actor.ActorRef
-
private[storage] object BlockManagerMessages {
//////////////////////////////////////////////////////////////////////////////////
// Messages from the master to slaves.
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala
index 3a65e55733..bcfb82d3c7 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala
@@ -21,7 +21,6 @@ import akka.actor.Actor
import org.apache.spark.storage.BlockManagerMessages._
-
/**
* An actor to take commands from the master to execute options. For example,
* this is used to remove blocks from the slave's BlockManager.
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala
index 7cf754fb20..687586490a 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala
@@ -19,9 +19,8 @@ package org.apache.spark.storage
import com.codahale.metrics.{Gauge,MetricRegistry}
-import org.apache.spark.metrics.source.Source
import org.apache.spark.SparkContext
-
+import org.apache.spark.metrics.source.Source
private[spark] class BlockManagerSource(val blockManager: BlockManager, sc: SparkContext)
extends Source {
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala
index 3efe738a08..c7766a3a65 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala
@@ -19,7 +19,7 @@ package org.apache.spark.storage
import java.nio.ByteBuffer
-import org.apache.spark.{Logging}
+import org.apache.spark.Logging
import org.apache.spark.network._
import org.apache.spark.util.Utils
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockMessage.scala b/core/src/main/scala/org/apache/spark/storage/BlockMessage.scala
index fbafcf79d2..7168ae18c2 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockMessage.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockMessage.scala
@@ -19,8 +19,8 @@ package org.apache.spark.storage
import java.nio.ByteBuffer
-import scala.collection.mutable.StringBuilder
import scala.collection.mutable.ArrayBuffer
+import scala.collection.mutable.StringBuilder
import org.apache.spark.network._
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockStore.scala b/core/src/main/scala/org/apache/spark/storage/BlockStore.scala
index ea42656240..b047644b88 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockStore.scala
@@ -18,6 +18,7 @@
package org.apache.spark.storage
import java.nio.ByteBuffer
+
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.Logging
diff --git a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
index 5a1e7b4444..d1f07ddb24 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
@@ -27,7 +27,6 @@ import org.apache.spark.Logging
import org.apache.spark.serializer.Serializer
import org.apache.spark.util.Utils
-
/**
* Stores BlockManager blocks on disk.
*/
diff --git a/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala b/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala
index eb5a185216..1814175651 100644
--- a/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala
@@ -17,10 +17,11 @@
package org.apache.spark.storage
-import java.util.LinkedHashMap
-import java.util.concurrent.ArrayBlockingQueue
import java.nio.ByteBuffer
-import collection.mutable.ArrayBuffer
+import java.util.LinkedHashMap
+
+import scala.collection.mutable.ArrayBuffer
+
import org.apache.spark.util.{SizeEstimator, Utils}
/**
diff --git a/core/src/main/scala/org/apache/spark/storage/StoragePerfTester.scala b/core/src/main/scala/org/apache/spark/storage/StoragePerfTester.scala
index 40734aab49..8cea302eb1 100644
--- a/core/src/main/scala/org/apache/spark/storage/StoragePerfTester.scala
+++ b/core/src/main/scala/org/apache/spark/storage/StoragePerfTester.scala
@@ -17,11 +17,11 @@
package org.apache.spark.storage
-import java.util.concurrent.atomic.AtomicLong
import java.util.concurrent.{CountDownLatch, Executors}
+import java.util.concurrent.atomic.AtomicLong
-import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.SparkContext
+import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.util.Utils
/**
diff --git a/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala b/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala
index 50a0cdb309..2d88a40fbb 100644
--- a/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala
+++ b/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala
@@ -17,8 +17,8 @@
package org.apache.spark.storage
-import org.apache.spark.{SparkContext}
-import BlockManagerMasterActor.BlockStatus
+import org.apache.spark.SparkContext
+import org.apache.spark.storage.BlockManagerMasterActor.BlockStatus
import org.apache.spark.util.Utils
private[spark]
diff --git a/core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala b/core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala
index 729ba2c550..1d81d006c0 100644
--- a/core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala
+++ b/core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala
@@ -17,12 +17,13 @@
package org.apache.spark.storage
-import akka.actor._
-
import java.util.concurrent.ArrayBlockingQueue
+
+import akka.actor._
import util.Random
+
+import org.apache.spark.SparkConf
import org.apache.spark.serializer.KryoSerializer
-import org.apache.spark.{SparkConf, SparkContext}
/**
* This class tests the BlockManager and MemoryStore for thread safety and
diff --git a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
index ade8ba1323..1f048a84cd 100644
--- a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
@@ -17,21 +17,19 @@
package org.apache.spark.ui
+import java.net.InetSocketAddress
import javax.servlet.http.{HttpServletResponse, HttpServletRequest}
import scala.annotation.tailrec
-import scala.util.{Try, Success, Failure}
+import scala.util.{Failure, Success, Try}
import scala.xml.Node
import net.liftweb.json.{JValue, pretty, render}
-
-import org.eclipse.jetty.server.{Server, Request, Handler}
-import org.eclipse.jetty.server.handler.{ResourceHandler, HandlerList, ContextHandler, AbstractHandler}
+import org.eclipse.jetty.server.{Handler, Request, Server}
+import org.eclipse.jetty.server.handler.{AbstractHandler, ContextHandler, HandlerList, ResourceHandler}
import org.eclipse.jetty.util.thread.QueuedThreadPool
import org.apache.spark.Logging
-import java.net.InetSocketAddress
-
/** Utilities for launching a web server using Jetty's HTTP Server class */
private[spark] object JettyUtils extends Logging {
diff --git a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
index 0196f43d74..af6b65860e 100644
--- a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
@@ -17,16 +17,14 @@
package org.apache.spark.ui
-import javax.servlet.http.HttpServletRequest
-
import org.eclipse.jetty.server.{Handler, Server}
import org.apache.spark.{Logging, SparkContext, SparkEnv}
+import org.apache.spark.ui.JettyUtils._
import org.apache.spark.ui.env.EnvironmentUI
import org.apache.spark.ui.exec.ExecutorsUI
-import org.apache.spark.ui.storage.BlockManagerUI
import org.apache.spark.ui.jobs.JobProgressUI
-import org.apache.spark.ui.JettyUtils._
+import org.apache.spark.ui.storage.BlockManagerUI
import org.apache.spark.util.Utils
/** Top level user interface for Spark */
diff --git a/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala b/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
index f913ee461b..18d2b5075a 100644
--- a/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
+++ b/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
@@ -23,7 +23,6 @@ import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.SparkContext._
import org.apache.spark.scheduler.SchedulingMode
-
/**
* Continuously generates jobs that expose various features of the WebUI (internal testing tool).
*
diff --git a/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala b/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala
index 88f41be8d3..9e7cdc8816 100644
--- a/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala
@@ -25,11 +25,10 @@ import scala.xml.Node
import org.eclipse.jetty.server.Handler
+import org.apache.spark.SparkContext
import org.apache.spark.ui.JettyUtils._
-import org.apache.spark.ui.UIUtils
import org.apache.spark.ui.Page.Environment
-import org.apache.spark.SparkContext
-
+import org.apache.spark.ui.UIUtils
private[spark] class EnvironmentUI(sc: SparkContext) {
diff --git a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala
index 4e41acf023..1f3b7a4c23 100644
--- a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala
@@ -26,14 +26,13 @@ import org.eclipse.jetty.server.Handler
import org.apache.spark.{ExceptionFailure, Logging, SparkContext}
import org.apache.spark.executor.TaskMetrics
-import org.apache.spark.scheduler.{SparkListenerTaskStart, SparkListenerTaskEnd, SparkListener}
+import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskEnd, SparkListenerTaskStart}
import org.apache.spark.scheduler.TaskInfo
import org.apache.spark.ui.JettyUtils._
import org.apache.spark.ui.Page.Executors
import org.apache.spark.ui.UIUtils
import org.apache.spark.util.Utils
-
private[spark] class ExecutorsUI(val sc: SparkContext) {
private var _listener: Option[ExecutorsListener] = None
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala b/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala
index ab03eb5ce1..d012ba4dbb 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala
@@ -17,11 +17,11 @@
package org.apache.spark.ui.jobs
+import scala.collection.mutable
import scala.xml.Node
import org.apache.spark.scheduler.SchedulingMode
import org.apache.spark.util.Utils
-import scala.collection.mutable
/** Page showing executor summary */
private[spark] class ExecutorTable(val parent: JobProgressUI, val stageId: Int) {
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala
index 6289f8744f..81713edcf5 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala
@@ -25,7 +25,6 @@ import org.apache.spark.scheduler.SchedulingMode
import org.apache.spark.ui.Page._
import org.apache.spark.ui.UIUtils._
-
/** Page showing list of all ongoing and recently finished stages and pools*/
private[spark] class IndexPage(parent: JobProgressUI) {
def listener = parent.listener
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
index 858a10ce75..07a08f5277 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
@@ -17,7 +17,6 @@
package org.apache.spark.ui.jobs
-import scala.Seq
import scala.collection.mutable.{ListBuffer, HashMap, HashSet}
import org.apache.spark.{ExceptionFailure, SparkContext, Success}
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala
index c1ee2f3d00..557bce6b66 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala
@@ -17,23 +17,15 @@
package org.apache.spark.ui.jobs
-import scala.concurrent.duration._
-
import java.text.SimpleDateFormat
-
import javax.servlet.http.HttpServletRequest
-import org.eclipse.jetty.server.Handler
-
import scala.Seq
-import scala.collection.mutable.{HashSet, ListBuffer, HashMap, ArrayBuffer}
+import org.eclipse.jetty.server.Handler
+
+import org.apache.spark.SparkContext
import org.apache.spark.ui.JettyUtils._
-import org.apache.spark.{ExceptionFailure, SparkContext, Success}
-import org.apache.spark.scheduler._
-import collection.mutable
-import org.apache.spark.scheduler.SchedulingMode
-import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
import org.apache.spark.util.Utils
/** Web UI showing progress status of all jobs in the given SparkContext. */
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala
index 89fffcb80d..eb7518a020 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala
@@ -19,12 +19,10 @@ package org.apache.spark.ui.jobs
import javax.servlet.http.HttpServletRequest
-import scala.xml.{NodeSeq, Node}
-import scala.collection.mutable.HashSet
+import scala.xml.Node
-import org.apache.spark.scheduler.Stage
-import org.apache.spark.ui.UIUtils._
import org.apache.spark.ui.Page._
+import org.apache.spark.ui.UIUtils._
/** Page showing specific pool details */
private[spark] class PoolPage(parent: JobProgressUI) {
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
index b6e98942ab..ddc687a45a 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
@@ -18,17 +18,16 @@
package org.apache.spark.ui.jobs
import java.util.Date
-
import javax.servlet.http.HttpServletRequest
import scala.xml.Node
-import org.apache.spark.{ExceptionFailure}
+import org.apache.spark.ExceptionFailure
import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.scheduler.TaskInfo
import org.apache.spark.ui.UIUtils._
import org.apache.spark.ui.Page._
import org.apache.spark.util.{Utils, Distribution}
-import org.apache.spark.scheduler.TaskInfo
/** Page showing statistics and task list for a given stage */
private[spark] class StagePage(parent: JobProgressUI) {
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala b/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala
index 999a94fc2d..c5fd3ae16d 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala
@@ -19,14 +19,13 @@ package org.apache.spark.ui.jobs
import java.util.Date
-import scala.xml.Node
import scala.collection.mutable.HashSet
+import scala.xml.Node
import org.apache.spark.scheduler.{SchedulingMode, StageInfo, TaskInfo}
import org.apache.spark.ui.UIUtils
import org.apache.spark.util.Utils
-
/** Page showing list of all ongoing and recently finished stages */
private[spark] class StageTable(val stages: Seq[StageInfo], val parent: JobProgressUI) {
diff --git a/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala b/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala
index 39f422dd6b..dc18eab74e 100644
--- a/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala
@@ -17,8 +17,6 @@
package org.apache.spark.ui.storage
-import scala.concurrent.duration._
-
import javax.servlet.http.HttpServletRequest
import org.eclipse.jetty.server.Handler
diff --git a/core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala b/core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala
index 109a7d4094..6a3c41fb11 100644
--- a/core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala
@@ -22,8 +22,8 @@ import javax.servlet.http.HttpServletRequest
import scala.xml.Node
import org.apache.spark.storage.{RDDInfo, StorageUtils}
-import org.apache.spark.ui.UIUtils._
import org.apache.spark.ui.Page._
+import org.apache.spark.ui.UIUtils._
import org.apache.spark.util.Utils
/** Page showing list of RDD's currently stored in the cluster */
diff --git a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
index b83cd54f3c..78b149b14b 100644
--- a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
@@ -23,11 +23,10 @@ import scala.xml.Node
import org.apache.spark.storage.{BlockId, StorageStatus, StorageUtils}
import org.apache.spark.storage.BlockManagerMasterActor.BlockStatus
-import org.apache.spark.ui.UIUtils._
import org.apache.spark.ui.Page._
+import org.apache.spark.ui.UIUtils._
import org.apache.spark.util.Utils
-
/** Page showing storage details for a given RDD */
private[spark] class RDDPage(parent: BlockManagerUI) {
val sc = parent.sc
diff --git a/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala b/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala
index 761d378c7f..f26ed47e58 100644
--- a/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala
+++ b/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala
@@ -22,8 +22,8 @@ import scala.concurrent.duration.{Duration, FiniteDuration}
import akka.actor.{ActorSystem, ExtendedActorSystem, IndestructibleActorSystem}
import com.typesafe.config.ConfigFactory
-
import org.apache.log4j.{Level, Logger}
+
import org.apache.spark.SparkConf
/**
diff --git a/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala b/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala
index a38329df03..c3692f2fd9 100644
--- a/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala
+++ b/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala
@@ -19,8 +19,9 @@ package org.apache.spark.util
import java.io.Serializable
import java.util.{PriorityQueue => JPriorityQueue}
-import scala.collection.generic.Growable
+
import scala.collection.JavaConverters._
+import scala.collection.generic.Growable
/**
* Bounded priority queue. This class wraps the original PriorityQueue
diff --git a/core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala b/core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala
index e214d2a519..54de4d4ee8 100644
--- a/core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala
+++ b/core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala
@@ -19,6 +19,7 @@ package org.apache.spark.util
import java.io.InputStream
import java.nio.ByteBuffer
+
import org.apache.spark.storage.BlockManager
/**
diff --git a/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala b/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
index c0c057be8d..681d0a30cb 100644
--- a/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
+++ b/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
@@ -17,14 +17,14 @@
package org.apache.spark.util
-import java.lang.reflect.Field
+import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import scala.collection.mutable.Map
import scala.collection.mutable.Set
import org.objectweb.asm.{ClassReader, ClassVisitor, MethodVisitor, Type}
import org.objectweb.asm.Opcodes._
-import java.io.{ByteArrayOutputStream, ByteArrayInputStream}
+
import org.apache.spark.Logging
private[spark] object ClosureCleaner extends Logging {
diff --git a/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala b/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala
index 3868ab3631..0448919e09 100644
--- a/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala
+++ b/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala
@@ -17,9 +17,9 @@
package org.apache.spark.util
-import java.util.{TimerTask, Timer}
-import org.apache.spark.{SparkConf, Logging}
+import java.util.{Timer, TimerTask}
+import org.apache.spark.{Logging, SparkConf}
/**
* Runs a timer task to periodically clean up metadata (e.g. old files or hashtable entries)
diff --git a/core/src/main/scala/org/apache/spark/util/MutablePair.scala b/core/src/main/scala/org/apache/spark/util/MutablePair.scala
index 34f1f6606f..b053266f12 100644
--- a/core/src/main/scala/org/apache/spark/util/MutablePair.scala
+++ b/core/src/main/scala/org/apache/spark/util/MutablePair.scala
@@ -17,7 +17,6 @@
package org.apache.spark.util
-
/**
* A tuple of 2 elements. This can be used as an alternative to Scala's Tuple2 when we want to
* minimize object allocation.
diff --git a/core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala b/core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala
index f2b1ad7d0e..2b452ad33b 100644
--- a/core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala
+++ b/core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala
@@ -17,8 +17,8 @@
package org.apache.spark.util
+import java.io.{EOFException, IOException, ObjectInputStream, ObjectOutputStream}
import java.nio.ByteBuffer
-import java.io.{IOException, ObjectOutputStream, EOFException, ObjectInputStream}
import java.nio.channels.Channels
/**
diff --git a/core/src/main/scala/org/apache/spark/util/SerializableHyperLogLog.scala b/core/src/main/scala/org/apache/spark/util/SerializableHyperLogLog.scala
index 2110b3596e..21a88eea3b 100644
--- a/core/src/main/scala/org/apache/spark/util/SerializableHyperLogLog.scala
+++ b/core/src/main/scala/org/apache/spark/util/SerializableHyperLogLog.scala
@@ -17,8 +17,9 @@
package org.apache.spark.util
-import java.io.{Externalizable, ObjectOutput, ObjectInput}
-import com.clearspring.analytics.stream.cardinality.{ICardinality, HyperLogLog}
+import java.io.{Externalizable, ObjectInput, ObjectOutput}
+
+import com.clearspring.analytics.stream.cardinality.{HyperLogLog, ICardinality}
/**
* A wrapper around [[com.clearspring.analytics.stream.cardinality.HyperLogLog]] that is
diff --git a/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala b/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
index 17c6481c18..b955612ca7 100644
--- a/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
+++ b/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
@@ -17,20 +17,19 @@
package org.apache.spark.util
+import java.lang.management.ManagementFactory
+import java.lang.reflect.{Array => JArray}
import java.lang.reflect.Field
import java.lang.reflect.Modifier
-import java.lang.reflect.{Array => JArray}
import java.util.IdentityHashMap
-import java.util.concurrent.ConcurrentHashMap
import java.util.Random
-
-import javax.management.MBeanServer
-import java.lang.management.ManagementFactory
+import java.util.concurrent.ConcurrentHashMap
import scala.collection.mutable.ArrayBuffer
import it.unimi.dsi.fastutil.ints.IntOpenHashSet
-import org.apache.spark.{SparkEnv, SparkConf, SparkContext, Logging}
+
+import org.apache.spark.Logging
/**
* Estimates the sizes of Java objects (number of bytes of memory they occupy), for use in
diff --git a/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala b/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala
index 8e07a0f29a..ddbd084ed7 100644
--- a/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala
+++ b/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala
@@ -18,10 +18,11 @@
package org.apache.spark.util
import java.util.concurrent.ConcurrentHashMap
+
import scala.collection.JavaConversions
-import scala.collection.mutable.Map
import scala.collection.immutable
-import org.apache.spark.scheduler.MapStatus
+import scala.collection.mutable.Map
+
import org.apache.spark.Logging
/**
diff --git a/core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala b/core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala
index 26983138ff..19bece86b3 100644
--- a/core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala
+++ b/core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala
@@ -17,10 +17,10 @@
package org.apache.spark.util
-import scala.collection.mutable.Set
-import scala.collection.JavaConversions
import java.util.concurrent.ConcurrentHashMap
+import scala.collection.JavaConversions
+import scala.collection.mutable.Set
class TimeStampedHashSet[A] extends Set[A] {
val internalMap = new ConcurrentHashMap[A, Long]()
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 8749ab7875..8e69f1d335 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -18,7 +18,8 @@
package org.apache.spark.util
import java.io._
-import java.net.{InetAddress, URL, URI, NetworkInterface, Inet4Address}
+import java.net.{InetAddress, Inet4Address, NetworkInterface, URI, URL}
+import java.nio.ByteBuffer
import java.util.{Locale, Random, UUID}
import java.util.concurrent.{ConcurrentHashMap, Executors, ThreadPoolExecutor}
@@ -30,16 +31,11 @@ import scala.reflect.ClassTag
import com.google.common.io.Files
import com.google.common.util.concurrent.ThreadFactoryBuilder
+import org.apache.hadoop.fs.{FileSystem, FileUtil, Path}
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.{Path, FileSystem, FileUtil}
-import org.apache.hadoop.io._
-
+import org.apache.spark.{Logging, SparkConf, SparkException}
import org.apache.spark.serializer.{DeserializationStream, SerializationStream, SerializerInstance}
import org.apache.spark.deploy.SparkHadoopUtil
-import java.nio.ByteBuffer
-import org.apache.spark.{SparkConf, SparkException, Logging}
-
/**
* Various utility methods used by Spark.
diff --git a/core/src/main/scala/org/apache/spark/util/Vector.scala b/core/src/main/scala/org/apache/spark/util/Vector.scala
index 96da93d854..d437c055f3 100644
--- a/core/src/main/scala/org/apache/spark/util/Vector.scala
+++ b/core/src/main/scala/org/apache/spark/util/Vector.scala
@@ -18,6 +18,7 @@
package org.apache.spark.util
import scala.util.Random
+
import org.apache.spark.util.random.XORShiftRandom
class Vector(val elements: Array[Double]) extends Serializable {
diff --git a/core/src/main/scala/org/apache/spark/util/collection/BitSet.scala b/core/src/main/scala/org/apache/spark/util/collection/BitSet.scala
index c9cf512843..d3153d2cac 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/BitSet.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/BitSet.scala
@@ -17,7 +17,6 @@
package org.apache.spark.util.collection
-
/**
* A simple, fixed-size bit set implementation. This implementation is fast because it avoids
* safety/bound checking.
diff --git a/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
index 59ba1e457c..856d092ab3 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
@@ -23,8 +23,8 @@ import java.util.Comparator
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
-import it.unimi.dsi.fastutil.io.FastBufferedInputStream
import com.google.common.io.ByteStreams
+import it.unimi.dsi.fastutil.io.FastBufferedInputStream
import org.apache.spark.{Logging, SparkEnv}
import org.apache.spark.serializer.Serializer
diff --git a/core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala b/core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala
index 6b66d54751..0f1fca4813 100644
--- a/core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala
+++ b/core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala
@@ -18,6 +18,7 @@
package org.apache.spark.util.random
import java.util.Random
+
import cern.jet.random.Poisson
import cern.jet.random.engine.DRand
diff --git a/core/src/main/scala/org/apache/spark/util/random/XORShiftRandom.scala b/core/src/main/scala/org/apache/spark/util/random/XORShiftRandom.scala
index 20d32d01b5..ca611b67ed 100644
--- a/core/src/main/scala/org/apache/spark/util/random/XORShiftRandom.scala
+++ b/core/src/main/scala/org/apache/spark/util/random/XORShiftRandom.scala
@@ -18,6 +18,7 @@
package org.apache.spark.util.random
import java.util.{Random => JavaRandom}
+
import org.apache.spark.util.Utils.timeIt
/**
diff --git a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
index c443c5266e..6c73ea6949 100644
--- a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
@@ -17,12 +17,11 @@
package org.apache.spark
+import scala.collection.mutable
+
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
-import collection.mutable
-import java.util.Random
-import scala.math.exp
-import scala.math.signum
+
import org.apache.spark.SparkContext._
class AccumulatorSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
diff --git a/core/src/test/scala/org/apache/spark/CheckpointSuite.scala b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
index ec13b329b2..d2e29f20f0 100644
--- a/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
+++ b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
@@ -17,11 +17,14 @@
package org.apache.spark
+import java.io.File
+
import scala.reflect.ClassTag
+
import org.scalatest.FunSuite
-import java.io.File
-import org.apache.spark.rdd._
+
import org.apache.spark.SparkContext._
+import org.apache.spark.rdd._
import org.apache.spark.storage.{BlockId, StorageLevel, TestBlockId}
import org.apache.spark.util.Utils
diff --git a/core/src/test/scala/org/apache/spark/DistributedSuite.scala b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
index 8de7a328d1..14ddd6f1ec 100644
--- a/core/src/test/scala/org/apache/spark/DistributedSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
@@ -17,17 +17,16 @@
package org.apache.spark
-import network.ConnectionManagerId
import org.scalatest.BeforeAndAfter
-import org.scalatest.concurrent.Timeouts._
import org.scalatest.FunSuite
+import org.scalatest.concurrent.Timeouts._
import org.scalatest.matchers.ShouldMatchers
-import org.scalatest.time.{Span, Millis}
+import org.scalatest.time.{Millis, Span}
-import SparkContext._
+import org.apache.spark.SparkContext._
+import org.apache.spark.network.ConnectionManagerId
import org.apache.spark.storage.{BlockManagerWorker, GetBlock, RDDBlockId, StorageLevel}
-
class NotSerializableClass
class NotSerializableExn(val notSer: NotSerializableClass) extends Throwable() {}
diff --git a/core/src/test/scala/org/apache/spark/DriverSuite.scala b/core/src/test/scala/org/apache/spark/DriverSuite.scala
index fb89537258..e0e8011278 100644
--- a/core/src/test/scala/org/apache/spark/DriverSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DriverSuite.scala
@@ -26,6 +26,7 @@ import org.scalatest.FunSuite
import org.scalatest.concurrent.Timeouts
import org.scalatest.prop.TableDrivenPropertyChecks._
import org.scalatest.time.SpanSugar._
+
import org.apache.spark.util.Utils
class DriverSuite extends FunSuite with Timeouts {
diff --git a/core/src/test/scala/org/apache/spark/FailureSuite.scala b/core/src/test/scala/org/apache/spark/FailureSuite.scala
index befdc1589f..ac3c86778d 100644
--- a/core/src/test/scala/org/apache/spark/FailureSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FailureSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark
import org.scalatest.FunSuite
-import SparkContext._
+import org.apache.spark.SparkContext._
import org.apache.spark.util.NonSerializable
// Common state shared by FailureSuite-launched tasks. We use a global object
diff --git a/core/src/test/scala/org/apache/spark/FileServerSuite.scala b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
index a2eb9a4e84..9be67b3c95 100644
--- a/core/src/test/scala/org/apache/spark/FileServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
@@ -20,10 +20,11 @@ package org.apache.spark
import java.io._
import java.util.jar.{JarEntry, JarOutputStream}
-import SparkContext._
import com.google.common.io.Files
import org.scalatest.FunSuite
+import org.apache.spark.SparkContext._
+
class FileServerSuite extends FunSuite with LocalSparkContext {
@transient var tmpFile: File = _
diff --git a/core/src/test/scala/org/apache/spark/FileSuite.scala b/core/src/test/scala/org/apache/spark/FileSuite.scala
index 7b82a4cdd9..8ff02aef67 100644
--- a/core/src/test/scala/org/apache/spark/FileSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileSuite.scala
@@ -17,17 +17,16 @@
package org.apache.spark
-import java.io.{FileWriter, PrintWriter, File}
+import java.io.{File, FileWriter}
import scala.io.Source
import com.google.common.io.Files
-import org.scalatest.FunSuite
import org.apache.hadoop.io._
-import org.apache.hadoop.io.compress.{DefaultCodec, CompressionCodec, GzipCodec}
-
+import org.apache.hadoop.io.compress.DefaultCodec
+import org.scalatest.FunSuite
-import SparkContext._
+import org.apache.spark.SparkContext._
class FileSuite extends FunSuite with LocalSparkContext {
diff --git a/core/src/test/scala/org/apache/spark/JavaAPISuite.java b/core/src/test/scala/org/apache/spark/JavaAPISuite.java
index 8c573ac0d6..20232e9fbb 100644
--- a/core/src/test/scala/org/apache/spark/JavaAPISuite.java
+++ b/core/src/test/scala/org/apache/spark/JavaAPISuite.java
@@ -22,14 +22,14 @@ import java.io.IOException;
import java.io.Serializable;
import java.util.*;
-import com.google.common.base.Optional;
import scala.Tuple2;
+import com.google.common.base.Optional;
import com.google.common.base.Charsets;
-import org.apache.hadoop.io.compress.DefaultCodec;
import com.google.common.io.Files;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapreduce.Job;
@@ -48,7 +48,6 @@ import org.apache.spark.partial.PartialResult;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.util.StatCounter;
-
// The test suite itself is Serializable so that anonymous Function implementations can be
// serialized, as an alternative to converting these anonymous classes to static inner classes;
// see http://stackoverflow.com/questions/758570/.
diff --git a/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala b/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
index 1121e06e2e..20c503d30c 100644
--- a/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
@@ -20,9 +20,9 @@ package org.apache.spark
import java.util.concurrent.Semaphore
import scala.concurrent.Await
+import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.future
-import scala.concurrent.ExecutionContext.Implicits.global
import org.scalatest.{BeforeAndAfter, FunSuite}
import org.scalatest.matchers.ShouldMatchers
@@ -30,7 +30,6 @@ import org.scalatest.matchers.ShouldMatchers
import org.apache.spark.SparkContext._
import org.apache.spark.scheduler.{SparkListenerTaskStart, SparkListener}
-
/**
* Test suite for cancelling running jobs. We run the cancellation tasks for single job action
* (e.g. count) as well as multi-job action (e.g. take). We test the local and cluster schedulers
diff --git a/core/src/test/scala/org/apache/spark/LocalSparkContext.scala b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
index 3ac706110e..4b972f88a9 100644
--- a/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
+++ b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
@@ -17,12 +17,11 @@
package org.apache.spark
-import org.scalatest.Suite
-import org.scalatest.BeforeAndAfterEach
-import org.scalatest.BeforeAndAfterAll
-
import org.jboss.netty.logging.InternalLoggerFactory
import org.jboss.netty.logging.Slf4JLoggerFactory
+import org.scalatest.BeforeAndAfterAll
+import org.scalatest.BeforeAndAfterEach
+import org.scalatest.Suite
/** Manages a local `sc` {@link SparkContext} variable, correctly stopping it after each test. */
trait LocalSparkContext extends BeforeAndAfterEach with BeforeAndAfterAll { self: Suite =>
diff --git a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
index 930c2523ca..6c1e325f6f 100644
--- a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
@@ -17,13 +17,14 @@
package org.apache.spark
-import org.scalatest.FunSuite
+import scala.concurrent.Await
import akka.actor._
+import org.scalatest.FunSuite
+
import org.apache.spark.scheduler.MapStatus
import org.apache.spark.storage.BlockManagerId
import org.apache.spark.util.AkkaUtils
-import scala.concurrent.Await
class MapOutputTrackerSuite extends FunSuite with LocalSparkContext {
private val conf = new SparkConf
diff --git a/core/src/test/scala/org/apache/spark/PartitioningSuite.scala b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
index 1c5d5ea436..4305686d3a 100644
--- a/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
@@ -18,13 +18,12 @@
package org.apache.spark
import scala.math.abs
-import scala.collection.mutable.ArrayBuffer
import org.scalatest.{FunSuite, PrivateMethodTester}
import org.apache.spark.SparkContext._
-import org.apache.spark.util.StatCounter
import org.apache.spark.rdd.RDD
+import org.apache.spark.util.StatCounter
class PartitioningSuite extends FunSuite with SharedSparkContext with PrivateMethodTester {
diff --git a/core/src/test/scala/org/apache/spark/PipedRDDSuite.scala b/core/src/test/scala/org/apache/spark/PipedRDDSuite.scala
index 2e851d892d..3a0385a1b0 100644
--- a/core/src/test/scala/org/apache/spark/PipedRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PipedRDDSuite.scala
@@ -18,7 +18,6 @@
package org.apache.spark
import org.scalatest.FunSuite
-import SparkContext._
class PipedRDDSuite extends FunSuite with SharedSparkContext {
diff --git a/core/src/test/scala/org/apache/spark/SharedSparkContext.scala b/core/src/test/scala/org/apache/spark/SharedSparkContext.scala
index c650ef4ed5..0b6511a80d 100644
--- a/core/src/test/scala/org/apache/spark/SharedSparkContext.scala
+++ b/core/src/test/scala/org/apache/spark/SharedSparkContext.scala
@@ -17,8 +17,8 @@
package org.apache.spark
-import org.scalatest.Suite
import org.scalatest.BeforeAndAfterAll
+import org.scalatest.Suite
/** Shares a local `SparkContext` between all tests in a suite and closes it at the end */
trait SharedSparkContext extends BeforeAndAfterAll { self: Suite =>
diff --git a/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
index e121b162ad..29d428aa7d 100644
--- a/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
@@ -19,7 +19,6 @@ package org.apache.spark
import org.scalatest.BeforeAndAfterAll
-
class ShuffleNettySuite extends ShuffleSuite with BeforeAndAfterAll {
// This test suite should run all tests in ShuffleSuite with Netty shuffle mode.
diff --git a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
index db717865db..abea36f7c8 100644
--- a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
@@ -22,10 +22,9 @@ import org.scalatest.matchers.ShouldMatchers
import org.apache.spark.SparkContext._
import org.apache.spark.ShuffleSuite.NonJavaSerializableClass
-import org.apache.spark.rdd.{RDD, SubtractedRDD, CoGroupedRDD, OrderedRDDFunctions, ShuffledRDD}
-import org.apache.spark.util.MutablePair
+import org.apache.spark.rdd.{CoGroupedRDD, OrderedRDDFunctions, RDD, ShuffledRDD, SubtractedRDD}
import org.apache.spark.serializer.KryoSerializer
-
+import org.apache.spark.util.MutablePair
class ShuffleSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
test("groupByKey without compression") {
diff --git a/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
index 939fe51801..5cb49d9a7f 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
@@ -18,7 +18,6 @@
package org.apache.spark
import org.scalatest.FunSuite
-import org.apache.spark.SparkContext._
class SparkContextInfoSuite extends FunSuite with LocalSparkContext {
test("getPersistentRDDs only returns RDDs that are marked as cached") {
diff --git a/core/src/test/scala/org/apache/spark/ThreadingSuite.scala b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
index 75d6493e33..b5383d553a 100644
--- a/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
@@ -22,9 +22,6 @@ import java.util.concurrent.atomic.AtomicBoolean
import java.util.concurrent.atomic.AtomicInteger
import org.scalatest.FunSuite
-import org.scalatest.BeforeAndAfter
-
-import SparkContext._
/**
* Holds state shared across task threads in some ThreadingSuite tests.
diff --git a/core/src/test/scala/org/apache/spark/UnpersistSuite.scala b/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
index 768ca3850e..42ff059e01 100644
--- a/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
+++ b/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
@@ -19,8 +19,7 @@ package org.apache.spark
import org.scalatest.FunSuite
import org.scalatest.concurrent.Timeouts._
-import org.scalatest.time.{Span, Millis}
-import org.apache.spark.SparkContext._
+import org.scalatest.time.{Millis, Span}
class UnpersistSuite extends FunSuite with LocalSparkContext {
test("unpersist RDD") {
diff --git a/core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala b/core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala
index 618b9c113b..4f87fd8654 100644
--- a/core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala
@@ -17,16 +17,7 @@
package org.apache.spark
-import scala.collection.immutable.NumericRange
-
import org.scalatest.FunSuite
-import org.scalatest.prop.Checkers
-import org.scalacheck.Arbitrary._
-import org.scalacheck.Gen
-import org.scalacheck.Prop._
-
-import SparkContext._
-
object ZippedPartitionsSuite {
def procZippedData(i: Iterator[Int], s: Iterator[String], d: Iterator[Double]) : Iterator[Int] = {
diff --git a/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala b/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala
index 5bcebabc9a..7b866f08a0 100644
--- a/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala
@@ -17,11 +17,10 @@
package org.apache.spark.api.python
-import org.scalatest.FunSuite
-import org.scalatest.matchers.ShouldMatchers
-
import java.io.{ByteArrayOutputStream, DataOutputStream}
+import org.scalatest.FunSuite
+
class PythonRDDSuite extends FunSuite {
test("Writing large strings to the worker") {
diff --git a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
index 6445db0063..de866ed7ff 100644
--- a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
@@ -27,7 +27,7 @@ import org.scalatest.FunSuite
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, RecoveryState, WorkerInfo}
-import org.apache.spark.deploy.worker.{ExecutorRunner, DriverRunner}
+import org.apache.spark.deploy.worker.{DriverRunner, ExecutorRunner}
class JsonProtocolSuite extends FunSuite {
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala b/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
index 0c50261264..a2c131b0c9 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
@@ -19,15 +19,13 @@ package org.apache.spark.deploy.worker
import java.io.File
-import scala.collection.JavaConversions._
-
import org.mockito.Mockito._
import org.mockito.Matchers._
+import org.mockito.invocation.InvocationOnMock
+import org.mockito.stubbing.Answer
import org.scalatest.FunSuite
import org.apache.spark.deploy.{Command, DriverDescription}
-import org.mockito.stubbing.Answer
-import org.mockito.invocation.InvocationOnMock
class DriverRunnerTest extends FunSuite {
private def createDriverRunner() = {
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala b/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
index 4baa65659f..3cab8e7b37 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
@@ -21,7 +21,7 @@ import java.io.File
import org.scalatest.FunSuite
-import org.apache.spark.deploy.{ExecutorState, Command, ApplicationDescription}
+import org.apache.spark.deploy.{ApplicationDescription, Command, ExecutorState}
class ExecutorRunnerTest extends FunSuite {
test("command includes appId") {
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala
index 1f1d8d1380..0b5ed6d770 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala
@@ -17,11 +17,10 @@
package org.apache.spark.deploy.worker
-
+import akka.actor.{ActorSystem, AddressFromURIString, Props}
import akka.testkit.TestActorRef
-import org.scalatest.FunSuite
import akka.remote.DisassociatedEvent
-import akka.actor.{ActorSystem, AddressFromURIString, Props}
+import org.scalatest.FunSuite
class WorkerWatcherSuite extends FunSuite {
test("WorkerWatcher shuts down on valid disassociation") {
diff --git a/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala b/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
index 8d7546085f..68a0ea36aa 100644
--- a/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
+++ b/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
@@ -20,8 +20,8 @@ package org.apache.spark.io
import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import org.scalatest.FunSuite
-import org.apache.spark.SparkConf
+import org.apache.spark.SparkConf
class CompressionCodecSuite extends FunSuite {
val conf = new SparkConf(false)
diff --git a/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
index 71a2c6c498..c1e8b295df 100644
--- a/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
@@ -18,8 +18,9 @@
package org.apache.spark.metrics
import org.scalatest.{BeforeAndAfter, FunSuite}
-import org.apache.spark.deploy.master.MasterSource
+
import org.apache.spark.SparkConf
+import org.apache.spark.deploy.master.MasterSource
class MetricsSystemSuite extends FunSuite with BeforeAndAfter {
var filePath: String = _
diff --git a/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala b/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
index 0d4c10db8e..3b833f2e41 100644
--- a/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
@@ -30,7 +30,6 @@ import org.scalatest.time.SpanSugar._
import org.apache.spark.SparkContext._
import org.apache.spark.{SparkContext, SparkException, LocalSparkContext}
-
class AsyncRDDActionsSuite extends FunSuite with BeforeAndAfterAll with Timeouts {
@transient private var sc: SparkContext = _
diff --git a/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala
index 7f50a5a47c..a822bd18bf 100644
--- a/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala
@@ -17,14 +17,10 @@
package org.apache.spark.rdd
-import scala.math.abs
-import scala.collection.mutable.ArrayBuffer
-
import org.scalatest.FunSuite
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd._
import org.apache.spark._
+import org.apache.spark.SparkContext._
class DoubleRDDSuite extends FunSuite with SharedSparkContext {
// Verify tests on the histogram functionality. We test with both evenly
diff --git a/core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala
index 3d39a31252..7c7f69b261 100644
--- a/core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala
@@ -17,11 +17,12 @@
package org.apache.spark
-import org.scalatest.{ BeforeAndAfter, FunSuite }
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.JdbcRDD
import java.sql._
+import org.scalatest.{BeforeAndAfter, FunSuite}
+
+import org.apache.spark.rdd.JdbcRDD
+
class JdbcRDDSuite extends FunSuite with BeforeAndAfter with LocalSparkContext {
before {
diff --git a/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
index 5da538a1dd..fa5c9b10fe 100644
--- a/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
@@ -22,12 +22,11 @@ import scala.collection.mutable.HashSet
import scala.util.Random
import org.scalatest.FunSuite
-
import com.google.common.io.Files
+
import org.apache.spark.SparkContext._
import org.apache.spark.{Partitioner, SharedSparkContext}
-
class PairRDDFunctionsSuite extends FunSuite with SharedSparkContext {
test("groupByKey") {
val pairs = sc.parallelize(Array((1, 1), (1, 2), (1, 3), (2, 1)))
diff --git a/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala b/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala
index a80afdee7e..a4381a8b97 100644
--- a/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala
@@ -19,11 +19,11 @@ package org.apache.spark.rdd
import scala.collection.immutable.NumericRange
-import org.scalatest.FunSuite
-import org.scalatest.prop.Checkers
import org.scalacheck.Arbitrary._
import org.scalacheck.Gen
import org.scalacheck.Prop._
+import org.scalatest.FunSuite
+import org.scalatest.prop.Checkers
class ParallelCollectionSplitSuite extends FunSuite with Checkers {
test("one element per slice") {
diff --git a/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala
index 53a7b7c44d..956c2b9cbd 100644
--- a/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala
@@ -18,8 +18,8 @@
package org.apache.spark.rdd
import org.scalatest.FunSuite
-import org.apache.spark.{TaskContext, Partition, SharedSparkContext}
+import org.apache.spark.{Partition, SharedSparkContext, TaskContext}
class PartitionPruningRDDSuite extends FunSuite with SharedSparkContext {
diff --git a/core/src/test/scala/org/apache/spark/rdd/PartitionwiseSampledRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PartitionwiseSampledRDDSuite.scala
index cfe96fb3f7..00c273df63 100644
--- a/core/src/test/scala/org/apache/spark/rdd/PartitionwiseSampledRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/PartitionwiseSampledRDDSuite.scala
@@ -18,6 +18,7 @@
package org.apache.spark.rdd
import org.scalatest.FunSuite
+
import org.apache.spark.SharedSparkContext
import org.apache.spark.util.random.RandomSampler
diff --git a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
index 308c7cc8c3..60bcada552 100644
--- a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
@@ -18,13 +18,15 @@
package org.apache.spark.rdd
import scala.collection.mutable.HashMap
+import scala.collection.parallel.mutable
+
import org.scalatest.FunSuite
import org.scalatest.concurrent.Timeouts._
-import org.scalatest.time.{Span, Millis}
+import org.scalatest.time.{Millis, Span}
+
+import org.apache.spark._
import org.apache.spark.SparkContext._
import org.apache.spark.rdd._
-import scala.collection.parallel.mutable
-import org.apache.spark._
class RDDSuite extends FunSuite with SharedSparkContext {
diff --git a/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala b/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala
index e836119942..d0619559bb 100644
--- a/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala
@@ -18,7 +18,6 @@
package org.apache.spark.rdd
import org.scalatest.FunSuite
-import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.ShouldMatchers
import org.apache.spark.{Logging, SharedSparkContext}
diff --git a/core/src/test/scala/org/apache/spark/scheduler/ClusterSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/ClusterSchedulerSuite.scala
index 98ea4cb561..85e929925e 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/ClusterSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/ClusterSchedulerSuite.scala
@@ -17,13 +17,13 @@
package org.apache.spark.scheduler
-import org.scalatest.FunSuite
-import org.scalatest.BeforeAndAfter
+import java.util.Properties
-import org.apache.spark._
import scala.collection.mutable.ArrayBuffer
-import java.util.Properties
+import org.scalatest.FunSuite
+
+import org.apache.spark._
class FakeTaskSetManager(
initPriority: Int,
diff --git a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
index f0236ef1e9..ad890b4e4d 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -20,11 +20,12 @@ package org.apache.spark.scheduler
import scala.Tuple2
import scala.collection.mutable.{HashMap, Map}
+import org.scalatest.{BeforeAndAfter, FunSuite}
+
import org.apache.spark._
import org.apache.spark.rdd.RDD
import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
import org.apache.spark.storage.{BlockId, BlockManagerId, BlockManagerMaster}
-import org.scalatest.{BeforeAndAfter, FunSuite}
/**
* Tests for DAGScheduler. These tests directly call the event processing functions in DAGScheduler
diff --git a/core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala
index 29102913c7..25fe63c265 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala
@@ -17,11 +17,6 @@
package org.apache.spark.scheduler
-import java.util.Properties
-import java.util.concurrent.LinkedBlockingQueue
-
-import scala.collection.mutable
-
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
@@ -29,7 +24,6 @@ import org.apache.spark._
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD
-
class JobLoggerSuite extends FunSuite with LocalSparkContext with ShouldMatchers {
val WAIT_TIMEOUT_MILLIS = 10000
diff --git a/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
index e31a116a75..8bb5317cd2 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
@@ -19,11 +19,12 @@ package org.apache.spark.scheduler
import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfter
+
+import org.apache.spark.LocalSparkContext
+import org.apache.spark.Partition
+import org.apache.spark.SparkContext
import org.apache.spark.TaskContext
import org.apache.spark.rdd.RDD
-import org.apache.spark.SparkContext
-import org.apache.spark.Partition
-import org.apache.spark.LocalSparkContext
class TaskContextSuite extends FunSuite with BeforeAndAfter with LocalSparkContext {
diff --git a/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala
index 4b52d9651e..ac07f60e28 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala
@@ -21,7 +21,7 @@ import java.nio.ByteBuffer
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FunSuite}
-import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkEnv}
+import org.apache.spark.{LocalSparkContext, SparkContext, SparkEnv}
import org.apache.spark.storage.TaskResultBlockId
/**
diff --git a/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala
index de321c45b5..34a7d8cefe 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala
@@ -24,8 +24,7 @@ import org.scalatest.FunSuite
import org.apache.spark._
import org.apache.spark.executor.TaskMetrics
-import java.nio.ByteBuffer
-import org.apache.spark.util.{Utils, FakeClock}
+import org.apache.spark.util.FakeClock
class FakeDAGScheduler(taskScheduler: FakeClusterScheduler) extends DAGScheduler(taskScheduler) {
override def taskStarted(task: Task[_], taskInfo: TaskInfo) {
diff --git a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
index 3898583275..5d4673aebe 100644
--- a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
@@ -20,9 +20,9 @@ package org.apache.spark.serializer
import scala.collection.mutable
import com.esotericsoftware.kryo.Kryo
-
import org.scalatest.FunSuite
-import org.apache.spark.{SparkConf, SharedSparkContext}
+
+import org.apache.spark.SharedSparkContext
import org.apache.spark.serializer.KryoTest._
class KryoSerializerSuite extends FunSuite with SharedSparkContext {
diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
index 85011c6451..9f011d9c8d 100644
--- a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
@@ -20,18 +20,17 @@ package org.apache.spark.storage
import java.nio.ByteBuffer
import akka.actor._
-
-import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfter
+import org.scalatest.FunSuite
import org.scalatest.PrivateMethodTester
import org.scalatest.concurrent.Eventually._
import org.scalatest.concurrent.Timeouts._
import org.scalatest.matchers.ShouldMatchers._
import org.scalatest.time.SpanSugar._
-import org.apache.spark.util.{SizeEstimator, Utils, AkkaUtils, ByteBufferInputStream}
-import org.apache.spark.serializer.{JavaSerializer, KryoSerializer}
import org.apache.spark.{SparkConf, SparkContext}
+import org.apache.spark.serializer.{JavaSerializer, KryoSerializer}
+import org.apache.spark.util.{AkkaUtils, ByteBufferInputStream, SizeEstimator, Utils}
class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodTester {
private val conf = new SparkConf(false)
diff --git a/core/src/test/scala/org/apache/spark/storage/DiskBlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/DiskBlockManagerSuite.scala
index 829f389460..62f9b3cc7b 100644
--- a/core/src/test/scala/org/apache/spark/storage/DiskBlockManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/DiskBlockManagerSuite.scala
@@ -22,9 +22,10 @@ import java.io.{File, FileWriter}
import scala.collection.mutable
import com.google.common.io.Files
-import org.apache.spark.SparkConf
import org.scalatest.{BeforeAndAfterEach, FunSuite}
+import org.apache.spark.SparkConf
+
class DiskBlockManagerSuite extends FunSuite with BeforeAndAfterEach {
private val testConf = new SparkConf(false)
val rootDir0 = Files.createTempDir()
diff --git a/core/src/test/scala/org/apache/spark/ui/UISuite.scala b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
index c17bbfe7d3..20ebb1897e 100644
--- a/core/src/test/scala/org/apache/spark/ui/UISuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
@@ -17,10 +17,12 @@
package org.apache.spark.ui
-import scala.util.{Failure, Success, Try}
import java.net.ServerSocket
-import org.scalatest.FunSuite
+
+import scala.util.{Failure, Success, Try}
+
import org.eclipse.jetty.server.Server
+import org.scalatest.FunSuite
class UISuite extends FunSuite {
test("jetty port increases under contention") {
diff --git a/core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala b/core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala
index 67a57a0e7f..8ca863e8b3 100644
--- a/core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala
@@ -18,10 +18,10 @@
package org.apache.spark.ui.jobs
import org.scalatest.FunSuite
-import org.apache.spark.scheduler._
+
import org.apache.spark.{LocalSparkContext, SparkContext, Success}
-import org.apache.spark.scheduler.SparkListenerTaskStart
import org.apache.spark.executor.{ShuffleReadMetrics, TaskMetrics}
+import org.apache.spark.scheduler._
class JobProgressListenerSuite extends FunSuite with LocalSparkContext {
test("test executor id to summary") {
diff --git a/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite.scala b/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite.scala
index de4871d043..439e5644e2 100644
--- a/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite.scala
@@ -17,12 +17,10 @@
package org.apache.spark.util
-import java.io.NotSerializableException
-
import org.scalatest.FunSuite
-import org.apache.spark.SparkContext
import org.apache.spark.LocalSparkContext._
+import org.apache.spark.SparkContext
class ClosureCleanerSuite extends FunSuite {
test("closures inside an object") {
diff --git a/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala b/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
index 45867463a5..e1446cbc90 100644
--- a/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
@@ -17,10 +17,12 @@
package org.apache.spark.util
+import java.util.NoSuchElementException
+
+import scala.collection.mutable.Buffer
+
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
-import scala.collection.mutable.Buffer
-import java.util.NoSuchElementException
class NextIteratorSuite extends FunSuite with ShouldMatchers {
test("one iteration") {
diff --git a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
index 11ebdc352b..b583a8bd46 100644
--- a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
@@ -17,10 +17,9 @@
package org.apache.spark.util
-import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfterAll
+import org.scalatest.FunSuite
import org.scalatest.PrivateMethodTester
-import org.apache.spark.SparkContext
class DummyClass1 {}
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index 7030ba4858..8f55b2372c 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -17,13 +17,15 @@
package org.apache.spark.util
+import scala.util.Random
+
+import java.io.{ByteArrayOutputStream, ByteArrayInputStream, FileOutputStream}
+import java.nio.{ByteBuffer, ByteOrder}
+
import com.google.common.base.Charsets
import com.google.common.io.Files
-import java.io.{ByteArrayOutputStream, ByteArrayInputStream, FileOutputStream, File}
-import java.nio.{ByteBuffer, ByteOrder}
-import org.scalatest.FunSuite
import org.apache.commons.io.FileUtils
-import scala.util.Random
+import org.scalatest.FunSuite
class UtilsSuite extends FunSuite {
diff --git a/core/src/test/scala/org/apache/spark/util/collection/AppendOnlyMapSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/AppendOnlyMapSuite.scala
index f44442f1a5..52c7288e18 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/AppendOnlyMapSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/AppendOnlyMapSuite.scala
@@ -17,10 +17,11 @@
package org.apache.spark.util.collection
+import java.util.Comparator
+
import scala.collection.mutable.HashSet
import org.scalatest.FunSuite
-import java.util.Comparator
class AppendOnlyMapSuite extends FunSuite {
test("initialization") {
diff --git a/core/src/test/scala/org/apache/spark/util/collection/BitSetSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/BitSetSuite.scala
index 0f1ab3d20e..c32183c134 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/BitSetSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/BitSetSuite.scala
@@ -19,7 +19,6 @@ package org.apache.spark.util.collection
import org.scalatest.FunSuite
-
class BitSetSuite extends FunSuite {
test("basic set and get") {
diff --git a/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala
index e9b62ea70d..b024c89d94 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala
@@ -18,8 +18,10 @@
package org.apache.spark.util.collection
import scala.collection.mutable.HashSet
+
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
+
import org.apache.spark.util.SizeEstimator
class OpenHashMapSuite extends FunSuite with ShouldMatchers {
diff --git a/core/src/test/scala/org/apache/spark/util/collection/OpenHashSetSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/OpenHashSetSuite.scala
index 1b24f8f287..ff4a98f5dc 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/OpenHashSetSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/OpenHashSetSuite.scala
@@ -22,7 +22,6 @@ import org.scalatest.matchers.ShouldMatchers
import org.apache.spark.util.SizeEstimator
-
class OpenHashSetSuite extends FunSuite with ShouldMatchers {
test("size for specialized, primitive int") {
diff --git a/core/src/test/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMapSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMapSuite.scala
index 3b60decee9..e3fca17390 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMapSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMapSuite.scala
@@ -18,8 +18,10 @@
package org.apache.spark.util.collection
import scala.collection.mutable.HashSet
+
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
+
import org.apache.spark.util.SizeEstimator
class PrimitiveKeyOpenHashMapSuite extends FunSuite with ShouldMatchers {
diff --git a/core/src/test/scala/org/apache/spark/util/random/RandomSamplerSuite.scala b/core/src/test/scala/org/apache/spark/util/random/RandomSamplerSuite.scala
index 0f4792cd3b..7576c9a51f 100644
--- a/core/src/test/scala/org/apache/spark/util/random/RandomSamplerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/random/RandomSamplerSuite.scala
@@ -17,11 +17,11 @@
package org.apache.spark.util.random
-import org.scalatest.{BeforeAndAfter, FunSuite}
-import org.scalatest.mock.EasyMockSugar
-
import java.util.Random
+
import cern.jet.random.Poisson
+import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.mock.EasyMockSugar
class RandomSamplerSuite extends FunSuite with BeforeAndAfter with EasyMockSugar {
diff --git a/core/src/test/scala/org/apache/spark/util/random/XORShiftRandomSuite.scala b/core/src/test/scala/org/apache/spark/util/random/XORShiftRandomSuite.scala
index 352aa94219..c51d12bfe0 100644
--- a/core/src/test/scala/org/apache/spark/util/random/XORShiftRandomSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/random/XORShiftRandomSuite.scala
@@ -19,6 +19,7 @@ package org.apache.spark.util.random
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
+
import org.apache.spark.util.Utils.times
class XORShiftRandomSuite extends FunSuite with ShouldMatchers {