From b5f02d6743ecb1633b7b13382f76cb8bfc2aa95c Mon Sep 17 00:00:00 2001
From: Dongjoon Hyun
Date: Thu, 3 Mar 2016 10:12:32 +0000
Subject: [SPARK-13583][CORE][STREAMING] Remove unused imports and add checkstyle rule

## What changes were proposed in this pull request?

After SPARK-6990, `dev/lint-java` keeps the Java code healthy and saves a significant amount of PR review time. This issue aims to remove unused imports from the Java/Scala code and to add an `UnusedImports` checkstyle rule to help developers.

## How was this patch tested?

```
./dev/lint-java
./build/sbt compile
```

Author: Dongjoon Hyun

Closes #11438 from dongjoon-hyun/SPARK-13583.
---
 .../java/org/apache/spark/shuffle/sort/PackedRecordPointer.java | 4 +---
 .../java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java | 1 -
 .../apache/spark/util/collection/unsafe/sort/PrefixComparators.java | 1 -
 .../util/collection/unsafe/sort/RecordPointerAndKeyPrefix.java | 4 +---
 .../src/main/scala/org/apache/spark/ExecutorAllocationManager.scala | 2 +-
 core/src/main/scala/org/apache/spark/HeartbeatReceiver.scala | 1 -
 core/src/main/scala/org/apache/spark/Partitioner.scala | 4 ++--
 core/src/main/scala/org/apache/spark/SparkContext.scala | 1 -
 .../main/scala/org/apache/spark/TaskNotSerializableException.scala | 2 --
 core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala | 3 ---
 .../main/scala/org/apache/spark/broadcast/BroadcastFactory.scala | 1 -
 core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala | 2 +-
 core/src/main/scala/org/apache/spark/deploy/master/DriverInfo.scala | 1 -
 core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala | 2 +-
 core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala | 1 -
 core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala | 1 -
 core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala | 2 +-
 core/src/main/scala/org/apache/spark/rdd/LocalCheckpointRDD.scala | 2 +-
 .../main/scala/org/apache/spark/rdd/ReliableRDDCheckpointData.scala | 1 -
 core/src/main/scala/org/apache/spark/rpc/netty/NettyRpcEnv.scala | 1 -
 core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala | 1 -
 .../main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala | 2 --
 core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala | 1 -
 .../spark/scheduler/cluster/CoarseGrainedClusterMessage.scala | 2 +-
 core/src/main/scala/org/apache/spark/serializer/Serializer.scala | 4 ++--
 .../src/main/scala/org/apache/spark/shuffle/BaseShuffleHandle.scala | 3 +--
 .../main/scala/org/apache/spark/shuffle/ShuffleBlockResolver.scala | 2 --
 .../scala/org/apache/spark/shuffle/hash/HashShuffleWriter.scala | 1 -
 .../scala/org/apache/spark/shuffle/sort/SortShuffleWriter.scala | 1 -
 .../main/scala/org/apache/spark/storage/BlockManagerMaster.scala | 2 +-
 core/src/main/scala/org/apache/spark/storage/DiskStore.scala | 1 -
 core/src/main/scala/org/apache/spark/ui/JettyUtils.scala | 2 +-
 .../scala/org/apache/spark/ui/exec/ExecutorThreadDumpPage.scala | 1 -
 core/src/main/scala/org/apache/spark/util/SizeEstimator.scala | 1 -
 .../main/scala/org/apache/spark/util/collection/AppendOnlyMap.scala | 2 +-
 .../src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java | 3 ---
 .../java/org/apache/spark/serializer/TestJavaSerializerImpl.java | 1 -
 core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala | 2 +-
 core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala | 3 ---
 core/src/test/scala/org/apache/spark/ThreadingSuite.scala | 4 +---
 .../test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala | 2 +-
 .../test/scala/org/apache/spark/deploy/client/AppClientSuite.scala | 1 -
 .../org/apache/spark/deploy/history/FsHistoryProviderSuite.scala | 6 +-----
 .../org/apache/spark/input/WholeTextFileRecordReaderSuite.scala | 2 +-
 .../test/scala/org/apache/spark/launcher/LauncherBackendSuite.scala | 1 -
 core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala | 4 +---
 core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala | 2 +-
 core/src/test/scala/org/apache/spark/rpc/netty/InboxSuite.scala | 2 +-
 .../scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala | 2 +-
 core/src/test/scala/org/apache/spark/ui/UISuite.scala | 1 -
 .../test/scala/org/apache/spark/util/ResetSystemProperties.scala | 2 --
 core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala | 2 +-
 52 files changed, 26 insertions(+), 77 deletions(-)

(limited to 'core')

diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/PackedRecordPointer.java b/core/src/main/java/org/apache/spark/shuffle/sort/PackedRecordPointer.java
index f8f2b220e1..f7a6c68be9 100644
--- a/core/src/main/java/org/apache/spark/shuffle/sort/PackedRecordPointer.java
+++ b/core/src/main/java/org/apache/spark/shuffle/sort/PackedRecordPointer.java
@@ -17,8 +17,6 @@
 
 package org.apache.spark.shuffle.sort;
 
-import org.apache.spark.memory.TaskMemoryManager;
-
 /**
  * Wrapper around an 8-byte word that holds a 24-bit partition number and 40-bit record pointer.
  *
@@ -28,7 +26,7 @@ import org.apache.spark.memory.TaskMemoryManager;
  *
  * This implies that the maximum addressable page size is 2^27 bits = 128 megabytes, assuming that
  * our offsets in pages are not 8-byte-word-aligned. Since we have 2^13 pages (based off the
- * 13-bit page numbers assigned by {@link TaskMemoryManager}), this
+ * 13-bit page numbers assigned by {@link org.apache.spark.memory.TaskMemoryManager}), this
  * implies that we can address 2^13 * 128 megabytes = 1 terabyte of RAM per task.
  *
  * Assuming word-alignment would allow for a 1 gigabyte maximum page size, but we leave this

diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java b/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java
index 128a82579b..3f4402bd3a 100644
--- a/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java
+++ b/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java
@@ -25,7 +25,6 @@ import java.util.Iterator;
 
 import scala.Option;
 import scala.Product2;
 import scala.collection.JavaConverters;
-import scala.collection.immutable.Map;
 import scala.reflect.ClassTag;
 import scala.reflect.ClassTag$;

diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
index d2bf297c6c..c2a8f429be 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
@@ -20,7 +20,6 @@ package org.apache.spark.util.collection.unsafe.sort;
 import com.google.common.primitives.UnsignedLongs;
 
 import org.apache.spark.annotation.Private;
-import org.apache.spark.unsafe.Platform;
 import org.apache.spark.unsafe.types.ByteArray;
 import org.apache.spark.unsafe.types.UTF8String;
 import org.apache.spark.util.Utils;

diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RecordPointerAndKeyPrefix.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RecordPointerAndKeyPrefix.java
index dbf6770e07..de92b8db47 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RecordPointerAndKeyPrefix.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RecordPointerAndKeyPrefix.java
@@ -17,11 +17,9 @@
 
 package org.apache.spark.util.collection.unsafe.sort;
 
-import org.apache.spark.memory.TaskMemoryManager;
-
 final class RecordPointerAndKeyPrefix {
   /**
-   * A pointer to a record; see {@link TaskMemoryManager} for a
+   * A pointer to a record; see {@link org.apache.spark.memory.TaskMemoryManager} for a
    * description of how these addresses are encoded.
    */
   public long recordPointer;

diff --git a/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala b/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
index db143d7341..9b8279f43e 100644
--- a/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
+++ b/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
@@ -26,7 +26,7 @@ import com.codahale.metrics.{Gauge, MetricRegistry}
 
 import org.apache.spark.metrics.source.Source
 import org.apache.spark.scheduler._
-import org.apache.spark.util.{Clock, SystemClock, ThreadUtils, Utils}
+import org.apache.spark.util.{Clock, SystemClock, ThreadUtils}
 
 /**
  * An agent that dynamically allocates and removes executors based on the workload.
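A side note on the PackedRecordPointer layout quoted above: the comment describes a 64-bit word split into a 24-bit partition number, a 13-bit page number, and a 27-bit offset in page. Below is a minimal, self-contained sketch of that bit arithmetic; the class and method names are hypothetical, and the real class also routes record addresses through TaskMemoryManager, so this illustrates the packing scheme only.

```
// Hypothetical illustration of the 24/13/27-bit split described in the
// PackedRecordPointer comment above; not Spark's actual implementation.
public final class PackedPointerSketch {
  private static final long PARTITION_MASK = (1L << 24) - 1; // top 24 bits
  private static final long PAGE_MASK = (1L << 13) - 1;      // middle 13 bits
  private static final long OFFSET_MASK = (1L << 27) - 1;    // low 27 bits

  /** Packs partition id, page number, and in-page offset into one long. */
  static long pack(int partitionId, int pageNumber, long offsetInPage) {
    return ((partitionId & PARTITION_MASK) << 40)
        | ((pageNumber & PAGE_MASK) << 27)
        | (offsetInPage & OFFSET_MASK);
  }

  static int partitionId(long packed) { return (int) ((packed >>> 40) & PARTITION_MASK); }
  static int pageNumber(long packed) { return (int) ((packed >>> 27) & PAGE_MASK); }
  static long offsetInPage(long packed) { return packed & OFFSET_MASK; }

  public static void main(String[] args) {
    long packed = pack(42, 7, 123456);
    // Prints "42 7 123456": all three fields survive a round trip.
    System.out.println(partitionId(packed) + " " + pageNumber(packed)
        + " " + offsetInPage(packed));
  }
}
```

This also makes the quoted limits concrete: 2^13 pages of at most 2^27 addressable units each gives the 2^13 * 128 megabytes = 1 terabyte-per-task bound stated in the comment.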
diff --git a/core/src/main/scala/org/apache/spark/HeartbeatReceiver.scala b/core/src/main/scala/org/apache/spark/HeartbeatReceiver.scala
index 45b20c0e8d..7f474ed591 100644
--- a/core/src/main/scala/org/apache/spark/HeartbeatReceiver.scala
+++ b/core/src/main/scala/org/apache/spark/HeartbeatReceiver.scala
@@ -22,7 +22,6 @@ import java.util.concurrent.{ScheduledFuture, TimeUnit}
 import scala.collection.mutable
 import scala.concurrent.Future
 
-import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.rpc.{RpcCallContext, RpcEnv, ThreadSafeRpcEndpoint}
 import org.apache.spark.scheduler._
 import org.apache.spark.storage.BlockManagerId

diff --git a/core/src/main/scala/org/apache/spark/Partitioner.scala b/core/src/main/scala/org/apache/spark/Partitioner.scala
index 976c19f2b0..98c3abe93b 100644
--- a/core/src/main/scala/org/apache/spark/Partitioner.scala
+++ b/core/src/main/scala/org/apache/spark/Partitioner.scala
@@ -21,13 +21,13 @@ import java.io.{IOException, ObjectInputStream, ObjectOutputStream}
 
 import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
-import scala.reflect.{classTag, ClassTag}
+import scala.reflect.ClassTag
 import scala.util.hashing.byteswap32
 
 import org.apache.spark.rdd.{PartitionPruningRDD, RDD}
 import org.apache.spark.serializer.JavaSerializer
 import org.apache.spark.util.{CollectionsUtils, Utils}
-import org.apache.spark.util.random.{SamplingUtils, XORShiftRandom}
+import org.apache.spark.util.random.SamplingUtils
 
 /**
  * An object that defines how the elements in a key-value pair RDD are partitioned by key.

diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index b503c6184a..9f5a72bae0 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -51,7 +51,6 @@ import org.apache.spark.deploy.{LocalSparkCluster, SparkHadoopUtil}
 import org.apache.spark.input.{FixedLengthBinaryInputFormat, PortableDataStream, StreamInputFormat, WholeTextFileInputFormat}
 import org.apache.spark.io.CompressionCodec
-import org.apache.spark.metrics.MetricsSystem
 import org.apache.spark.partial.{ApproximateEvaluator, PartialResult}
 import org.apache.spark.rdd._
 import org.apache.spark.rpc.RpcEndpointRef

diff --git a/core/src/main/scala/org/apache/spark/TaskNotSerializableException.scala b/core/src/main/scala/org/apache/spark/TaskNotSerializableException.scala
index 9df61062e1..0cb93f131a 100644
--- a/core/src/main/scala/org/apache/spark/TaskNotSerializableException.scala
+++ b/core/src/main/scala/org/apache/spark/TaskNotSerializableException.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark
 
-import org.apache.spark.annotation.DeveloperApi
-
 /**
  * Exception thrown when a task cannot be serialized.
  */

diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
index ed312770ee..20d6c9341b 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.api.java
 
-import java.util.Comparator
-
 import scala.language.implicitConversions
 import scala.reflect.ClassTag
 
@@ -191,7 +189,6 @@ class JavaRDD[T](val rdd: RDD[T])(implicit val classTag: ClassTag[T])
    * Return this RDD sorted by the given key function.
    */
   def sortBy[S](f: JFunction[T, S], ascending: Boolean, numPartitions: Int): JavaRDD[T] = {
-    import scala.collection.JavaConverters._
     def fn: (T) => S = (x: T) => f.call(x)
     import com.google.common.collect.Ordering  // shadows scala.math.Ordering
     implicit val ordering = Ordering.natural().asInstanceOf[Ordering[S]]

diff --git a/core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala b/core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala
index 7f35ac4747..fd7b4fc88b 100644
--- a/core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala
@@ -21,7 +21,6 @@ import scala.reflect.ClassTag
 
 import org.apache.spark.SecurityManager
 import org.apache.spark.SparkConf
-import org.apache.spark.annotation.DeveloperApi
 
 /**
  * An interface for all the broadcast implementations in Spark (to allow

diff --git a/core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala b/core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala
index 4911c3be3a..81718e0c44 100644
--- a/core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala
@@ -26,7 +26,7 @@ import scala.collection.JavaConverters._
 
 import com.google.common.io.{ByteStreams, Files}
 
-import org.apache.spark.{Logging, SparkException}
+import org.apache.spark.Logging
 import org.apache.spark.api.r.RUtils
 import org.apache.spark.util.{RedirectThread, Utils}

diff --git a/core/src/main/scala/org/apache/spark/deploy/master/DriverInfo.scala b/core/src/main/scala/org/apache/spark/deploy/master/DriverInfo.scala
index b197dbcbfe..8d5edae050 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/DriverInfo.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/DriverInfo.scala
@@ -19,7 +19,6 @@ package org.apache.spark.deploy.master
 
 import java.util.Date
 
-import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.deploy.DriverDescription
 import org.apache.spark.util.Utils

diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
index 1c24c631ee..283db6c4fe 100755
--- a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
@@ -37,7 +37,7 @@ import org.apache.spark.deploy.master.{DriverState, Master}
 import org.apache.spark.deploy.worker.ui.WorkerWebUI
 import org.apache.spark.metrics.MetricsSystem
 import org.apache.spark.rpc._
-import org.apache.spark.util.{SignalLogger, ThreadUtils, Utils}
+import org.apache.spark.util.{ThreadUtils, Utils}
 
 private[deploy] class Worker(
     override val rpcEnv: RpcEnv,

diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala
index 0ca90640ae..09ae64af8a 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala
@@ -18,7 +18,6 @@
 package org.apache.spark.deploy.worker.ui
 
 import java.io.File
-import java.net.URI
 import javax.servlet.http.HttpServletRequest
 
 import scala.xml.Node

diff --git a/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
index bd61d04d42..c9606600ed 100644
--- a/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
@@ -18,7 +18,6 @@
 package org.apache.spark.rdd
 
 import org.apache.spark.{Logging, TaskContext}
-import org.apache.spark.annotation.Experimental
 import org.apache.spark.partial.BoundedDouble
 import org.apache.spark.partial.MeanEvaluator
 import org.apache.spark.partial.PartialResult

diff --git a/core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala b/core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala
index 469962db67..8cbe80d650 100644
--- a/core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.rdd
 
-import java.sql.{Connection, PreparedStatement, ResultSet}
+import java.sql.{Connection, ResultSet}
 
 import scala.reflect.ClassTag

diff --git a/core/src/main/scala/org/apache/spark/rdd/LocalCheckpointRDD.scala b/core/src/main/scala/org/apache/spark/rdd/LocalCheckpointRDD.scala
index a163bbd264..503aa0dffc 100644
--- a/core/src/main/scala/org/apache/spark/rdd/LocalCheckpointRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/LocalCheckpointRDD.scala
@@ -19,7 +19,7 @@ package org.apache.spark.rdd
 
 import scala.reflect.ClassTag
 
-import org.apache.spark.{Partition, SparkContext, SparkEnv, SparkException, TaskContext}
+import org.apache.spark.{Partition, SparkContext, SparkException, TaskContext}
 import org.apache.spark.storage.RDDBlockId
 
 /**

diff --git a/core/src/main/scala/org/apache/spark/rdd/ReliableRDDCheckpointData.scala b/core/src/main/scala/org/apache/spark/rdd/ReliableRDDCheckpointData.scala
index cac6cbe780..92f625f755 100644
--- a/core/src/main/scala/org/apache/spark/rdd/ReliableRDDCheckpointData.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ReliableRDDCheckpointData.scala
@@ -22,7 +22,6 @@ import scala.reflect.ClassTag
 import org.apache.hadoop.fs.Path
 
 import org.apache.spark._
-import org.apache.spark.util.SerializableConfiguration
 
 /**
  * An implementation of checkpointing that writes the RDD data to reliable storage.
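As an aside, the JavaRDD.sortBy method whose body is touched a few hunks above is part of the public Java API. A minimal usage sketch, assuming Java 8 and a local master; the demo class itself is hypothetical and not part of this patch:

```
import java.util.Arrays;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

// Hypothetical demo of JavaRDD.sortBy: identity key function,
// descending order, 2 output partitions.
public class SortByDemo {
  public static void main(String[] args) {
    SparkConf conf = new SparkConf().setAppName("sortBy-demo").setMaster("local[2]");
    try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
      JavaRDD<Integer> nums = jsc.parallelize(Arrays.asList(3, 1, 2));
      JavaRDD<Integer> sorted = nums.sortBy(x -> x, false, 2);
      System.out.println(sorted.collect()); // [3, 2, 1]
    }
  }
}
```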
diff --git a/core/src/main/scala/org/apache/spark/rpc/netty/NettyRpcEnv.scala b/core/src/main/scala/org/apache/spark/rpc/netty/NettyRpcEnv.scala
index 89eda857e6..c83a632e43 100644
--- a/core/src/main/scala/org/apache/spark/rpc/netty/NettyRpcEnv.scala
+++ b/core/src/main/scala/org/apache/spark/rpc/netty/NettyRpcEnv.scala
@@ -17,7 +17,6 @@ package org.apache.spark.rpc.netty
 
 import java.io._
-import java.lang.{Boolean => JBoolean}
 import java.net.{InetSocketAddress, URI}
 import java.nio.ByteBuffer
 import java.nio.channels.{Pipe, ReadableByteChannel, WritableByteChannel}

diff --git a/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala b/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala
index a3d2db3130..949e88f606 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala
@@ -19,7 +19,6 @@ package org.apache.spark.scheduler
 
 import java.util.Properties
 
-import org.apache.spark.TaskContext
 import org.apache.spark.util.CallSite
 
 /**

diff --git a/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
index d5cd2da7a1..a3845c6acd 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
@@ -19,11 +19,9 @@ package org.apache.spark.scheduler
 
 import java.util.Properties
 
-import scala.collection.Map
 import scala.language.existentials
 
 import org.apache.spark._
-import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.rdd.RDD
 import org.apache.spark.util.CallSite

diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
index fccd6e0699..8477a66b39 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark.scheduler
 
-import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
 import org.apache.spark.storage.BlockManagerId

diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala
index 29e469c3f5..8d5c11dc36 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala
@@ -22,7 +22,7 @@ import java.nio.ByteBuffer
 
 import org.apache.spark.TaskState.TaskState
 import org.apache.spark.rpc.RpcEndpointRef
 import org.apache.spark.scheduler.ExecutorLossReason
-import org.apache.spark.util.{SerializableBuffer, Utils}
+import org.apache.spark.util.SerializableBuffer
 
 private[spark] sealed trait CoarseGrainedClusterMessage extends Serializable

diff --git a/core/src/main/scala/org/apache/spark/serializer/Serializer.scala b/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
index 90c0728557..95bdf0ce2d 100644
--- a/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
@@ -23,9 +23,9 @@ import javax.annotation.concurrent.NotThreadSafe
 
 import scala.reflect.ClassTag
 
-import org.apache.spark.{SparkConf, SparkEnv}
+import org.apache.spark.SparkEnv
 import org.apache.spark.annotation.{DeveloperApi, Private}
-import org.apache.spark.util.{ByteBufferInputStream, NextIterator, Utils}
+import org.apache.spark.util.NextIterator
 
 /**
  * :: DeveloperApi ::

diff --git a/core/src/main/scala/org/apache/spark/shuffle/BaseShuffleHandle.scala b/core/src/main/scala/org/apache/spark/shuffle/BaseShuffleHandle.scala
index 0a65bbf8dd..04e4cf88d7 100644
--- a/core/src/main/scala/org/apache/spark/shuffle/BaseShuffleHandle.scala
+++ b/core/src/main/scala/org/apache/spark/shuffle/BaseShuffleHandle.scala
@@ -17,8 +17,7 @@
 
 package org.apache.spark.shuffle
 
-import org.apache.spark.{Aggregator, Partitioner, ShuffleDependency}
-import org.apache.spark.serializer.Serializer
+import org.apache.spark.ShuffleDependency
 
 /**
  * A basic ShuffleHandle implementation that just captures registerShuffle's parameters.

diff --git a/core/src/main/scala/org/apache/spark/shuffle/ShuffleBlockResolver.scala b/core/src/main/scala/org/apache/spark/shuffle/ShuffleBlockResolver.scala
index 81aea33ee4..d1ecbc1bf0 100644
--- a/core/src/main/scala/org/apache/spark/shuffle/ShuffleBlockResolver.scala
+++ b/core/src/main/scala/org/apache/spark/shuffle/ShuffleBlockResolver.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.shuffle
 
-import java.nio.ByteBuffer
-
 import org.apache.spark.network.buffer.ManagedBuffer
 import org.apache.spark.storage.ShuffleBlockId

diff --git a/core/src/main/scala/org/apache/spark/shuffle/hash/HashShuffleWriter.scala b/core/src/main/scala/org/apache/spark/shuffle/hash/HashShuffleWriter.scala
index 28bcced901..7694e950be 100644
--- a/core/src/main/scala/org/apache/spark/shuffle/hash/HashShuffleWriter.scala
+++ b/core/src/main/scala/org/apache/spark/shuffle/hash/HashShuffleWriter.scala
@@ -20,7 +20,6 @@ package org.apache.spark.shuffle.hash
 import java.io.IOException
 
 import org.apache.spark._
-import org.apache.spark.executor.ShuffleWriteMetrics
 import org.apache.spark.scheduler.MapStatus
 import org.apache.spark.serializer.Serializer
 import org.apache.spark.shuffle._

diff --git a/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleWriter.scala b/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleWriter.scala
index 7eb3d96037..4a7b1f07b6 100644
--- a/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleWriter.scala
+++ b/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleWriter.scala
@@ -18,7 +18,6 @@ package org.apache.spark.shuffle.sort
 
 import org.apache.spark._
-import org.apache.spark.executor.ShuffleWriteMetrics
 import org.apache.spark.scheduler.MapStatus
 import org.apache.spark.shuffle.{BaseShuffleHandle, IndexShuffleBlockResolver, ShuffleWriter}
 import org.apache.spark.storage.ShuffleBlockId

diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
index 0b7aa599e9..1cb027a3f5 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
@@ -19,7 +19,7 @@ package org.apache.spark.storage
 
 import scala.collection.Iterable
 import scala.collection.generic.CanBuildFrom
-import scala.concurrent.{Await, Future}
+import scala.concurrent.Future
 
 import org.apache.spark.{Logging, SparkConf, SparkException}
 import org.apache.spark.rpc.RpcEndpointRef

diff --git a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
index bfa6560a72..db12a4a1b9 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
@@ -22,7 +22,6 @@ import java.nio.ByteBuffer
 import java.nio.channels.FileChannel.MapMode
 
 import org.apache.spark.Logging
-import org.apache.spark.serializer.Serializer
 import org.apache.spark.util.Utils
 
 /**

diff --git a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
index bc143b7de3..6b3601250a 100644
--- a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
@@ -21,7 +21,7 @@ import java.net.{URI, URL}
 import javax.servlet.DispatcherType
 import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}
 
-import scala.collection.mutable.{ArrayBuffer, StringBuilder}
+import scala.collection.mutable.ArrayBuffer
 import scala.language.implicitConversions
 import scala.xml.Node

diff --git a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorThreadDumpPage.scala b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorThreadDumpPage.scala
index edc66709e2..cc476d61b5 100644
--- a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorThreadDumpPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorThreadDumpPage.scala
@@ -19,7 +19,6 @@ package org.apache.spark.ui.exec
 
 import javax.servlet.http.HttpServletRequest
 
-import scala.util.Try
 import scala.xml.{Node, Text}
 
 import org.apache.spark.ui.{UIUtils, WebUIPage}

diff --git a/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala b/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
index 52587d2188..83ded92609 100644
--- a/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
+++ b/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
@@ -20,7 +20,6 @@ package org.apache.spark.util
 import java.lang.management.ManagementFactory
 import java.lang.reflect.{Field, Modifier}
 import java.util.{IdentityHashMap, Random}
-import java.util.concurrent.ConcurrentHashMap
 
 import scala.collection.mutable.ArrayBuffer
 import scala.runtime.ScalaRunTime

diff --git a/core/src/main/scala/org/apache/spark/util/collection/AppendOnlyMap.scala b/core/src/main/scala/org/apache/spark/util/collection/AppendOnlyMap.scala
index 4c1e161554..6b74a29ace 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/AppendOnlyMap.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/AppendOnlyMap.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.util.collection
 
-import java.util.{Arrays, Comparator}
+import java.util.Comparator
 
 import com.google.common.hash.Hashing

diff --git a/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java b/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
index aa15e792e2..1692df7d30 100644
--- a/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
+++ b/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
@@ -17,9 +17,6 @@
 
 package org.apache.spark.launcher;
 
-import java.io.BufferedReader;
-import java.io.InputStream;
-import java.io.InputStreamReader;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;

diff --git a/core/src/test/java/org/apache/spark/serializer/TestJavaSerializerImpl.java b/core/src/test/java/org/apache/spark/serializer/TestJavaSerializerImpl.java
index 3d50ab4fab..8aa0636700 100644
--- a/core/src/test/java/org/apache/spark/serializer/TestJavaSerializerImpl.java
+++ b/core/src/test/java/org/apache/spark/serializer/TestJavaSerializerImpl.java
@@ -21,7 +21,6 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
 
-import scala.Option;
 import scala.reflect.ClassTag;

diff --git a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
index 6546def596..ddf48765ec 100644
--- a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
@@ -22,7 +22,7 @@ import scala.collection.mutable.ArrayBuffer
 import org.mockito.Matchers.{any, isA}
 import org.mockito.Mockito._
 
-import org.apache.spark.rpc.{RpcAddress, RpcCallContext, RpcEndpointRef, RpcEnv}
+import org.apache.spark.rpc.{RpcAddress, RpcCallContext, RpcEnv}
 import org.apache.spark.scheduler.{CompressedMapStatus, MapStatus}
 import org.apache.spark.shuffle.FetchFailedException
 import org.apache.spark.storage.{BlockManagerId, ShuffleBlockId}

diff --git a/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala b/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
index fa35819f55..159b448e05 100644
--- a/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
@@ -20,11 +20,8 @@ package org.apache.spark
 import java.io.File
 import javax.net.ssl.SSLContext
 
-import com.google.common.io.Files
 import org.scalatest.BeforeAndAfterAll
 
-import org.apache.spark.util.Utils
-
 class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
 
   test("test resolving property file as spark conf ") {

diff --git a/core/src/test/scala/org/apache/spark/ThreadingSuite.scala b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
index fc31b784c7..b66aba91c5 100644
--- a/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
@@ -17,11 +17,9 @@
 
 package org.apache.spark
 
-import java.util.concurrent.{Semaphore, TimeUnit}
+import java.util.concurrent.Semaphore
 import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger}
 
-import org.apache.spark.scheduler._
-
 /**
  * Holds state shared across task threads in some ThreadingSuite tests.
  */

diff --git a/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala b/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
index f416ace5c2..cbdf1755b0 100644
--- a/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
@@ -22,7 +22,7 @@ import java.net.URL
 
 import scala.collection.mutable
 import scala.io.Source
 
-import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
+import org.apache.spark.{LocalSparkContext, SparkContext, SparkFunSuite}
 import org.apache.spark.scheduler.{SparkListener, SparkListenerExecutorAdded}
 import org.apache.spark.scheduler.cluster.ExecutorInfo
 import org.apache.spark.util.SparkConfWithEnv

diff --git a/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala b/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala
index 658779360b..379c038c55 100644
--- a/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala
@@ -19,7 +19,6 @@ package org.apache.spark.deploy.client
 
 import java.util.concurrent.ConcurrentLinkedQueue
 
-import scala.collection.JavaConverters._
 import scala.concurrent.duration._
 
 import org.scalatest.BeforeAndAfterAll

diff --git a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
index 3baa2e2dda..8e8007f4eb 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
@@ -24,16 +24,14 @@ import java.util.concurrent.TimeUnit
 import java.util.zip.{ZipInputStream, ZipOutputStream}
 
 import scala.concurrent.duration._
-import scala.io.Source
 import scala.language.postfixOps
 
 import com.google.common.base.Charsets
 import com.google.common.io.{ByteStreams, Files}
-import org.apache.hadoop.fs.Path
 import org.apache.hadoop.hdfs.DistributedFileSystem
 import org.json4s.jackson.JsonMethods._
 import org.mockito.Matchers.any
-import org.mockito.Mockito.{doReturn, mock, spy, verify, when}
+import org.mockito.Mockito.{mock, spy, verify}
 import org.scalatest.BeforeAndAfter
 import org.scalatest.Matchers
 import org.scalatest.concurrent.Eventually._
@@ -45,8 +43,6 @@ import org.apache.spark.util.{Clock, JsonProtocol, ManualClock, Utils}
 class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matchers with Logging {
 
-  import FsHistoryProvider._
-
   private var testDir: File = null
 
   before {

diff --git a/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala b/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
index d852255a4f..88b3a0e964 100644
--- a/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
@@ -24,7 +24,7 @@ import java.io.FileOutputStream
 
 import scala.collection.immutable.IndexedSeq
 
 import org.apache.hadoop.io.Text
-import org.apache.hadoop.io.compress.{CompressionCodecFactory, DefaultCodec, GzipCodec}
+import org.apache.hadoop.io.compress.{CompressionCodecFactory, GzipCodec}
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.spark.{Logging, SparkConf, SparkContext, SparkFunSuite}

diff --git a/core/src/test/scala/org/apache/spark/launcher/LauncherBackendSuite.scala b/core/src/test/scala/org/apache/spark/launcher/LauncherBackendSuite.scala
index 639d1daa36..713560d3dd 100644
--- a/core/src/test/scala/org/apache/spark/launcher/LauncherBackendSuite.scala
+++ b/core/src/test/scala/org/apache/spark/launcher/LauncherBackendSuite.scala
@@ -26,7 +26,6 @@ import org.scalatest.Matchers
 import org.scalatest.concurrent.Eventually._
 
 import org.apache.spark._
-import org.apache.spark.launcher._
 
 class LauncherBackendSuite extends SparkFunSuite with Matchers {

diff --git a/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala b/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala
index e5cb9d3a99..6dad3f4ae7 100644
--- a/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala
+++ b/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala
@@ -17,10 +17,8 @@
 
 package org.apache.spark.memory
 
-import scala.collection.mutable
-
 import org.apache.spark.SparkConf
-import org.apache.spark.storage.{BlockId, BlockStatus}
+import org.apache.spark.storage.BlockId
 
 class TestMemoryManager(conf: SparkConf)
   extends MemoryManager(conf, numCores = 1, Long.MaxValue, Long.MaxValue) {

diff --git a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
index 2204800388..43e61241b6 100644
--- a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
@@ -20,7 +20,7 @@ package org.apache.spark.rpc
 import java.io.{File, NotSerializableException}
 import java.nio.charset.StandardCharsets.UTF_8
 import java.util.UUID
-import java.util.concurrent.{ConcurrentLinkedQueue, CountDownLatch, TimeoutException, TimeUnit}
+import java.util.concurrent.{ConcurrentLinkedQueue, CountDownLatch, TimeUnit}
 
 import scala.collection.mutable
 import scala.collection.JavaConverters._

diff --git a/core/src/test/scala/org/apache/spark/rpc/netty/InboxSuite.scala b/core/src/test/scala/org/apache/spark/rpc/netty/InboxSuite.scala
index 12113be75c..e5539566e4 100644
--- a/core/src/test/scala/org/apache/spark/rpc/netty/InboxSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rpc/netty/InboxSuite.scala
@@ -23,7 +23,7 @@ import java.util.concurrent.atomic.AtomicInteger
 
 import org.mockito.Mockito._
 
 import org.apache.spark.SparkFunSuite
-import org.apache.spark.rpc.{RpcAddress, RpcEndpoint, RpcEnv, TestRpcEndpoint}
+import org.apache.spark.rpc.{RpcAddress, TestRpcEndpoint}
 
 class InboxSuite extends SparkFunSuite {

diff --git a/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala
index 5db7535d36..2df05401f3 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala
@@ -33,7 +33,7 @@ import org.scalatest.BeforeAndAfter
 
 import org.apache.spark.{LocalSparkContext, SecurityManager, SparkConf, SparkContext, SparkFunSuite}
 import org.apache.spark.network.shuffle.mesos.MesosExternalShuffleClient
-import org.apache.spark.rpc.{RpcEndpointRef}
+import org.apache.spark.rpc.RpcEndpointRef
 import org.apache.spark.scheduler.TaskSchedulerImpl
 
 class CoarseMesosSchedulerBackendSuite extends SparkFunSuite

diff --git a/core/src/test/scala/org/apache/spark/ui/UISuite.scala b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
index 69c46058f1..2b59b48d8b 100644
--- a/core/src/test/scala/org/apache/spark/ui/UISuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
@@ -21,7 +21,6 @@ import java.net.{BindException, ServerSocket}
 
 import scala.io.Source
 
-import org.eclipse.jetty.server.Server
 import org.eclipse.jetty.servlet.ServletContextHandler
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.time.SpanSugar._

diff --git a/core/src/test/scala/org/apache/spark/util/ResetSystemProperties.scala b/core/src/test/scala/org/apache/spark/util/ResetSystemProperties.scala
index 60fb7abb66..75e4504850 100644
--- a/core/src/test/scala/org/apache/spark/util/ResetSystemProperties.scala
+++ b/core/src/test/scala/org/apache/spark/util/ResetSystemProperties.scala
@@ -22,8 +22,6 @@ import java.util.Properties
 
 import org.apache.commons.lang3.SerializationUtils
 import org.scalatest.{BeforeAndAfterEach, Suite}
 
-import org.apache.spark.SparkFunSuite
-
 /**
  * Mixin for automatically resetting system properties that are modified in ScalaTest tests.
  * This resets the properties after each individual test.

diff --git a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
index 49088aa0a5..c342b68f46 100644
--- a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.util
 
 import scala.collection.mutable.ArrayBuffer
 
-import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, PrivateMethodTester}
+import org.scalatest.{BeforeAndAfterEach, PrivateMethodTester}
 
 import org.apache.spark.SparkFunSuite
-- 
cgit v1.2.3
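For the "add checkstyle rule" half of the subject line: that change lives outside core/, so it is not visible in this core-limited view. `UnusedImports` is a stock Checkstyle module, so the addition referenced by the commit message would be along these lines (the dev/checkstyle.xml path and exact placement are assumptions, not shown in this diff):

```
<!-- Sketch of the UnusedImports rule named in the commit message; the exact
     file and placement in Spark's checkstyle config are assumed here. -->
<module name="Checker">
  <module name="TreeWalker">
    <module name="UnusedImports"/>
  </module>
</module>
```

With a rule like this in place, `dev/lint-java` flags any unused Java import that reappears, instead of relying on manual review.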