Diffstat (limited to 'core/src/main/scala')
-rw-r--r--  core/src/main/scala/org/apache/spark/SecurityManager.scala | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkContext.scala | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/ExternalShuffleServiceSource.scala | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala | 17
-rw-r--r--  core/src/main/scala/org/apache/spark/internal/config/ConfigReader.scala | 1
-rw-r--r--  core/src/main/scala/org/apache/spark/rpc/RpcTimeout.scala | 5
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala | 1
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala | 1
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/Stage.scala | 1
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/Task.scala | 1
-rw-r--r--  core/src/main/scala/org/apache/spark/serializer/Serializer.scala | 1
-rw-r--r--  core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala | 1
-rw-r--r--  core/src/main/scala/org/apache/spark/status/api/v1/ExecutorListResource.scala | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/StorageLevel.scala | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala | 1
17 files changed, 12 insertions(+), 32 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SecurityManager.scala b/core/src/main/scala/org/apache/spark/SecurityManager.scala
index 87fe563152..9bdc5096b6 100644
--- a/core/src/main/scala/org/apache/spark/SecurityManager.scala
+++ b/core/src/main/scala/org/apache/spark/SecurityManager.scala
@@ -26,11 +26,9 @@ import javax.net.ssl._
import com.google.common.hash.HashCodes
import com.google.common.io.Files
import org.apache.hadoop.io.Text
-import org.apache.hadoop.security.Credentials
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.internal.Logging
-import org.apache.spark.internal.config._
import org.apache.spark.network.sasl.SecretKeyHolder
import org.apache.spark.util.Utils
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index efb5f9d501..869c5d7094 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -19,7 +19,7 @@ package org.apache.spark
import java.io._
import java.lang.reflect.Constructor
-import java.net.{URI}
+import java.net.URI
import java.util.{Arrays, Locale, Properties, ServiceLoader, UUID}
import java.util.concurrent.{ConcurrentHashMap, ConcurrentMap}
import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger, AtomicReference}
diff --git a/core/src/main/scala/org/apache/spark/deploy/ExternalShuffleServiceSource.scala b/core/src/main/scala/org/apache/spark/deploy/ExternalShuffleServiceSource.scala
index e917679c83..357a976931 100644
--- a/core/src/main/scala/org/apache/spark/deploy/ExternalShuffleServiceSource.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/ExternalShuffleServiceSource.scala
@@ -19,7 +19,7 @@ package org.apache.spark.deploy
import javax.annotation.concurrent.ThreadSafe
-import com.codahale.metrics.{Gauge, MetricRegistry}
+import com.codahale.metrics.MetricRegistry
import org.apache.spark.metrics.source.Source
import org.apache.spark.network.shuffle.ExternalShuffleBlockHandler
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index a980144a75..5ffdedd165 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -17,7 +17,7 @@
package org.apache.spark.deploy
-import java.io.{File, IOException, PrintStream}
+import java.io.{File, IOException}
import java.lang.reflect.{InvocationTargetException, Modifier, UndeclaredThrowableException}
import java.net.URL
import java.security.PrivilegedExceptionAction
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala
index 465c214362..80dc9bf877 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala
@@ -22,8 +22,6 @@ import javax.servlet.http.HttpServletRequest
import scala.xml.{Node, Unparsed}
-import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache}
-
import org.apache.spark.internal.Logging
import org.apache.spark.ui.{UIUtils, WebUIPage}
import org.apache.spark.util.Utils
diff --git a/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala b/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala
index 113037d1ab..4f3e42bb3c 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala
@@ -17,12 +17,6 @@
package org.apache.spark.internal.config
-import java.util.{Map => JMap}
-
-import scala.util.matching.Regex
-
-import org.apache.spark.SparkConf
-
/**
* An entry contains all meta information for a configuration.
*
@@ -34,7 +28,6 @@ import org.apache.spark.SparkConf
* value declared as a string.
*
* @param key the key for the configuration
- * @param defaultValue the default value for the configuration
* @param valueConverter how to convert a string to the value. It should throw an exception if the
* string does not have the required format.
* @param stringConverter how to convert a value to a string that the user can use as a valid
@@ -76,7 +69,7 @@ private class ConfigEntryWithDefault[T] (
stringConverter: T => String,
doc: String,
isPublic: Boolean)
- extends ConfigEntry(key, valueConverter, stringConverter, doc, isPublic) {
+ extends ConfigEntry(key, valueConverter, stringConverter, doc, isPublic) {
override def defaultValue: Option[T] = Some(_defaultValue)
@@ -95,7 +88,7 @@ private class ConfigEntryWithDefaultString[T] (
stringConverter: T => String,
doc: String,
isPublic: Boolean)
- extends ConfigEntry(key, valueConverter, stringConverter, doc, isPublic) {
+ extends ConfigEntry(key, valueConverter, stringConverter, doc, isPublic) {
override def defaultValue: Option[T] = Some(valueConverter(_defaultValue))
@@ -118,8 +111,8 @@ private[spark] class OptionalConfigEntry[T](
val rawStringConverter: T => String,
doc: String,
isPublic: Boolean)
- extends ConfigEntry[Option[T]](key, s => Some(rawValueConverter(s)),
- v => v.map(rawStringConverter).orNull, doc, isPublic) {
+ extends ConfigEntry[Option[T]](key, s => Some(rawValueConverter(s)),
+ v => v.map(rawStringConverter).orNull, doc, isPublic) {
override def defaultValueString: String = "<undefined>"
@@ -137,7 +130,7 @@ private class FallbackConfigEntry[T] (
doc: String,
isPublic: Boolean,
private[config] val fallback: ConfigEntry[T])
- extends ConfigEntry[T](key, fallback.valueConverter, fallback.stringConverter, doc, isPublic) {
+ extends ConfigEntry[T](key, fallback.valueConverter, fallback.stringConverter, doc, isPublic) {
override def defaultValueString: String = s"<value of ${fallback.key}>"
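
(Aside on the ConfigEntry hunks above: the scaladoc describes an entry by its key, value converter, string converter, and optional default. A minimal sketch of declaring and reading such an entry, assuming the ConfigBuilder DSL from the same org.apache.spark.internal.config package; these APIs are private[spark], so the sketch only compiles inside a subpackage of org.apache.spark, and the key and default value are made up for illustration.)

    // Sketch only: ConfigBuilder and SparkConf.get(entry) are private[spark],
    // so this must live under the org.apache.spark package.
    package org.apache.spark.example

    import org.apache.spark.SparkConf
    import org.apache.spark.internal.config.ConfigBuilder

    object ConfigEntrySketch {
      // Hypothetical entry: key, doc, value converter (intConf), and a default.
      val MAX_RETRIES = ConfigBuilder("spark.example.maxRetries")
        .doc("How many times to retry the example operation.")
        .intConf
        .createWithDefault(3)

      def main(args: Array[String]): Unit = {
        val conf = new SparkConf()
        // Reading goes through the entry's valueConverter; the default
        // applies here because the key was never set.
        val retries: Int = conf.get(MAX_RETRIES)
        println(retries) // 3
      }
    }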
diff --git a/core/src/main/scala/org/apache/spark/internal/config/ConfigReader.scala b/core/src/main/scala/org/apache/spark/internal/config/ConfigReader.scala
index bb1a3bb5fc..c62de9bfd8 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/ConfigReader.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/ConfigReader.scala
@@ -18,7 +18,6 @@
package org.apache.spark.internal.config
import java.util.{Map => JMap}
-import java.util.regex.Pattern
import scala.collection.mutable.HashMap
import scala.util.matching.Regex
diff --git a/core/src/main/scala/org/apache/spark/rpc/RpcTimeout.scala b/core/src/main/scala/org/apache/spark/rpc/RpcTimeout.scala
index efd26486ab..2c9a976e76 100644
--- a/core/src/main/scala/org/apache/spark/rpc/RpcTimeout.scala
+++ b/core/src/main/scala/org/apache/spark/rpc/RpcTimeout.scala
@@ -19,11 +19,10 @@ package org.apache.spark.rpc
import java.util.concurrent.TimeoutException
-import scala.concurrent.{Await, Future}
+import scala.concurrent.Future
import scala.concurrent.duration._
-import scala.util.control.NonFatal
-import org.apache.spark.{SparkConf, SparkException}
+import org.apache.spark.SparkConf
import org.apache.spark.util.{ThreadUtils, Utils}
/**
diff --git a/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala b/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
index 6abdf0fd53..e36c759a42 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
@@ -24,7 +24,6 @@ import java.util.Properties
import org.apache.spark._
import org.apache.spark.broadcast.Broadcast
-import org.apache.spark.executor.TaskMetrics
import org.apache.spark.rdd.RDD
/**
diff --git a/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala b/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala
index 994b81e062..7a25c47e2c 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala
@@ -25,7 +25,6 @@ import scala.language.existentials
import org.apache.spark._
import org.apache.spark.broadcast.Broadcast
-import org.apache.spark.executor.TaskMetrics
import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
import org.apache.spark.shuffle.ShuffleWriter
diff --git a/core/src/main/scala/org/apache/spark/scheduler/Stage.scala b/core/src/main/scala/org/apache/spark/scheduler/Stage.scala
index 2f972b064b..c628dd38d0 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/Stage.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/Stage.scala
@@ -19,7 +19,6 @@ package org.apache.spark.scheduler
import scala.collection.mutable.HashSet
-import org.apache.spark._
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
diff --git a/core/src/main/scala/org/apache/spark/scheduler/Task.scala b/core/src/main/scala/org/apache/spark/scheduler/Task.scala
index 51976f666d..7b726d5659 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/Task.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/Task.scala
@@ -46,7 +46,6 @@ import org.apache.spark.util._
* @param stageId id of the stage this task belongs to
* @param stageAttemptId attempt id of the stage this task belongs to
* @param partitionId index of the partition in the RDD
- * @param metrics a `TaskMetrics` that is created at driver side and sent to executor side.
* @param localProperties copy of thread-local properties set by the user on the driver side.
* @param serializedTaskMetrics a `TaskMetrics` that is created and serialized on the driver side
* and sent to executor side.
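
(Aside on the Task scaladoc above: the removed metrics parameter is superseded by serializedTaskMetrics, i.e. the driver serializes a TaskMetrics once and ships raw bytes to the executor. A rough sketch of that ship-the-bytes pattern, using a stand-in metrics class and Spark's public JavaSerializer rather than the scheduler's actual closure serializer; the names here are illustrative, not the scheduler's real code path.)

    import java.nio.ByteBuffer

    import org.apache.spark.SparkConf
    import org.apache.spark.serializer.JavaSerializer

    object MetricsShipSketch {
      // Stand-in for the driver-side TaskMetrics snapshot.
      case class FakeTaskMetrics(executorRunTime: Long)

      def main(args: Array[String]): Unit = {
        val ser = new JavaSerializer(new SparkConf()).newInstance()

        // Driver side: serialize once, send the bytes along with the task.
        val serializedTaskMetrics: Array[Byte] =
          ser.serialize(FakeTaskMetrics(0L)).array()

        // Executor side: rebuild a fresh metrics object from the bytes.
        val metrics = ser.deserialize[FakeTaskMetrics](
          ByteBuffer.wrap(serializedTaskMetrics))
        println(metrics.executorRunTime) // 0
      }
    }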
diff --git a/core/src/main/scala/org/apache/spark/serializer/Serializer.scala b/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
index afe6cd8605..008b038789 100644
--- a/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
@@ -23,7 +23,6 @@ import javax.annotation.concurrent.NotThreadSafe
import scala.reflect.ClassTag
-import org.apache.spark.SparkEnv
import org.apache.spark.annotation.{DeveloperApi, Private}
import org.apache.spark.util.NextIterator
diff --git a/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala b/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala
index 686305e933..748f0a30ad 100644
--- a/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala
@@ -23,7 +23,6 @@ import java.nio.ByteBuffer
import scala.reflect.ClassTag
import org.apache.spark.SparkConf
-import org.apache.spark.internal.config._
import org.apache.spark.io.CompressionCodec
import org.apache.spark.security.CryptoStreamUtils
import org.apache.spark.storage._
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/ExecutorListResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/ExecutorListResource.scala
index 6ca59c2f3c..ab53881594 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/ExecutorListResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/ExecutorListResource.scala
@@ -16,7 +16,7 @@
*/
package org.apache.spark.status.api.v1
-import javax.ws.rs.{GET, PathParam, Produces}
+import javax.ws.rs.{GET, Produces}
import javax.ws.rs.core.MediaType
import org.apache.spark.ui.SparkUI
diff --git a/core/src/main/scala/org/apache/spark/storage/StorageLevel.scala b/core/src/main/scala/org/apache/spark/storage/StorageLevel.scala
index fad0404beb..4c6998d7a8 100644
--- a/core/src/main/scala/org/apache/spark/storage/StorageLevel.scala
+++ b/core/src/main/scala/org/apache/spark/storage/StorageLevel.scala
@@ -31,7 +31,7 @@ import org.apache.spark.util.Utils
* ExternalBlockStore, whether to keep the data in memory in a serialized format, and whether
* to replicate the RDD partitions on multiple nodes.
*
- * The [[org.apache.spark.storage.StorageLevel$]] singleton object contains some static constants
+ * The [[org.apache.spark.storage.StorageLevel]] singleton object contains some static constants
* for commonly useful storage levels. To create your own storage level object, use the
* factory method of the singleton object (`StorageLevel(...)`).
*/
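
(Aside on the StorageLevel scaladoc fixed above: it points readers at the singleton's constants and its factory method. A minimal usage sketch; the replication factor and the commented-out rdd call are illustrative.)

    import org.apache.spark.storage.StorageLevel

    object StorageLevelSketch {
      def main(args: Array[String]): Unit = {
        // A predefined constant from the singleton object:
        val memOnly = StorageLevel.MEMORY_ONLY

        // A custom level via the factory method: spill to disk, keep the
        // in-memory copy serialized, replicate on two nodes.
        val custom = StorageLevel(
          useDisk = true,
          useMemory = true,
          deserialized = false,
          replication = 2)

        println(custom.description)
        // rdd.persist(custom)  // how it would be applied to an RDD
      }
    }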
diff --git a/core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala b/core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala
index 8c67364ef1..ea99a7e5b4 100644
--- a/core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala
+++ b/core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala
@@ -19,7 +19,6 @@ package org.apache.spark.util.random
import java.util.Random
-import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
import org.apache.commons.math3.distribution.PoissonDistribution