diff options
author | Jacek Laskowski <jacek.laskowski@deepsense.io> | 2015-10-25 01:33:22 +0100 |
---|---|---|
committer | Sean Owen <sowen@cloudera.com> | 2015-10-25 01:33:22 +0100 |
commit | 146da0d8100490a6e49a6c076ec253cdaf9f8905 (patch) | |
tree | 21fa5d9ed6d464ac59ed7eb58e4627ad98c67fcc | |
parent | 28132ceb10d0c127495ce8cb36135e1cb54164d7 (diff) | |
download | spark-146da0d8100490a6e49a6c076ec253cdaf9f8905.tar.gz spark-146da0d8100490a6e49a6c076ec253cdaf9f8905.tar.bz2 spark-146da0d8100490a6e49a6c076ec253cdaf9f8905.zip |
Fix typos
Two typos squashed.
BTW, let me know how to proceed with other typos if I run across any. I don't feel comfortable leaving them aside, but neither does sending pull requests with such tiny changes. Guide me.
Author: Jacek Laskowski <jacek.laskowski@deepsense.io>
Closes #9250 from jaceklaskowski/typos-hunting.
4 files changed, 5 insertions, 4 deletions
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala index 58d3b846fd..f023e4b21c 100644 --- a/core/src/main/scala/org/apache/spark/SparkConf.scala +++ b/core/src/main/scala/org/apache/spark/SparkConf.scala @@ -621,7 +621,7 @@ private[spark] object SparkConf extends Logging { /** * Return whether the given config should be passed to an executor on start-up. * - * Certain akka and authentication configs are required of the executor when it connects to + * Certain akka and authentication configs are required from the executor when it connects to * the scheduler, while the rest of the spark configs can be inherited from the driver later. */ def isExecutorStartupConf(name: String): Boolean = { diff --git a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala index 48afe3ae35..fdf76d312d 100644 --- a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala +++ b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala @@ -197,7 +197,7 @@ private[spark] class MetricsSystem private ( } } catch { case e: Exception => { - logError("Sink class " + classPath + " cannot be instantialized") + logError("Sink class " + classPath + " cannot be instantiated") throw e } } diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala index f25f3ed0d9..cb9a300810 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala @@ -22,7 +22,8 @@ import org.apache.spark.executor.TaskMetrics import org.apache.spark.storage.BlockManagerId /** - * Low-level task scheduler interface, currently implemented exclusively by TaskSchedulerImpl. 
+ * Low-level task scheduler interface, currently implemented exclusively by + * [[org.apache.spark.scheduler.TaskSchedulerImpl]]. * This interface allows plugging in different task schedulers. Each TaskScheduler schedules tasks * for a single SparkContext. These schedulers get sets of tasks submitted to them from the * DAGScheduler for each stage, and are responsible for sending the tasks to the cluster, running diff --git a/core/src/main/scala/org/apache/spark/util/ThreadUtils.scala b/core/src/main/scala/org/apache/spark/util/ThreadUtils.scala index 15e7519d70..53283448c8 100644 --- a/core/src/main/scala/org/apache/spark/util/ThreadUtils.scala +++ b/core/src/main/scala/org/apache/spark/util/ThreadUtils.scala @@ -80,7 +80,7 @@ private[spark] object ThreadUtils { } /** - * Wrapper over newSingleThreadScheduledExecutor. + * Wrapper over ScheduledThreadPoolExecutor. */ def newDaemonSingleThreadScheduledExecutor(threadName: String): ScheduledExecutorService = { val threadFactory = new ThreadFactoryBuilder().setDaemon(true).setNameFormat(threadName).build() |