author    Dongjoon Hyun <dongjoon@apache.org>    2016-04-06 16:02:55 -0700
committer Andrew Or <andrew@databricks.com>      2016-04-06 16:02:55 -0700
commit d717ae1fd74d125a9df21350a70e7c2b2d2b4786 (patch)
tree   7fea2e27627c52cd6ffa04421df66068e5b28881 /core
parent 457e58befe8cb7c346e54b344a45fa357b68cfc0 (diff)
[SPARK-14444][BUILD] Add a new scalastyle `NoScalaDoc` to prevent ScalaDoc-style multiline comments
## What changes were proposed in this pull request?

According to the [Spark Code Style Guide](https://cwiki.apache.org/confluence/display/SPARK/Spark+Code+Style+Guide#SparkCodeStyleGuide-Indentation), this PR adds a new scalastyle rule to prevent the following:

```
/** In Spark, we don't use the ScalaDoc style so this
  * is not correct.
  */
```

## How was this patch tested?

Passed the Jenkins tests (including `lint-scala`).

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #12221 from dongjoon-hyun/SPARK-14444.
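The `NoScalaDoc` rule itself lives outside `core/` (Spark keeps its scalastyle rules in `scalastyle-config.xml` at the repository root), which is why it does not appear in this `core`-only diffstat. As a minimal illustration of the two indentation styles involved — the rejected comment text is the example from the commit message; the accepted variant below it is illustrative only — the check flags continuation asterisks indented ScalaDoc-style, one column right of the opening slash, while the Javadoc-style alignment used throughout the hunks below passes:

```scala
// Rejected by the new NoScalaDoc check: ScalaDoc-style indentation,
// with the continuation asterisks one column right of the slash.
/** In Spark, we don't use the ScalaDoc style so this
  * is not correct.
  */

// Accepted: Javadoc-style indentation, with the continuation asterisks
// aligned directly under the first asterisk of the opening "/**".
/**
 * In Spark, comments use the Javadoc style, so this is correct.
 */
```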
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkConf.scala              |  6
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala | 12
-rw-r--r--  core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala  |  4

3 files changed, 12 insertions(+), 10 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 5da2e98f1f..e0fd248c43 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -419,8 +419,10 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
*/
private[spark] def getenv(name: String): String = System.getenv(name)
- /** Checks for illegal or deprecated config settings. Throws an exception for the former. Not
- * idempotent - may mutate this conf object to convert deprecated settings to supported ones. */
+ /**
+ * Checks for illegal or deprecated config settings. Throws an exception for the former. Not
+ * idempotent - may mutate this conf object to convert deprecated settings to supported ones.
+ */
private[spark] def validateSettings() {
if (contains("spark.local.dir")) {
val msg = "In Spark 1.0 and later spark.local.dir will be overridden by the value set by " +
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
index 4e8e363635..41ac308808 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
@@ -76,9 +76,9 @@ class SparkHadoopUtil extends Logging {
/**
- * Appends S3-specific, spark.hadoop.*, and spark.buffer.size configurations to a Hadoop
- * configuration.
- */
+ * Appends S3-specific, spark.hadoop.*, and spark.buffer.size configurations to a Hadoop
+ * configuration.
+ */
def appendS3AndSparkHadoopConfigurations(conf: SparkConf, hadoopConf: Configuration): Unit = {
// Note: this null check is around more than just access to the "conf" object to maintain
// the behavior of the old implementation of this code, for backwards compatibility.
@@ -108,9 +108,9 @@ class SparkHadoopUtil extends Logging {
}
/**
- * Return an appropriate (subclass) of Configuration. Creating config can initializes some Hadoop
- * subsystems.
- */
+ * Return an appropriate (subclass) of Configuration. Creating config can initializes some Hadoop
+ * subsystems.
+ */
def newConfiguration(conf: SparkConf): Configuration = {
val hadoopConf = new Configuration()
appendS3AndSparkHadoopConfigurations(conf, hadoopConf)
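For context on how these two methods work together: `newConfiguration` builds a Hadoop `Configuration` and passes it through `appendS3AndSparkHadoopConfigurations`, which copies every `spark.hadoop.*` key into it with the prefix stripped and maps `spark.buffer.size` to Hadoop's `io.file.buffer.size`. A minimal sketch, assuming `SparkHadoopUtil.get` is accessible from the calling code (its visibility varies across Spark versions):

```scala
import org.apache.spark.SparkConf
import org.apache.spark.deploy.SparkHadoopUtil

val conf = new SparkConf()
  // Copied into the Hadoop conf with the "spark.hadoop." prefix stripped.
  .set("spark.hadoop.fs.s3a.connection.maximum", "100")
  // Mapped to Hadoop's "io.file.buffer.size".
  .set("spark.buffer.size", "131072")

val hadoopConf = SparkHadoopUtil.get.newConfiguration(conf)
assert(hadoopConf.get("fs.s3a.connection.maximum") == "100")
assert(hadoopConf.get("io.file.buffer.size") == "131072")
```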
diff --git a/core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala b/core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala
index d06b2c67d2..c562c70aba 100644
--- a/core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala
+++ b/core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala
@@ -28,8 +28,8 @@ class BoundedDouble(val mean: Double, val confidence: Double, val low: Double, v
this.mean.hashCode ^ this.confidence.hashCode ^ this.low.hashCode ^ this.high.hashCode
/**
- * Note that consistent with Double, any NaN value will make equality false
- */
+ * Note that consistent with Double, any NaN value will make equality false
+ */
override def equals(that: Any): Boolean =
that match {
case that: BoundedDouble => {
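The comment reformatted in this last hunk documents a real subtlety: primitive `Double` comparison treats `NaN` as unequal to everything, itself included, and the comment states that `BoundedDouble.equals` follows that convention. A quick standalone illustration of the underlying semantics (not part of the diff):

```scala
val nan = Double.NaN

// Primitive comparison: NaN is never == to anything, itself included.
println(nan == nan)       // false

// Boxed comparison differs: java.lang.Double#equals treats NaN as equal
// to NaN, which is why the comment spells out which behavior applies here.
println(nan.equals(nan))  // true

// Per the comment, a BoundedDouble carrying a NaN field compares unequal
// to any other instance: "consistent with Double" under ==.
```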