author hyukjinkwon <gurwls223@gmail.com> 2017-01-17 12:28:15 +0000
committer Sean Owen <sowen@cloudera.com> 2017-01-17 12:28:15 +0000
commit 6c00c069e3c3f5904abd122cea1d56683031cca0 (patch)
tree 3d7a64d4cbedd007a217d0387919ce1fe72a0d2e /sql/core
parent 0019005a2d0f150fd00ad926d054a8beca4bbd68 (diff)
download spark-6c00c069e3c3f5904abd122cea1d56683031cca0.tar.gz
spark-6c00c069e3c3f5904abd122cea1d56683031cca0.tar.bz2
spark-6c00c069e3c3f5904abd122cea1d56683031cca0.zip
[SPARK-3249][DOC] Fix links in ScalaDoc that cause warning messages in `sbt/sbt unidoc`
## What changes were proposed in this pull request?

This PR proposes to fix the ambiguous link warnings by simply marking the link targets as code blocks, for both javadoc and scaladoc.

```
[warn] .../spark/core/src/main/scala/org/apache/spark/Accumulator.scala:20: The link target "SparkContext#accumulator" is ambiguous. Several members fit the target:
[warn] .../spark/mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala:281: The link target "runMiniBatchSGD" is ambiguous. Several members fit the target:
[warn] .../spark/mllib/src/main/scala/org/apache/spark/mllib/fpm/AssociationRules.scala:83: The link target "run" is ambiguous. Several members fit the target:
...
```

This PR also fixes a javadoc8 break, as below:

```
[error] .../spark/sql/core/target/java/org/apache/spark/sql/LowPrioritySQLImplicits.java:7: error: reference not found
[error]  * newProductEncoder - to disambiguate for {link List}s which are both {link Seq} and {link Product}
[error] ^
[error] .../spark/sql/core/target/java/org/apache/spark/sql/LowPrioritySQLImplicits.java:7: error: reference not found
[error]  * newProductEncoder - to disambiguate for {link List}s which are both {link Seq} and {link Product}
[error] ^
[error] .../spark/sql/core/target/java/org/apache/spark/sql/LowPrioritySQLImplicits.java:7: error: reference not found
[error]  * newProductEncoder - to disambiguate for {link List}s which are both {link Seq} and {link Product}
[error] ^
[info] 3 errors
```

## How was this patch tested?

Manually via `sbt unidoc > output.txt`, then checked via `cat output.txt | grep ambiguous` and `sbt unidoc | grep error`.

Author: hyukjinkwon <gurwls223@gmail.com>

Closes #16604 from HyukjinKwon/SPARK-3249.
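For context, a minimal standalone sketch of the pattern this patch applies (the `DocLinkExample` object and its `parse` overloads are hypothetical, not part of Spark): a Scaladoc `[[...]]` link to an overloaded member is replaced with a plain backtick code span, which both scaladoc and javadoc8 accept without warnings.

```scala
object DocLinkExample {

  // Before (triggers the unidoc warning): the Scaladoc read
  //   "Alias for [[parse]]."
  // Several overloaded members fit the link target, and javadoc8 also rejects
  // the {@link ...} tag that unidoc generates from it.

  /** Alias for `parse`. */ // after: a plain code span, accepted by both tools
  def parseString(s: String): Int = parse(s)

  def parse(s: String): Int = s.trim.toInt

  def parse(s: String, radix: Int): Int = Integer.parseInt(s.trim, radix)
}
```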
Diffstat (limited to 'sql/core')
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/SQLImplicits.scala | 2
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/functions.scala | 8
2 files changed, 5 insertions, 5 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLImplicits.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLImplicits.scala
index 2caf723669..375df64d39 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLImplicits.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLImplicits.scala
@@ -259,7 +259,7 @@ abstract class SQLImplicits extends LowPrioritySQLImplicits {
* Conflicting implicits are placed here to disambiguate resolution.
*
* Reasons for including specific implicits:
- * newProductEncoder - to disambiguate for [[List]]s which are both [[Seq]] and [[Product]]
+ * newProductEncoder - to disambiguate for `List`s which are both `Seq` and `Product`
*/
trait LowPrioritySQLImplicits {
/** @since 1.6.0 */
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
index 9a080fd3c9..cabe1f4563 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
@@ -633,7 +633,7 @@ object functions {
def skewness(columnName: String): Column = skewness(Column(columnName))
/**
- * Aggregate function: alias for [[stddev_samp]].
+ * Aggregate function: alias for `stddev_samp`.
*
* @group agg_funcs
* @since 1.6.0
@@ -641,7 +641,7 @@ object functions {
def stddev(e: Column): Column = withAggregateFunction { StddevSamp(e.expr) }
/**
- * Aggregate function: alias for [[stddev_samp]].
+ * Aggregate function: alias for `stddev_samp`.
*
* @group agg_funcs
* @since 1.6.0
@@ -717,7 +717,7 @@ object functions {
def sumDistinct(columnName: String): Column = sumDistinct(Column(columnName))
/**
- * Aggregate function: alias for [[var_samp]].
+ * Aggregate function: alias for `var_samp`.
*
* @group agg_funcs
* @since 1.6.0
@@ -725,7 +725,7 @@ object functions {
def variance(e: Column): Column = withAggregateFunction { VarianceSamp(e.expr) }
/**
- * Aggregate function: alias for [[var_samp]].
+ * Aggregate function: alias for `var_samp`.
*
* @group agg_funcs
* @since 1.6.0