author    Xiangrui Meng <meng@databricks.com>  2016-06-22 15:52:37 -0700
committer Xiangrui Meng <meng@databricks.com>  2016-06-22 15:52:37 -0700
commit    857ecff1d8268b28bb287e47cda370c87afe9d41 (patch)
tree      593cc3458484dc2858219d0ba2acc35a5bb5ab05 /project
parent    00cc5cca4522297b63b1522a2b8643b1a098e2b3 (diff)
[SPARK-16155][DOC] remove package grouping in Java docs
## What changes were proposed in this pull request?

In 1.4 and earlier releases, we had package grouping in the generated Java API docs. See http://spark.apache.org/docs/1.4.0/api/java/index.html. However, this disappeared in 1.5.0: http://spark.apache.org/docs/1.5.0/api/java/index.html. Rather than fixing it, I'd suggest removing the grouping, because fixing it might take some time and updating the grouping in `SparkBuild.scala` is a manual process. I didn't find anyone complaining about the missing groups since 1.5.0 on Google.

## How was this patch tested?

Manually checked the generated Java API docs and confirmed that they are the same as in master.

Author: Xiangrui Meng <meng@databricks.com>

Closes #13856 from mengxr/SPARK-16155.
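For context, javadoc package grouping is driven by javadoc's `-group` option, which takes a group heading plus a colon-separated list of packages; `SparkBuild.scala` assembled that list with a small `packageList` helper. Below is a minimal, self-contained sketch of the mechanism this commit removes (the `JavadocGroups` wrapper object is hypothetical and added only so the snippet compiles on its own; the helper body and the "Core Java API" group are taken verbatim from the diff below):

```scala
object JavadocGroups {
  // Builds the colon-separated package list that javadoc's -group option
  // expects, prefixing each name with "org.apache.spark.". This is the
  // helper removed by this commit.
  private def packageList(names: String*): String =
    names.map(s => "org.apache.spark." + s).mkString(":")

  // Example: yields Seq("-group", "Core Java API",
  //   "org.apache.spark.api.java:org.apache.spark.api.java.function")
  val coreGroup: Seq[String] =
    Seq("-group", "Core Java API", packageList("api.java", "api.java.function"))
}
```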
Diffstat (limited to 'project')
-rw-r--r--  project/SparkBuild.scala | 20
1 file changed, 0 insertions, 20 deletions
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index bce7f1d69b..4b44469576 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -684,11 +684,6 @@ object Unidoc {
import sbtunidoc.Plugin._
import UnidocKeys._
- // for easier specification of JavaDoc package groups
- private def packageList(names: String*): String = {
- names.map(s => "org.apache.spark." + s).mkString(":")
- }
-
private def ignoreUndocumentedPackages(packages: Seq[Seq[File]]): Seq[Seq[File]] = {
packages
.map(_.filterNot(_.getName.contains("$")))
@@ -731,21 +726,6 @@ object Unidoc {
javacOptions in doc := Seq(
"-windowtitle", "Spark " + version.value.replaceAll("-SNAPSHOT", "") + " JavaDoc",
"-public",
- "-group", "Core Java API", packageList("api.java", "api.java.function"),
- "-group", "Spark Streaming", packageList(
- "streaming.api.java", "streaming.flume", "streaming.kafka", "streaming.kinesis"
- ),
- "-group", "MLlib", packageList(
- "mllib.classification", "mllib.clustering", "mllib.evaluation.binary", "mllib.linalg",
- "mllib.linalg.distributed", "mllib.optimization", "mllib.rdd", "mllib.recommendation",
- "mllib.regression", "mllib.stat", "mllib.tree", "mllib.tree.configuration",
- "mllib.tree.impurity", "mllib.tree.model", "mllib.util",
- "mllib.evaluation", "mllib.feature", "mllib.random", "mllib.stat.correlation",
- "mllib.stat.test", "mllib.tree.impl", "mllib.tree.loss",
- "ml", "ml.attribute", "ml.classification", "ml.clustering", "ml.evaluation", "ml.feature",
- "ml.param", "ml.recommendation", "ml.regression", "ml.tuning"
- ),
- "-group", "Spark SQL", packageList("sql.api.java", "sql.api.java.types", "sql.hive.api.java"),
"-noqualifier", "java.lang"
),
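After this change, the javadoc configuration in `object Unidoc` reduces to the following fragment (reconstructed from the context lines of the diff above; only the `-group` flags are gone):

```scala
// Remaining javadoc options once the -group flags are dropped:
// window title, public members only, and unqualified java.lang names.
javacOptions in doc := Seq(
  "-windowtitle", "Spark " + version.value.replaceAll("-SNAPSHOT", "") + " JavaDoc",
  "-public",
  "-noqualifier", "java.lang"
)
```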