author     Reynold Xin <rxin@databricks.com>            2015-03-02 22:14:08 -0800
committer  Michael Armbrust <michael@databricks.com>    2015-03-02 22:14:08 -0800
commit     54d19689ff8d786acde5b8ada6741854ffadadea (patch)
tree       b0a2a68c3e8ea6a7f0209fa6a12d02d1c38b3c71 /project
parent     12599942e69e4d73040f3a8611661a0862514ffc (diff)
[SPARK-5310][SQL] Fixes to Docs and Datasources API
- Various Fixes to docs
- Make data source traits actually interfaces

Based on #4862 but with fixed conflicts.

Author: Reynold Xin <rxin@databricks.com>
Author: Michael Armbrust <michael@databricks.com>

Closes #4868 from marmbrus/pr/4862 and squashes the following commits:

fe091ea [Michael Armbrust] Merge remote-tracking branch 'origin/master' into pr/4862
0208497 [Reynold Xin] Test fixes.
34e0a28 [Reynold Xin] [SPARK-5310][SQL] Various fixes to Spark SQL docs.
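The "Make data source traits actually interfaces" item refers to a general Scala 2.10/2.11 behavior: a trait whose members are all abstract compiles to a plain Java interface, whereas any concrete member produces a synthetic implementation class that is awkward to implement from Java. A minimal sketch of the distinction (the trait names below are illustrative, not the actual Spark SQL data source API):

// Illustrative only; not the real Spark SQL data source traits.

// All members abstract: compiles to a plain Java interface under
// Scala 2.10/2.11, so a Java class can simply use `implements`.
trait PureScanLike {
  def schema: Seq[String]
  def buildScan(): Iterator[Array[Any]]
}

// A concrete member forces a synthetic `ScanWithDefault$class` holding the
// implementation, so Java implementers cannot extend the trait cleanly.
trait ScanWithDefault {
  def schema: Seq[String] = Seq.empty
  def buildScan(): Iterator[Array[Any]]
}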
Diffstat (limited to 'project')
-rw-r--r--  project/SparkBuild.scala  29
1 file changed, 17 insertions, 12 deletions
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index e4b1b96527..4f17df59f4 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -357,6 +357,21 @@ object Unidoc {
names.map(s => "org.apache.spark." + s).mkString(":")
}
+ private def ignoreUndocumentedPackages(packages: Seq[Seq[File]]): Seq[Seq[File]] = {
+ packages
+ .map(_.filterNot(_.getName.contains("$")))
+ .map(_.filterNot(_.getCanonicalPath.contains("akka")))
+ .map(_.filterNot(_.getCanonicalPath.contains("deploy")))
+ .map(_.filterNot(_.getCanonicalPath.contains("network")))
+ .map(_.filterNot(_.getCanonicalPath.contains("shuffle")))
+ .map(_.filterNot(_.getCanonicalPath.contains("executor")))
+ .map(_.filterNot(_.getCanonicalPath.contains("python")))
+ .map(_.filterNot(_.getCanonicalPath.contains("collection")))
+ .map(_.filterNot(_.getCanonicalPath.contains("sql/catalyst")))
+ .map(_.filterNot(_.getCanonicalPath.contains("sql/execution")))
+ .map(_.filterNot(_.getCanonicalPath.contains("sql/hive/test")))
+ }
+
lazy val settings = scalaJavaUnidocSettings ++ Seq (
publish := {},
@@ -368,22 +383,12 @@ object Unidoc {
// Skip actual catalyst, but include the subproject.
// Catalyst is not public API and contains quasiquotes which break scaladoc.
unidocAllSources in (ScalaUnidoc, unidoc) := {
- (unidocAllSources in (ScalaUnidoc, unidoc)).value
- .map(_.filterNot(_.getCanonicalPath.contains("sql/catalyst")))
+ ignoreUndocumentedPackages((unidocAllSources in (ScalaUnidoc, unidoc)).value)
},
// Skip class names containing $ and some internal packages in Javadocs
unidocAllSources in (JavaUnidoc, unidoc) := {
- (unidocAllSources in (JavaUnidoc, unidoc)).value
- .map(_.filterNot(_.getName.contains("$")))
- .map(_.filterNot(_.getCanonicalPath.contains("akka")))
- .map(_.filterNot(_.getCanonicalPath.contains("deploy")))
- .map(_.filterNot(_.getCanonicalPath.contains("network")))
- .map(_.filterNot(_.getCanonicalPath.contains("shuffle")))
- .map(_.filterNot(_.getCanonicalPath.contains("executor")))
- .map(_.filterNot(_.getCanonicalPath.contains("python")))
- .map(_.filterNot(_.getCanonicalPath.contains("collection")))
- .map(_.filterNot(_.getCanonicalPath.contains("sql/catalyst")))
+ ignoreUndocumentedPackages((unidocAllSources in (JavaUnidoc, unidoc)).value)
},
// Javadoc options: create a window title, and group key packages on index page
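For illustration, a standalone sketch (plain Scala outside sbt; the object name and sample paths are made up) of the effect the consolidated ignoreUndocumentedPackages helper has on per-project source lists such as those produced by unidocAllSources:

import java.io.File

object UnidocFilterSketch {
  // Same exclusion rules as the helper added above: drop generated classes
  // (names containing "$") and anything under the listed internal packages.
  private val internalPackages = Seq(
    "akka", "deploy", "network", "shuffle", "executor",
    "python", "collection", "sql/catalyst", "sql/execution", "sql/hive/test")

  def ignoreUndocumentedPackages(packages: Seq[Seq[File]]): Seq[Seq[File]] = {
    packages
      .map(_.filterNot(_.getName.contains("$")))
      .map(_.filterNot(f => internalPackages.exists(p => f.getCanonicalPath.contains(p))))
  }

  def main(args: Array[String]): Unit = {
    // Hypothetical per-project source lists.
    val sources = Seq(Seq(
      new File("core/src/main/scala/org/apache/spark/SparkContext.scala"),
      new File("core/src/main/scala/org/apache/spark/deploy/Client.scala"),
      new File("sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/Row.scala"),
      new File("core/target/SomeGenerated$1.scala")))

    // Prints only the SparkContext.scala path; the other files match an
    // exclusion (an internal package or a "$" in the file name).
    ignoreUndocumentedPackages(sources).flatten.foreach(println)
  }
}

The same rule chain now backs both the ScalaUnidoc and JavaUnidoc settings, so a new internal package only has to be excluded in one place.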