author     Prashant Sharma <prashant.s@imaginea.com>    2014-08-07 16:24:22 -0700
committer  Patrick Wendell <pwendell@gmail.com>         2014-08-07 16:24:22 -0700
commit  32096c2aed9978cfb9a904b4f56bb61800d17e9e (patch)
tree    cd748098b0be9cf10eb2c6909c7d780ed9e4c29b /project
parent  80ec5bad1311651fe56e1d5178090dc63753233b (diff)
SPARK-2899 Doc generation is back to working in new SBT Build.

The reason for this bug was the introduction of the OldDeps project. It had to be excluded to prevent unidoc from trying to put it on the "docs compile" classpath.

Author: Prashant Sharma <prashant.s@imaginea.com>

Closes #1830 from ScrapCodes/doc-fix and squashes the following commits:

e5d52e6 [Prashant Sharma] SPARK-2899 Doc generation is back to working in new SBT Build.
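For context, the fix hinges on sbt-unidoc's project filter: dropping a project from the unidoc aggregation keeps its sources and dependencies off the docs classpath. A minimal sketch of the pattern, assuming sbt 0.13 with sbt-unidoc 0.3.x; the oldDeps and docsSettings names here are illustrative, not Spark's actual build (see the diff below for the real change):

    import sbt._
    import sbt.Keys._
    import sbtunidoc.Plugin._
    import sbtunidoc.Plugin.UnidocKeys._

    // Illustrative stand-in for Spark's OldDeps.project: a helper project
    // that exists only to resolve old artifacts and must not be documented.
    lazy val oldDeps = Project("oldDeps", file("dev"))

    // Enable unidoc, then filter the helper project out of the aggregation
    // so unidoc never puts it on the "docs compile" classpath.
    lazy val docsSettings = unidocSettings ++ Seq(
      unidocProjectFilter in (ScalaUnidoc, unidoc) :=
        inAnyProject -- inProjects(oldDeps)
    )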
Diffstat (limited to 'project')
-rw-r--r--  project/SparkBuild.scala  60
-rw-r--r--  project/plugins.sbt        2
2 files changed, 34 insertions, 28 deletions
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index ed587783d5..63a285b81a 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -30,11 +30,11 @@ object BuildCommons {
private val buildLocation = file(".").getAbsoluteFile.getParentFile
- val allProjects@Seq(bagel, catalyst, core, graphx, hive, hiveThriftServer, mllib, repl, spark,
+ val allProjects@Seq(bagel, catalyst, core, graphx, hive, hiveThriftServer, mllib, repl,
sql, streaming, streamingFlumeSink, streamingFlume, streamingKafka, streamingMqtt,
streamingTwitter, streamingZeromq) =
Seq("bagel", "catalyst", "core", "graphx", "hive", "hive-thriftserver", "mllib", "repl",
- "spark", "sql", "streaming", "streaming-flume-sink", "streaming-flume", "streaming-kafka",
+ "sql", "streaming", "streaming-flume-sink", "streaming-flume", "streaming-kafka",
"streaming-mqtt", "streaming-twitter", "streaming-zeromq").map(ProjectRef(buildLocation, _))
val optionallyEnabledProjects@Seq(yarn, yarnStable, yarnAlpha, java8Tests, sparkGangliaLgpl, sparkKinesisAsl) =
@@ -44,8 +44,9 @@ object BuildCommons {
val assemblyProjects@Seq(assembly, examples) = Seq("assembly", "examples")
.map(ProjectRef(buildLocation, _))
- val tools = "tools"
-
+ val tools = ProjectRef(buildLocation, "tools")
+ // Root project.
+ val spark = ProjectRef(buildLocation, "spark")
val sparkHome = buildLocation
}
@@ -126,26 +127,6 @@ object SparkBuild extends PomBuild {
publishLocalBoth <<= Seq(publishLocal in MavenCompile, publishLocal).dependOn
)
- /** Following project only exists to pull previous artifacts of Spark for generating
- Mima ignores. For more information see: SPARK 2071 */
- lazy val oldDeps = Project("oldDeps", file("dev"), settings = oldDepsSettings)
-
- def versionArtifact(id: String): Option[sbt.ModuleID] = {
- val fullId = id + "_2.10"
- Some("org.apache.spark" % fullId % "1.0.0")
- }
-
- def oldDepsSettings() = Defaults.defaultSettings ++ Seq(
- name := "old-deps",
- scalaVersion := "2.10.4",
- retrieveManaged := true,
- retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
- libraryDependencies := Seq("spark-streaming-mqtt", "spark-streaming-zeromq",
- "spark-streaming-flume", "spark-streaming-kafka", "spark-streaming-twitter",
- "spark-streaming", "spark-mllib", "spark-bagel", "spark-graphx",
- "spark-core").map(versionArtifact(_).get intransitive())
- )
-
def enable(settings: Seq[Setting[_]])(projectRef: ProjectRef) = {
val existingSettings = projectsMap.getOrElse(projectRef.project, Seq[Setting[_]]())
projectsMap += (projectRef.project -> (existingSettings ++ settings))
@@ -184,7 +165,7 @@ object SparkBuild extends PomBuild {
super.projectDefinitions(baseDirectory).map { x =>
if (projectsMap.exists(_._1 == x.id)) x.settings(projectsMap(x.id): _*)
else x.settings(Seq[Setting[_]](): _*)
- } ++ Seq[Project](oldDeps)
+ } ++ Seq[Project](OldDeps.project)
}
}
@@ -193,6 +174,31 @@ object Flume {
lazy val settings = sbtavro.SbtAvro.avroSettings
}
+/**
+ * Following project only exists to pull previous artifacts of Spark for generating
+ * Mima ignores. For more information see: SPARK 2071
+ */
+object OldDeps {
+
+ lazy val project = Project("oldDeps", file("dev"), settings = oldDepsSettings)
+
+ def versionArtifact(id: String): Option[sbt.ModuleID] = {
+ val fullId = id + "_2.10"
+ Some("org.apache.spark" % fullId % "1.0.0")
+ }
+
+ def oldDepsSettings() = Defaults.defaultSettings ++ Seq(
+ name := "old-deps",
+ scalaVersion := "2.10.4",
+ retrieveManaged := true,
+ retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
+ libraryDependencies := Seq("spark-streaming-mqtt", "spark-streaming-zeromq",
+ "spark-streaming-flume", "spark-streaming-kafka", "spark-streaming-twitter",
+ "spark-streaming", "spark-mllib", "spark-bagel", "spark-graphx",
+ "spark-core").map(versionArtifact(_).get intransitive())
+ )
+}
+
object Catalyst {
lazy val settings = Seq(
addCompilerPlugin("org.scalamacros" % "paradise" % "2.0.1" cross CrossVersion.full),
@@ -285,9 +291,9 @@ object Unidoc {
publish := {},
unidocProjectFilter in(ScalaUnidoc, unidoc) :=
- inAnyProject -- inProjects(repl, examples, tools, catalyst, yarn, yarnAlpha),
+ inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, catalyst, yarn, yarnAlpha),
unidocProjectFilter in(JavaUnidoc, unidoc) :=
- inAnyProject -- inProjects(repl, bagel, graphx, examples, tools, catalyst, yarn, yarnAlpha),
+ inAnyProject -- inProjects(OldDeps.project, repl, bagel, graphx, examples, tools, catalyst, yarn, yarnAlpha),
// Skip class names containing $ and some internal packages in Javadocs
unidocAllSources in (JavaUnidoc, unidoc) := {
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 06d18e1930..2a61f56c2e 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -23,6 +23,6 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.6")
addSbtPlugin("com.alpinenow" % "junit_xml_listener" % "0.5.1")
-addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.3.0")
+addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.3.1")
addSbtPlugin("com.cavorite" % "sbt-avro" % "0.3.2")