From f686e3dacb02d42dd2bb9695a96cecd85786d7b5 Mon Sep 17 00:00:00 2001
From: Ismael Juma
Date: Fri, 15 Jul 2011 03:38:25 +0100
Subject: Initial work on converting build to SBT 0.10.1

---
 project/SparkBuild.scala | 101 +++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 101 insertions(+)
 create mode 100644 project/SparkBuild.scala

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
new file mode 100644
index 0000000000..b6191561a0
--- /dev/null
+++ b/project/SparkBuild.scala
@@ -0,0 +1,101 @@
+import sbt._
+import Keys._
+
+object SparkBuild extends Build {
+
+  lazy val root = Project("root", file("."), settings = sharedSettings) aggregate(core, repl, examples, bagel)
+
+  lazy val core = Project("core", file("core"), settings = coreSettings)
+
+  lazy val repl = Project("repl", file("repl"), settings = replSettings) dependsOn (core)
+
+  lazy val examples = Project("examples", file("examples"), settings = examplesSettings) dependsOn (core)
+
+  lazy val bagel = Project("bagel", file("bagel"), settings = bagelSettings) dependsOn (core)
+
+  def sharedSettings = Defaults.defaultSettings ++ Seq(
+    organization := "org.spark-project",
+    version := "version=0.4-SNAPSHOT",
+    scalaVersion := "2.9.0-1",
+    scalacOptions := Seq(/*"-deprecation",*/ "-unchecked"), // TODO Enable -deprecation and fix all warnings
+    unmanagedJars in Compile <<= baseDirectory map { base => (base ** "*.jar").classpath },
+    retrieveManaged := true,
+    transitiveClassifiers in Scope.GlobalScope := Seq("sources"),
+    libraryDependencies ++= Seq(
+      "org.eclipse.jetty" % "jetty-server" % "7.4.2.v20110526",
+      "org.scalatest" % "scalatest_2.9.0" % "1.4.1" % "test",
+      "org.scala-tools.testing" % "scalacheck_2.9.0" % "1.9" % "test"
+    )
+  )
+
+  val slf4jVersion = "1.6.1"
+
+  //FIXME DepJar and XmlTestReport
+  def coreSettings = sharedSettings ++ Seq(libraryDependencies ++= Seq(
+    "com.google.guava" % "guava" % "r09",
+    "log4j" % "log4j" % "1.2.16",
+    "org.slf4j" % "slf4j-api" % slf4jVersion,
+    "org.slf4j" % "slf4j-log4j12" % slf4jVersion,
+    "com.ning" % "compress-lzf" % "0.7.0",
+    "org.apache.hadoop" % "hadoop-core" % "0.20.2",
+    "asm" % "asm-all" % "3.3.1"
+  ))
+
+  //FIXME DepJar and XmlTestReport
+  def replSettings = sharedSettings ++ Seq(libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _))
+
+  def examplesSettings = sharedSettings ++ Seq(libraryDependencies += "colt" % "colt" % "1.2.0")
+
+  //FIXME DepJar and XmlTestReport
+  def bagelSettings = sharedSettings
+}
+
+// Project mixin for an XML-based ScalaTest report. Unfortunately
+// there is currently no way to call this directly from SBT without
+// executing a subprocess.
+//trait XmlTestReport extends BasicScalaProject {
+//  def testReportDir = outputPath / "test-report"
+//
+//  lazy val testReport = task {
+//    log.info("Creating " + testReportDir + "...")
+//    if (!testReportDir.exists) {
+//      testReportDir.asFile.mkdirs()
+//    }
+//    log.info("Executing org.scalatest.tools.Runner...")
+//    val command = ("scala -classpath " + testClasspath.absString +
+//                   " org.scalatest.tools.Runner -o " +
+//                   " -u " + testReportDir.absolutePath +
+//                   " -p " + (outputPath / "test-classes").absolutePath)
+//    Process(command, path("."), "JAVA_OPTS" -> "-Xmx500m") !
+//
+//    None
+//  }.dependsOn(compile, testCompile).describedAs("Generate XML test report.")
+//}
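The patch above replaces the old SBT 0.7 project class with an SBT 0.10 Build definition. Two project relationships are worth separating: aggregate(...) only makes tasks invoked on root run on the listed subprojects as well, while dependsOn(core) additionally puts core's output on the dependent project's classpath. Note also the value assigned to version, "version=0.4-SNAPSHOT": a copy-and-paste error that the next patch corrects. A minimal sketch of the same pattern, with hypothetical project names:

    import sbt._
    import Keys._

    // Minimal sketch (hypothetical names): `aggregate` fans tasks out to the
    // listed projects; `dependsOn` also wires util's classes onto app's classpath.
    object DemoBuild extends Build {
      lazy val root = Project("root", file("."), settings = shared) aggregate(util, app)
      lazy val util = Project("util", file("util"), settings = shared)
      lazy val app  = Project("app", file("app"), settings = shared) dependsOn(util)

      def shared = Defaults.defaultSettings ++ Seq(
        organization := "org.example",
        version := "0.1-SNAPSHOT",
        scalaVersion := "2.9.0-1"
      )
    }

With this layout, running compile on root also compiles util and app, but only app sees util's classes.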
From 635f501492f04df531977c49208a20a166d8656a Mon Sep 17 00:00:00 2001
From: Ismael Juma
Date: Mon, 18 Jul 2011 00:13:37 +0100
Subject: Fix copy & paste error in version.

---
 project/SparkBuild.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index b6191561a0..8a6d681d31 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -15,7 +15,7 @@ object SparkBuild extends Build {
 
   def sharedSettings = Defaults.defaultSettings ++ Seq(
     organization := "org.spark-project",
-    version := "version=0.4-SNAPSHOT",
+    version := "0.4-SNAPSHOT",
     scalaVersion := "2.9.0-1",
     scalacOptions := Seq(/*"-deprecation",*/ "-unchecked"), // TODO Enable -deprecation and fix all warnings
     unmanagedJars in Compile <<= baseDirectory map { base => (base ** "*.jar").classpath },

From 8531c2a079b971be7c14a30a716fbd2e514d006f Mon Sep 17 00:00:00 2001
From: Ismael Juma
Date: Mon, 18 Jul 2011 00:16:08 +0100
Subject: Update test dependencies.

---
 project/SparkBuild.scala | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 8a6d681d31..cd2e3211fe 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -23,8 +23,8 @@ object SparkBuild extends Build {
     transitiveClassifiers in Scope.GlobalScope := Seq("sources"),
     libraryDependencies ++= Seq(
       "org.eclipse.jetty" % "jetty-server" % "7.4.2.v20110526",
-      "org.scalatest" % "scalatest_2.9.0" % "1.4.1" % "test",
-      "org.scala-tools.testing" % "scalacheck_2.9.0" % "1.9" % "test"
+      "org.scalatest" % "scalatest_2.9.0" % "1.6.1" % "test",
+      "org.scala-tools.testing" % "scalacheck_2.9.0-1" % "1.9" % "test"
     )
   )
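Besides moving ScalaTest to 1.6.1, the second patch above fixes the ScalaCheck artifact id: libraries built before sbt-style cross-publishing encode the full Scala version in the artifact name, so under Scala 2.9.0-1 the correct artifact is scalacheck_2.9.0-1, not scalacheck_2.9.0. SBT's %% operator exists to derive that suffix from scalaVersion automatically; a sketch of the equivalent declaration, assuming the artifact is published for the exact Scala version in use:

    // Hand-written Scala-version suffix, as in the patch:
    libraryDependencies += "org.scala-tools.testing" % "scalacheck_2.9.0-1" % "1.9" % "test"

    // The same dependency with %%, which appends the scalaVersion suffix itself:
    libraryDependencies += "org.scala-tools.testing" %% "scalacheck" % "1.9" % "test"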
From 51673ca62e5051cd29d21d1635bc5ef0f8739337 Mon Sep 17 00:00:00 2001
From: Ismael Juma
Date: Mon, 18 Jul 2011 10:34:51 +0100
Subject: Introduce DepJarPlugin based on AssemblyPlugin and use it in SparkBuild.

---
 project/DepJar.scala      | 108 ++++++++++++++++++++++++++++++++++++++++++++++
 project/SparkBuild.scala  |  42 ++++--------------
 project/plugins/build.sbt |   2 -
 3 files changed, 116 insertions(+), 36 deletions(-)
 create mode 100644 project/DepJar.scala

diff --git a/project/DepJar.scala b/project/DepJar.scala
new file mode 100644
index 0000000000..1d54005690
--- /dev/null
+++ b/project/DepJar.scala
@@ -0,0 +1,108 @@
+import sbt._
+import Keys._
+import java.io.PrintWriter
+import scala.collection.mutable
+import scala.io.Source
+import Project.Initialize
+
+/*
+ * This is based on the AssemblyPlugin. For now it was easier to copy and modify than to wait for
+ * the required changes needed for us to customise it so that it does what we want. We may revisit
+ * this in the future.
+ */
+object DepJarPlugin extends Plugin {
+  val DepJar = config("dep-jar") extend(Runtime)
+  val depJar = TaskKey[File]("dep-jar", "Builds a single-file jar of all dependencies.")
+
+  val jarName = SettingKey[String]("jar-name")
+  val outputPath = SettingKey[File]("output-path")
+  val excludedFiles = SettingKey[Seq[File] => Seq[File]]("excluded-files")
+  val conflictingFiles = SettingKey[Seq[File] => Seq[File]]("conflicting-files")
+
+  private def assemblyTask: Initialize[Task[File]] =
+    (test, packageOptions, cacheDirectory, outputPath,
+     fullClasspath, excludedFiles, conflictingFiles, streams) map {
+      (test, options, cacheDir, jarPath, cp, exclude, conflicting, s) =>
+        IO.withTemporaryDirectory { tempDir =>
+          val srcs = assemblyPaths(tempDir, cp, exclude, conflicting, s.log)
+          val config = new Package.Configuration(srcs, jarPath, options)
+          Package(config, cacheDir, s.log)
+          jarPath
+        }
+    }
+
+  private def assemblyPackageOptionsTask: Initialize[Task[Seq[PackageOption]]] =
+    (packageOptions in Compile, mainClass in DepJar) map { (os, mainClass) =>
+      mainClass map { s =>
+        os find { o => o.isInstanceOf[Package.MainClass] } map { _ => os
+        } getOrElse { Package.MainClass(s) +: os }
+      } getOrElse {os}
+    }
+
+  private def assemblyExcludedFiles(base: Seq[File]): Seq[File] = {
+    ((base / "scala" ** "*") +++ // exclude scala library
+     (base / "spark" ** "*") +++ // exclude Spark classes
+     ((base / "META-INF" ** "*") --- // generally ignore the hell out of META-INF
+      (base / "META-INF" / "services" ** "*") --- // include all service providers
+      (base / "META-INF" / "maven" ** "*"))).get // include all Maven POMs and such
+  }
+
+  private def assemblyPaths(tempDir: File, classpath: Classpath,
+      exclude: Seq[File] => Seq[File], conflicting: Seq[File] => Seq[File], log: Logger) = {
+    import sbt.classpath.ClasspathUtilities
+
+    val (libs, directories) = classpath.map(_.data).partition(ClasspathUtilities.isArchive)
+    val services = mutable.Map[String, mutable.ArrayBuffer[String]]()
+    for(jar <- libs) {
+      val jarName = jar.asFile.getName
+      log.info("Including %s".format(jarName))
+      IO.unzip(jar, tempDir)
+      IO.delete(conflicting(Seq(tempDir)))
+      val servicesDir = tempDir / "META-INF" / "services"
+      if (servicesDir.asFile.exists) {
+        for (service <- (servicesDir ** "*").get) {
+          val serviceFile = service.asFile
+          if (serviceFile.exists && serviceFile.isFile) {
+            val entries = services.getOrElseUpdate(serviceFile.getName, new mutable.ArrayBuffer[String]())
+            for (provider <- Source.fromFile(serviceFile).getLines) {
+              if (!entries.contains(provider)) {
+                entries += provider
+              }
+            }
+          }
+        }
+      }
+    }
+
+    for ((service, providers) <- services) {
+      log.debug("Merging providers for %s".format(service))
+      val serviceFile = (tempDir / "META-INF" / "services" / service).asFile
+      val writer = new PrintWriter(serviceFile)
+      for (provider <- providers.map { _.trim }.filter { !_.isEmpty }) {
+        log.debug("- %s".format(provider))
+        writer.println(provider)
+      }
+      writer.close()
+    }
+
+    val base = tempDir +: directories
+    val descendants = ((base ** (-DirectoryFilter)) --- exclude(base)).get
+    descendants x relativeTo(base)
+  }
+
+  lazy val depJarSettings = inConfig(DepJar)(Seq(
+    depJar <<= packageBin.identity,
+    packageBin <<= assemblyTask,
+    jarName <<= (name, version) { (name, version) => name + "-dep-" + version + ".jar" },
+    outputPath <<= (target, jarName) { (t, s) => t / s },
+    test <<= (test in Test).identity,
+    mainClass <<= (mainClass in Runtime).identity,
+    fullClasspath <<= (fullClasspath in Runtime).identity,
+    packageOptions <<= assemblyPackageOptionsTask,
+    excludedFiles := assemblyExcludedFiles _,
+    conflictingFiles := assemblyExcludedFiles _
+  )) ++
+  Seq(
+    depJar <<= (depJar in DepJar).identity
+  )
+}
\ No newline at end of file
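The subtle part of DepJar.scala is the treatment of META-INF/services: several dependency jars may register providers for the same service interface, and unpacking them in sequence would leave only the last jar's registration, so the plugin accumulates the union of provider lines and rewrites each service file once. A standalone sketch of that merge step, using a hypothetical helper outside SBT:

    import java.io.{File, PrintWriter}
    import scala.collection.mutable
    import scala.io.Source

    object ServiceFileMerge {
      // Union the provider lines of every META-INF/services file found in
      // `serviceDirs`, then write one merged file per service into `outDir`,
      // instead of letting the last unpacked jar overwrite earlier ones.
      def merge(serviceDirs: Seq[File], outDir: File) {
        val providers = mutable.LinkedHashMap[String, mutable.LinkedHashSet[String]]()
        for (dir <- serviceDirs; f <- Option(dir.listFiles).getOrElse(Array.empty[File]); if f.isFile) {
          val entries = providers.getOrElseUpdate(f.getName, mutable.LinkedHashSet[String]())
          for (line <- Source.fromFile(f).getLines(); p = line.trim; if p.nonEmpty)
            entries += p
        }
        outDir.mkdirs()
        for ((service, lines) <- providers) {
          val writer = new PrintWriter(new File(outDir, service))
          lines.foreach(writer.println)
          writer.close()
        }
      }
    }

The rest of the patch, below, wires the plugin into SparkBuild.scala and drops the sbt-assembly plugin dependency it replaces.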
%s".format(service)) + val serviceFile = (tempDir / "META-INF" / "services" / service).asFile + val writer = new PrintWriter(serviceFile) + for (provider <- providers.map { _.trim }.filter { !_.isEmpty }) { + log.debug("- %s".format(provider)) + writer.println(provider) + } + writer.close() + } + + val base = tempDir +: directories + val descendants = ((base ** (-DirectoryFilter)) --- exclude(base)).get + descendants x relativeTo(base) + } + + lazy val depJarSettings = inConfig(DepJar)(Seq( + depJar <<= packageBin.identity, + packageBin <<= assemblyTask, + jarName <<= (name, version) { (name, version) => name + "-dep-" + version + ".jar" }, + outputPath <<= (target, jarName) { (t, s) => t / s }, + test <<= (test in Test).identity, + mainClass <<= (mainClass in Runtime).identity, + fullClasspath <<= (fullClasspath in Runtime).identity, + packageOptions <<= assemblyPackageOptionsTask, + excludedFiles := assemblyExcludedFiles _, + conflictingFiles := assemblyExcludedFiles _ + )) ++ + Seq( + depJar <<= (depJar in DepJar).identity + ) +} \ No newline at end of file diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala index cd2e3211fe..22279b7919 100644 --- a/project/SparkBuild.scala +++ b/project/SparkBuild.scala @@ -30,7 +30,7 @@ object SparkBuild extends Build { val slf4jVersion = "1.6.1" - //FIXME DepJar and XmlTestReport + //FIXME XmlTestReport def coreSettings = sharedSettings ++ Seq(libraryDependencies ++= Seq( "com.google.guava" % "guava" % "r09", "log4j" % "log4j" % "1.2.16", @@ -39,15 +39,17 @@ object SparkBuild extends Build { "com.ning" % "compress-lzf" % "0.7.0", "org.apache.hadoop" % "hadoop-core" % "0.20.2", "asm" % "asm-all" % "3.3.1" - )) + )) ++ DepJarPlugin.depJarSettings - //FIXME DepJar and XmlTestReport - def replSettings = sharedSettings ++ Seq(libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _)) + //FIXME XmlTestReport + def replSettings = sharedSettings ++ + Seq(libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _)) ++ + DepJarPlugin.depJarSettings def examplesSettings = sharedSettings ++ Seq(libraryDependencies += "colt" % "colt" % "1.2.0") - //FIXME DepJar and XmlTestReport - def bagelSettings = sharedSettings + //FIXME XmlTestReport + def bagelSettings = sharedSettings ++ DepJarPlugin.depJarSettings } // Project mixin for an XML-based ScalaTest report. Unfortunately @@ -71,31 +73,3 @@ object SparkBuild extends Build { // None // }.dependsOn(compile, testCompile).describedAs("Generate XML test report.") //} - -// Project mixin for creating a JAR with a project's dependencies. This is based -// on the AssemblyBuilder plugin, but because this plugin attempts to package Scala -// and our project too, we leave that out using our own exclude filter (depJarExclude). 
From fc0a2c8db8813d346f88fa43075bff19d875e59a Mon Sep 17 00:00:00 2001
From: Ismael Juma
Date: Thu, 21 Jul 2011 01:04:29 +0100
Subject: Add and configure junit_xml_listener as a replacement for XmlTestReport.

---
 project/SparkBuild.scala                       | 26 +-------------------------
 project/plugins/project/SparkPluginBuild.scala |  7 +++++++
 2 files changed, 8 insertions(+), 25 deletions(-)
 create mode 100644 project/plugins/project/SparkPluginBuild.scala

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 22279b7919..456c0da0db 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -21,6 +21,7 @@ object SparkBuild extends Build {
     unmanagedJars in Compile <<= baseDirectory map { base => (base ** "*.jar").classpath },
     retrieveManaged := true,
     transitiveClassifiers in Scope.GlobalScope := Seq("sources"),
+    testListeners <<= target.map(t => Seq(new eu.henkelmann.sbt.JUnitXmlTestsListener(t.getAbsolutePath))),
     libraryDependencies ++= Seq(
       "org.eclipse.jetty" % "jetty-server" % "7.4.2.v20110526",
       "org.scalatest" % "scalatest_2.9.0" % "1.6.1" % "test",
@@ -30,7 +31,6 @@ object SparkBuild extends Build {
 
   val slf4jVersion = "1.6.1"
 
-  //FIXME XmlTestReport
   def coreSettings = sharedSettings ++ Seq(libraryDependencies ++= Seq(
     "com.google.guava" % "guava" % "r09",
     "log4j" % "log4j" % "1.2.16",
@@ -41,35 +41,11 @@ object SparkBuild extends Build {
     "com.ning" % "compress-lzf" % "0.7.0",
     "org.apache.hadoop" % "hadoop-core" % "0.20.2",
     "asm" % "asm-all" % "3.3.1"
   )) ++ DepJarPlugin.depJarSettings
 
-  //FIXME XmlTestReport
   def replSettings = sharedSettings ++
     Seq(libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _)) ++
     DepJarPlugin.depJarSettings
 
   def examplesSettings = sharedSettings ++ Seq(libraryDependencies += "colt" % "colt" % "1.2.0")
 
-  //FIXME XmlTestReport
   def bagelSettings = sharedSettings ++ DepJarPlugin.depJarSettings
 }
-
-// Project mixin for an XML-based ScalaTest report. Unfortunately
-// there is currently no way to call this directly from SBT without
-// executing a subprocess.
-//trait XmlTestReport extends BasicScalaProject {
-//  def testReportDir = outputPath / "test-report"
-//
-//  lazy val testReport = task {
-//    log.info("Creating " + testReportDir + "...")
-//    if (!testReportDir.exists) {
-//      testReportDir.asFile.mkdirs()
-//    }
-//    log.info("Executing org.scalatest.tools.Runner...")
-//    val command = ("scala -classpath " + testClasspath.absString +
-//                   " org.scalatest.tools.Runner -o " +
-//                   " -u " + testReportDir.absolutePath +
-//                   " -p " + (outputPath / "test-classes").absolutePath)
-//    Process(command, path("."), "JAVA_OPTS" -> "-Xmx500m") !
-//
-//    None
-//  }.dependsOn(compile, testCompile).describedAs("Generate XML test report.")
-//}
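The new testListeners line is the in-process replacement for the whole XmlTestReport mixin: rather than shelling out to org.scalatest.tools.Runner, a test listener receives results as the tests run and writes JUnit-style XML under the supplied target directory, which CI servers can parse. The construct uses the usual 0.10 idiom of deriving one key from another; eu.henkelmann.sbt.JUnitXmlTestsListener itself comes from the plugin added by the new file below.

    // `target.map(f)` lifts the `target` setting into a task, and `<<=`
    // binds testListeners to its result, so reports land beneath target/.
    testListeners <<= target.map(t => Seq(new eu.henkelmann.sbt.JUnitXmlTestsListener(t.getAbsolutePath)))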
diff --git a/project/plugins/project/SparkPluginBuild.scala b/project/plugins/project/SparkPluginBuild.scala
new file mode 100644
index 0000000000..999611982a
--- /dev/null
+++ b/project/plugins/project/SparkPluginBuild.scala
@@ -0,0 +1,7 @@
+import sbt._
+
+object SparkPluginDef extends Build {
+  lazy val root = Project("plugins", file(".")) dependsOn(junitXmlListener)
+  /* This is not published in a Maven repository, so we get it from GitHub directly */
+  lazy val junitXmlListener = uri("git://github.com/ijuma/junit_xml_listener.git#fe434773255b451a38e8d889536ebc260f4225ce")
+}
\ No newline at end of file
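SparkPluginBuild.scala sits one build level down (project/plugins/project), so it defines the build of the build definition: whatever root depends on here ends up on the classpath of SparkBuild.scala itself, which is what makes JUnitXmlTestsListener resolvable above. Because junit_xml_listener is unpublished, it is referenced straight from GitHub and pinned to a commit hash, so sbt clones and compiles it from source while keeping the build reproducible. The same pattern against a hypothetical repository:

    import sbt._

    // Hypothetical example: depend on an unpublished sbt plugin directly
    // from a git repository, pinned to a commit for reproducible builds.
    object PluginDef extends Build {
      lazy val plugins  = Project("plugins", file(".")) dependsOn(myPlugin)
      lazy val myPlugin = uri("git://example.org/my-sbt-plugin.git#0123456789abcdef0123456789abcdef01234567")
    }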