summaryrefslogtreecommitdiff
path: root/project
diff options
context:
space:
mode:
authorJosh Suereth <joshua.suereth@gmail.com>2011-12-01 10:18:08 -0500
committerJosh Suereth <joshua.suereth@gmail.com>2011-12-01 10:18:08 -0500
commitd4e16acd28f1dec82c213403f78d0e33cca4a791 (patch)
treea9726339fa9b07936e827ef146218bc813c7f057 /project
parent51f5831b0c0d14c28938a6f537b93f183217d942 (diff)
downloadscala-d4e16acd28f1dec82c213403f78d0e33cca4a791.tar.gz
scala-d4e16acd28f1dec82c213403f78d0e33cca4a791.tar.bz2
scala-d4e16acd28f1dec82c213403f78d0e33cca4a791.zip
Port of SBT 0.11.x build. Things appear to be working well.
Diffstat (limited to 'project')
-rw-r--r--project/Build.scala574
-rw-r--r--project/Layers.scala90
-rw-r--r--project/Partest.scala141
-rw-r--r--project/Release.scala94
-rw-r--r--project/Sametest.scala66
-rw-r--r--project/VerifyClassLoad.scala46
-rw-r--r--project/build.properties11
-rw-r--r--project/build/AdditionalResources.scala81
-rw-r--r--project/build/BasicLayer.scala296
-rw-r--r--project/build/BuildInfoEnvironment.scala21
-rw-r--r--project/build/Comparator.scala72
-rw-r--r--project/build/Compilation.scala104
-rw-r--r--project/build/CompilationStep.scala39
-rw-r--r--project/build/ForkSBT.scala49
-rw-r--r--project/build/Packer.scala122
-rwxr-xr-xproject/build/Partest.scala370
-rw-r--r--project/build/PathConfig.scala43
-rw-r--r--project/build/SVN.scala36
-rw-r--r--project/build/ScalaBuildProject.scala36
-rw-r--r--project/build/ScalaSBTBuilder.scala362
-rw-r--r--project/build/ScalaTools.scala179
-rw-r--r--project/build/Scaladoc.scala48
-rw-r--r--project/plugins/Plugins.scala6
-rw-r--r--project/plugins/build.sbt2
-rw-r--r--project/plugins/project/Build.scala7
25 files changed, 1020 insertions, 1875 deletions
diff --git a/project/Build.scala b/project/Build.scala
new file mode 100644
index 0000000000..dd75b92734
--- /dev/null
+++ b/project/Build.scala
@@ -0,0 +1,574 @@
+import sbt._
+import Keys._
+import partest._
+import SameTest._
+
+object ScalaBuild extends Build with Layers {
+ // New tasks/settings specific to the scala build.
+ lazy val lockerLock: TaskKey[Unit] = TaskKey("locker-lock",
+ "Locks the locker layer of the compiler build such that it won't rebuild on changed source files.")
+ lazy val lockerUnlock: TaskKey[Unit] = TaskKey("locker-unlock",
+ "Unlocks the locker layer of the compiler so that it will be recompiled on changed source files.")
+ lazy val lockFile: SettingKey[File] = SettingKey("lock-file",
+ "Location of the lock file compiling this project.")
+ lazy val makeDist: TaskKey[File] = TaskKey("make-dist",
+ "Creates a mini-distribution (scala home directory) for this build in a zip file.")
+ lazy val makeExplodedDist: TaskKey[File] = TaskKey("make-exploded-dist",
+ "Creates a mini-distribution (scala home directory) for this build in a directory.")
+ lazy val makeDistMappings: TaskKey[Map[File, String]] = TaskKey("make-dist-mappings",
+ "Creates distribution mappings for creating zips,jars,directorys,etc.")
+ lazy val buildFixed = AttributeKey[Boolean]("build-uri-fixed")
+
+ // Build wide settings:
+ override lazy val settings = super.settings ++ Seq(
+ autoScalaLibrary := false,
+ resolvers += Resolver.url(
+ "Typesafe nightlies",
+ url("https://typesafe.artifactoryonline.com/typesafe/ivy-snapshots/")
+ )(Resolver.ivyStylePatterns),
+ resolvers ++= Seq(
+ "junit interface repo" at "https://repository.jboss.org/nexus/content/repositories/scala-tools-releases",
+ ScalaToolsSnapshots
+ ),
+ organization := "org.scala-lang",
+ version := "2.10.0-SNAPSHOT",
+ scalaVersion := "2.10.0-SNAPSHOT",
+ pomExtra := <xml:group>
+ <inceptionYear>2002</inceptionYear>
+ <licenses>
+ <license>
+ <name>BSD-like</name>
+ <url>http://www.scala-lang.org/downloads/license.html</url>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
+ </scm>
+ <issueManagement>
+ <system>jira</system>
+ <url>http://issues.scala-lang.org</url>
+ </issueManagement>
+ </xml:group>,
+ commands += Command.command("fix-uri-projects") { (state: State) =>
+ if(state.get(buildFixed) getOrElse false) state
+ else {
+ // TODO -fix up scalacheck's dependencies!
+ val extracted = Project.extract(state)
+ import extracted._
+ def fix(s: Setting[_]): Setting[_] = s match {
+ case ScopedExternalSetting(`scalacheck`, scalaInstance.key, setting) => fullQuickScalaReference mapKey Project.mapScope(_ => s.key.scope)
+ case s => s
+ }
+ val transformed = session.mergeSettings map ( s => fix(s) )
+ val scopes = transformed collect { case ScopedExternalSetting(`scalacheck`, _, s) => s.key.scope } toSet
+ // Create some fixers so we don't download scala or rely on it.
+ val fixers = for { scope <- scopes
+ setting <- Seq(autoScalaLibrary := false, crossPaths := false)
+ } yield setting mapKey Project.mapScope(_ => scope)
+ val newStructure = Load.reapply(transformed ++ fixers, structure)
+ Project.setProject(session, newStructure, state).put(buildFixed, true)
+ }
+ },
+ onLoad in Global <<= (onLoad in Global) apply (_ andThen { (state: State) =>
+ "fix-uri-projects" :: state
+ })
+ )
+
+ // Collections of projects to run 'compile' on.
+ lazy val compiledProjects = Seq(quickLib, quickComp, continuationsLibrary, actors, swing, dbc, forkjoin, fjbg, msil)
+ // Collection of projects to 'package' and 'publish' together.
+ lazy val packagedBinaryProjects = Seq(scalaLibrary, scalaCompiler, swing, dbc, continuationsPlugin, jline, scalap)
+ lazy val partestRunProjects = Seq(testsuite, continuationsTestsuite)
+
+ private def epflPomExtra = (
+ <xml:group>
+ <inceptionYear>2002</inceptionYear>
+ <licenses>
+ <license>
+ <name>BSD-like</name>
+ <url>http://www.scala-lang.org/downloads/license.html</url>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
+ </scm>
+ <issueManagement>
+ <system>jira</system>
+ <url>http://issues.scala-lang.org</url>
+ </issueManagement>
+ </xml:group>
+ )
+
+ // maven/ivy settings I pulled from build.sbt to keep it lean and mean
+ // XXX not sure where they go though, please advise or fix
+ def publishSettings: Seq[Setting[_]] = Seq(
+ ivyScala ~= ((is: Option[IvyScala]) => is.map(_.copy(checkExplicit = false))),
+ pomIncludeRepository := (_ => false),
+ publishMavenStyle := true,
+ makePomConfiguration <<= makePomConfiguration apply (_.copy(configurations = Some(Seq(Compile, Default)))),
+ pomExtra := epflPomExtra
+ )
+
+ // Settings for root project. These are aggregate tasks against the rest of the build.
+ def projectSettings: Seq[Setting[_]] = publishSettings ++ Seq(
+ doc in Compile <<= (doc in documentation in Compile).identity,
+ // These next two aggregate commands on several projects and return results that are to be ignored by remaining tasks.
+ compile in Compile <<= compiledProjects.map(p => compile in p in Compile).join.map(_.head),
+ // TODO - just clean target? i.e. target map IO.deleteRecursively
+ clean <<= (compiledProjects ++ partestRunProjects).map(p => clean in p).dependOn,
+ packageBin in Compile <<= packagedBinaryProjects.map(p => packageBin in p in Compile).join.map(_.head),
+ // TODO - Make sure scalaLibrary has packageDoc + packageSrc from documentation attached...
+ publish <<= packagedBinaryProjects.map(p => publish in p).join.map(_.head),
+ publishLocal <<= packagedBinaryProjects.map(p => publishLocal in p).join.map(_.head),
+ packageDoc in Compile <<= (packageDoc in documentation in Compile).identity,
+ packageSrc in Compile <<= (packageSrc in documentation in Compile).identity,
+ test in Test <<= (runPartest in testsuite, runPartest in continuationsTestsuite, checkSame in testsuite) map { (a,b,c) => () },
+ lockerLock <<= (lockFile in lockerLib, lockFile in lockerComp, compile in Compile in lockerLib, compile in Compile in lockerComp) map { (lib, comp, _, _) =>
+ Seq(lib,comp).foreach(f => IO.touch(f))
+ },
+ lockerUnlock <<= (lockFile in lockerLib, lockFile in lockerComp) map { (lib, comp) =>
+ Seq(lib,comp).foreach(IO.delete)
+ },
+ genBinQuick <<= (genBinQuick in scaladist).identity,
+ makeDist <<= (makeDist in scaladist).identity,
+ makeExplodedDist <<= (makeExplodedDist in scaladist).identity,
+ // Note: We override unmanagedSources so that ~ compile will look at all these sources, then run our aggregated compile...
+ unmanagedSourceDirectories in Compile <<= baseDirectory apply (_ / "src") apply { dir =>
+ Seq("library/scala","actors","compiler","fjbg","swing","continuations/library","forkjoin") map (dir / _)
+ },
+ // TODO - Make exported products == makeDist so we can use this when creating a *real* distribution.
+ commands += Release.pushStarr
+ //commands += Release.setStarrHome
+ )
+ // Note: Root project is determined by lowest-alphabetical project that has baseDirectory as file("."). we use aaa_ to 'win'.
+ lazy val aaa_root = Project("scala", file(".")) settings(projectSettings: _*)
+
+ // External dependencies used for various projects
+ lazy val externalDeps: Setting[_] = libraryDependencies <<= (sbtVersion)(v =>
+ Seq(
+ "org.apache.ant" % "ant" % "1.8.2",
+ "org.scala-tools.sbt" % "compiler-interface" % v % "provided"
+ )
+ )
+
+ // These are setting overrides for most artifacts in the Scala build file.
+ // TODO - what can we move into build.sbt...
+ // @PP: Ha, and here I'm moving stuff out of it. Clearly I need to
+ // be educated on the merits of having more stuff in build.sbt. I think
+ // of it as a good place for items I am frequently changing (logLevel,
+ // compiler options, etc.) and not so good for anything else. But you
+ // use this stuff a lot more than I do.
+ def settingOverrides: Seq[Setting[_]] = Seq(
+ crossPaths := false,
+ publishArtifact in packageDoc := false,
+ publishArtifact in packageSrc := false,
+ target <<= (baseDirectory, name) apply (_ / "target" / _),
+ (classDirectory in Compile) <<= target(_ / "classes"),
+ javacOptions ++= Seq("-target", "1.5", "-source", "1.5"),
+ scalaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
+ javaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ // Most libs in the compiler use this order to build.
+ compileOrder in Compile := CompileOrder.JavaThenScala,
+ lockFile <<= target(_ / "compile.lock"),
+ skip in Compile <<= lockFile.map(_ exists)
+ )
+
+ // --------------------------------------------------------------
+ // Libraries used by Scalac that change infrequently
+ // (or hopefully so).
+ // --------------------------------------------------------------
+
+ // Jline nested project. Compile this sucker once and be done.
+ lazy val jline = Project("jline", file("src/jline"))
+ // Fast Java Bytecode Generator (nested in every scala-compiler.jar)
+ lazy val fjbg = Project("fjbg", file(".")) settings(settingOverrides : _*)
+ // Forkjoin backport
+ lazy val forkjoin = Project("forkjoin", file(".")) settings(settingOverrides : _*)
+
+ // MSIL code generator
+ // TODO - This probably needs to compile in the layers, but Sabbus
+ // had it building against locker, so we'll do worse and
+ // build against STARR for now.
+ lazy val msilSettings = settingOverrides ++ Seq(
+ defaultExcludes := ("tests"),
+ javacOptions ++= Seq("-source", "1.4"),
+ STARR
+ )
+ lazy val msil = Project("msil", file(".")) settings(msilSettings: _*)
+
+ // --------------------------------------------------------------
+ // The magic kingdom.
+ // Layered compilation of Scala.
+ // Stable Reference -> Locker ('Lockable' dev version) -> Quick -> Strap (Binary compatibility testing)
+ // --------------------------------------------------------------
+
+ // Need a report on this...
+ // TODO - Resolve STARR from a repo..
+ lazy val STARR = scalaInstance <<= appConfiguration map { app =>
+ val launcher = app.provider.scalaProvider.launcher
+ val library = file("lib/scala-library.jar")
+ val compiler = file("lib/scala-compiler.jar")
+ val libJars = (file("lib") * "*.jar").get filterNot Set(library, compiler)
+
+ ScalaInstance("starr", library, compiler, launcher, libJars: _*)
+ }
+
+ // Locker is a lockable Scala compiler that can be built of 'current' source to perform rapid development.
+ lazy val (lockerLib, lockerComp) = makeLayer("locker", STARR)
+ lazy val locker = Project("locker", file(".")) aggregate(lockerLib, lockerComp)
+
+ // Quick is the general purpose project layer for the Scala compiler.
+ lazy val (quickLib, quickComp) = makeLayer("quick", makeScalaReference("locker", lockerLib, lockerComp, fjbg))
+ lazy val quick = Project("quick", file(".")) aggregate(quickLib, quickComp)
+
+ // Reference to quick scala instance.
+ lazy val quickScalaInstance = makeScalaReference("quick", quickLib, quickComp, fjbg)
+ def quickScalaLibraryDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickLib in Compile).identity
+ def quickScalaCompilerDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickComp in Compile).identity
+
+ // Strapp is used to test binary 'sameness' between things built with locker and things built with quick.
+ lazy val (strappLib, strappComp) = makeLayer("strapp", quickScalaInstance)
+
+ // --------------------------------------------------------------
+ // Projects dependent on layered compilation (quick)
+ // --------------------------------------------------------------
+ def addCheaterDependency(projectName: String): Setting[_] =
+ pomPostProcess <<= (version, organization, pomPostProcess) apply { (v,o,k) =>
+ val dependency: scala.xml.Node =
+ <dependency>
+ <groupId>{o}</groupId>
+ <artifactid>{projectName}</artifactid>
+ <version>{v}</version>
+ </dependency>
+ def fixDependencies(node: scala.xml.Node): scala.xml.Node = node match {
+ case <dependencies>{nested@_*}</dependencies> => <dependencies>{dependency}{nested}</dependencies>
+ case x => x
+ }
+ // This is a hack to get around issues where \ and \\ don't work if any of the children are `scala.xml.Group`.
+ def hasDependencies(root: scala.xml.Node): Boolean =
+ (root.child collectFirst {
+ case n: scala.xml.Elem if n.label == "dependencies" => n
+ } isEmpty)
+ // TODO - Keep namespace on project...
+ k andThen {
+ case n @ <project>{ nested@_*}</project> if hasDependencies(n) =>
+ <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0">{nested}<dependencies>{dependency}</dependencies></project>
+ case <project>{ nested@_*}</project> =>
+ <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0">{ nested map fixDependencies }</project>
+ }
+ }
+
+ // TODO - in sabbus, these all use locker to build... I think this way is better, but let's farm this idea around.
+ // TODO - Actors + swing separate jars...
+ lazy val dependentProjectSettings = settingOverrides ++ Seq(quickScalaInstance, quickScalaLibraryDependency, addCheaterDependency("scala-library"))
+ lazy val actors = Project("actors", file(".")) settings(dependentProjectSettings:_*) dependsOn(forkjoin % "provided")
+ lazy val dbc = Project("dbc", file(".")) settings(dependentProjectSettings:_*)
+ // TODO - Remove actors dependency from pom...
+ lazy val swing = Project("swing", file(".")) settings(dependentProjectSettings:_*) dependsOn(actors % "provided")
+ // This project will generate man pages (in man1 and html) for scala.
+ lazy val manmakerSettings: Seq[Setting[_]] = dependentProjectSettings :+ externalDeps
+ lazy val manmaker = Project("manual", file(".")) settings(manmakerSettings:_*)
+
+ // Things that compile against the compiler.
+ lazy val compilerDependentProjectSettings = dependentProjectSettings ++ Seq(quickScalaCompilerDependency, addCheaterDependency("scala-compiler"))
+ lazy val partestSettings = compilerDependentProjectSettings :+ externalDeps
+ lazy val partest = Project("partest", file(".")) settings(partestSettings:_*) dependsOn(actors,forkjoin,scalap)
+ lazy val scalapSettings = compilerDependentProjectSettings ++ Seq(
+ name := "scalap",
+ exportJars := true
+ )
+ lazy val scalap = Project("scalap", file(".")) settings(scalapSettings:_*)
+
+ // --------------------------------------------------------------
+ // Continuations plugin + library
+ // --------------------------------------------------------------
+ lazy val continuationsPluginSettings = compilerDependentProjectSettings ++ Seq(
+ scalaSource in Compile <<= baseDirectory(_ / "src/continuations/plugin/"),
+ resourceDirectory in Compile <<= baseDirectory(_ / "src/continuations/plugin/"),
+ exportJars := true,
+ name := "continuations" // Note: This artifact is directly exported.
+
+ )
+ lazy val continuationsPlugin = Project("continuations-plugin", file(".")) settings(continuationsPluginSettings:_*)
+ lazy val continuationsLibrarySettings = dependentProjectSettings ++ Seq(
+ scalaSource in Compile <<= baseDirectory(_ / "src/continuations/library/"),
+ scalacOptions in Compile <++= (exportedProducts in Compile in continuationsPlugin) map {
+ case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
+ }
+ )
+ lazy val continuationsLibrary = Project("continuations-library", file(".")) settings(continuationsLibrarySettings:_*)
+
+ // TODO - OSGi Manifest
+
+ // --------------------------------------------------------------
+ // Real Library Artifact
+ // --------------------------------------------------------------
+ val allSubpathsCopy = (dir: File) => (dir.*** --- dir) x (relativeTo(dir)|flat)
+ def productTaskToMapping(products : Seq[File]) = products flatMap { p => allSubpathsCopy(p) }
+ lazy val packageScalaLibBinTask = Seq(quickLib, continuationsLibrary, forkjoin, actors).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
+ lazy val scalaLibArtifactSettings: Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaLibBinTask)) ++ Seq(
+ name := "scala-library",
+ crossPaths := false,
+ exportJars := true,
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ packageDoc in Compile <<= (packageDoc in documentation in Compile).identity,
+ packageSrc in Compile <<= (packageSrc in documentation in Compile).identity,
+ fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
+ quickScalaInstance,
+ target <<= (baseDirectory, name) apply (_ / "target" / _)
+ )
+ lazy val scalaLibrary = Project("scala-library", file(".")) settings(scalaLibArtifactSettings:_*)
+
+ // --------------------------------------------------------------
+ // Real Compiler Artifact
+ // --------------------------------------------------------------
+ lazy val packageScalaBinTask = Seq(quickComp, fjbg, msil).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
+ lazy val scalaBinArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaBinTask)) ++ Seq(
+ name := "scala-compiler",
+ crossPaths := false,
+ exportJars := true,
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
+ quickScalaInstance,
+ target <<= (baseDirectory, name) apply (_ / "target" / _)
+ )
+ lazy val scalaCompiler = Project("scala-compiler", file(".")) settings(scalaBinArtifactSettings:_*) dependsOn(scalaLibrary)
+ lazy val fullQuickScalaReference = makeScalaReference("pack", scalaLibrary, scalaCompiler, fjbg)
+
+ // --------------------------------------------------------------
+ // Testing
+ // --------------------------------------------------------------
+ /* lazy val scalacheckSettings: Seq[Setting[_]] = Seq(fullQuickScalaReference, crossPaths := false)*/
+ lazy val scalacheck = uri("git://github.com/rickynils/scalacheck.git")
+
+ lazy val testsuiteSettings: Seq[Setting[_]] = compilerDependentProjectSettings ++ partestTaskSettings ++ VerifyClassLoad.settings ++ Seq(
+ unmanagedBase <<= baseDirectory / "test/files/lib",
+ fullClasspath in VerifyClassLoad.checkClassLoad <<= (fullClasspath in scalaLibrary in Runtime).identity,
+ autoScalaLibrary := false,
+ checkSameLibrary <<= checkSameBinaryProjects(quickLib, strappLib),
+ checkSameCompiler <<= checkSameBinaryProjects(quickComp, strappComp),
+ checkSame <<= (checkSameLibrary, checkSameCompiler) map ((a,b) => ()),
+ autoScalaLibrary := false
+ )
+ lazy val continuationsTestsuiteSettings: Seq[Setting[_]] = testsuiteSettings ++ Seq(
+ scalacOptions in Test <++= (exportedProducts in Compile in continuationsPlugin) map {
+ case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
+ },
+ partestDirs <<= baseDirectory apply { bd =>
+ def mkFile(name: String) = bd / "test" / "files" / name
+ def mkTestType(name: String) = name.drop("continuations-".length).toString
+ Seq("continuations-neg", "continuations-run") map (t => mkTestType(t) -> mkFile(t)) toMap
+ }
+ )
+ val testsuite = (
+ Project("testsuite", file("."))
+ settings (testsuiteSettings:_*)
+ dependsOn (swing, scalaLibrary, scalaCompiler, fjbg, partest, scalacheck)
+ )
+ val continuationsTestsuite = (
+ Project("continuations-testsuite", file("."))
+ settings (continuationsTestsuiteSettings:_*)
+ dependsOn (partest, swing, scalaLibrary, scalaCompiler, fjbg)
+ )
+
+ // --------------------------------------------------------------
+ // Generating Documentation.
+ // --------------------------------------------------------------
+
+ // TODO - Migrate this into the dist project.
+ // Scaladocs
+ def distScalaInstance = makeScalaReference("dist", scalaLibrary, scalaCompiler, fjbg)
+ lazy val documentationSettings: Seq[Setting[_]] = dependentProjectSettings ++ Seq(
+ // TODO - Make these work for realz.
+ defaultExcludes in unmanagedSources in Compile := ((".*" - ".") || HiddenFileFilter ||
+ "reflect/Print.scala" ||
+ "reflect/Symbol.scala" ||
+ "reflect/Tree.scala" ||
+ "reflect/Type.scala" ||
+ "runtime/*$.scala" ||
+ "runtime/ScalaRuntime.scala" ||
+ "runtime/StringAdd.scala" ||
+ "scala/swing/test/*"),
+ sourceFilter in Compile := ("*.scala"),
+ unmanagedSourceDirectories in Compile <<= baseDirectory apply { dir =>
+ Seq(dir / "src" / "library" / "scala", dir / "src" / "actors", dir / "src" / "swing", dir / "src" / "continuations" / "library")
+ },
+ compile := inc.Analysis.Empty,
+ scaladocOptions in Compile in doc <++= (baseDirectory) map (bd =>
+ Seq("-sourcepath", (bd / "src" / "library").getAbsolutePath,
+ "-doc-no-compile", (bd / "src" / "library-aux").getAbsolutePath,
+ "-doc-source-url", """https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk/src/€{FILE_PATH}.scala#L1""",
+ "-doc-root-content", (bd / "compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt").getAbsolutePath
+ )),
+ classpathOptions in Compile := ClasspathOptions.manual
+ )
+ lazy val documentation = (
+ Project("documentation", file("."))
+ settings (documentationSettings: _*)
+ dependsOn(quickLib, quickComp, actors, fjbg, forkjoin, swing, continuationsLibrary)
+ )
+
+ // --------------------------------------------------------------
+ // Packaging a distro
+ // --------------------------------------------------------------
+
+ class ScalaToolRunner(classpath: Classpath) {
+ // TODO - Don't use the ant task directly...
+ lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.data.toURI.toURL).toArray, null)
+ lazy val mainClass = classLoader.loadClass("scala.tools.ant.ScalaTool")
+ lazy val executeMethod = mainClass.getMethod("execute")
+ lazy val setFileMethod = mainClass.getMethod("setFile", classOf[java.io.File])
+ lazy val setClassMethod = mainClass.getMethod("setClass", classOf[String])
+ lazy val setClasspathMethod = mainClass.getMethod("setClassPath", classOf[String])
+ lazy val instance = mainClass.newInstance()
+
+ def setClass(cls: String): Unit = setClassMethod.invoke(instance, cls)
+ def setFile(file: File): Unit = setFileMethod.invoke(instance, file)
+ def setClasspath(cp: String): Unit = setClasspathMethod.invoke(instance, cp)
+ def execute(): Unit = executeMethod.invoke(instance)
+ }
+
+ def genBinTask(
+ runner: ScopedTask[ScalaToolRunner],
+ outputDir: ScopedSetting[File],
+ classpath: ScopedTask[Classpath],
+ useClasspath: Boolean
+ ): Project.Initialize[sbt.Task[Map[File,String]]] = {
+ (runner, outputDir, classpath, streams) map { (runner, outDir, cp, s) =>
+ IO.createDirectory(outDir)
+ val classToFilename = Map(
+ "scala.tools.nsc.MainGenericRunner" -> "scala",
+ "scala.tools.nsc.Main" -> "scalac",
+ "scala.tools.nsc.ScalaDoc" -> "scaladoc",
+ "scala.tools.nsc.CompileClient" -> "fsc",
+ "scala.tools.scalap.Main" -> "scalap"
+ )
+ if (useClasspath) {
+ val classpath = Build.data(cp).map(_.getCanonicalPath).distinct.mkString(",")
+ s.log.debug("Setting classpath = " + classpath)
+ runner setClasspath classpath
+ }
+ def genBinFiles(cls: String, dest: File) = {
+ runner.setClass(cls)
+ runner.setFile(dest)
+ runner.execute()
+ // TODO - Mark generated files as executable (755 or a+x) that is *not* JDK6 specific...
+ dest.setExecutable(true)
+ }
+ def makeBinMappings(cls: String, binName: String): Map[File,String] = {
+ val file = outDir / binName
+ val winBinName = binName + ".bat"
+ genBinFiles(cls, file)
+ Map( file -> ("bin/"+binName), outDir / winBinName -> ("bin/"+winBinName) )
+ }
+ classToFilename.flatMap((makeBinMappings _).tupled).toMap
+ }
+ }
+ def runManmakerTask(classpath: ScopedTask[Classpath], scalaRun: ScopedTask[ScalaRun], mainClass: String, dir: String, ext: String): Project.Initialize[Task[Map[File,String]]] =
+ (classpath, scalaRun, streams, target) map { (cp, runner, s, target) =>
+ val binaries = Seq("fsc", "scala", "scalac", "scaladoc", "scalap")
+ binaries map { bin =>
+ val file = target / "man" / dir / (bin + ext)
+ val classname = "scala.man1." + bin
+ IO.createDirectory(file.getParentFile)
+ toError(runner.run(mainClass, Build.data(cp), Seq(classname, file.getAbsolutePath), s.log))
+ file -> ("man/" + dir + "/" + bin + ext)
+ } toMap
+ }
+
+ val genBinRunner = TaskKey[ScalaToolRunner]("gen-bin-runner",
+ "Creates a utility to generate script files for Scala.")
+ val genBin = TaskKey[Map[File,String]]("gen-bin",
+ "Creates script files for Scala distribution.")
+ val binDir = SettingKey[File]("binaries-directory",
+ "Directory where binary scripts will be located.")
+ val genBinQuick = TaskKey[Map[File,String]]("gen-quick-bin",
+ "Creates script files for testing against current Scala build classfiles (not local dist).")
+ val runManmakerMan = TaskKey[Map[File,String]]("make-man",
+ "Runs the man maker project to generate man pages")
+ val runManmakerHtml = TaskKey[Map[File,String]]("make-html",
+ "Runs the man maker project to generate html pages")
+
+ lazy val scalaDistSettings: Seq[Setting[_]] = Seq(
+ crossPaths := false,
+ target <<= (baseDirectory, name) apply (_ / "target" / _),
+ scalaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ genBinRunner <<= (fullClasspath in quickComp in Runtime) map (new ScalaToolRunner(_)),
+ binDir <<= target(_/"bin"),
+ genBin <<= genBinTask(genBinRunner, binDir, fullClasspath in Runtime, false),
+ binDir in genBinQuick <<= baseDirectory apply (_ / "target" / "bin"),
+ // Configure the classpath this way to avoid having .jar files and previous layers on the classpath.
+ fullClasspath in Runtime in genBinQuick <<= Seq(quickComp,quickLib,scalap,actors,swing,dbc,fjbg,jline,forkjoin).map(classDirectory in Compile in _).join.map(Attributed.blankSeq),
+ fullClasspath in Runtime in genBinQuick <++= (fullClasspath in Compile in jline),
+ genBinQuick <<= genBinTask(genBinRunner, binDir in genBinQuick, fullClasspath in Runtime in genBinQuick, true),
+ runManmakerMan <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitManPage", "man1", ".1"),
+ runManmakerHtml <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitHtml", "doc", ".html"),
+ // TODO - We could *really* clean this up in many ways. Let's look into making a Seq of "direct jars" (scalaLibrary, scalaCompiler, jline, scalap)
+ // a seq of "plugin jars" (continuationsPlugin) and "binaries" (genBin) and "documentation" mappings (genBin) that this can aggregate.
+ // really need to figure out a better way to pull jline + jansi.
+ makeDistMappings <<= (genBin,
+ runManmakerMan,
+ runManmakerHtml,
+ packageBin in scalaLibrary in Compile,
+ packageBin in scalaCompiler in Compile,
+ packageBin in jline in Compile,
+ packageBin in continuationsPlugin in Compile,
+ managedClasspath in jline in Compile,
+ packageBin in scalap in Compile) map {
+ (binaries, man, html, lib, comp, jline, continuations, jlineDeps, scalap) =>
+ val jlineDepMap: Seq[(File, String)] = jlineDeps.map(_.data).flatMap(_ x Path.flat) map { case(a,b) => a -> ("lib/"+b) }
+ binaries ++ man ++ html ++ jlineDepMap ++ Seq(
+ lib -> "lib/scala-library.jar",
+ comp -> "lib/scala-compiler.jar",
+ jline -> "lib/jline.jar",
+ continuations -> "misc/scala-devel/plugins/continuations.jar",
+ scalap -> "lib/scalap.jar"
+ ) toMap
+ },
+ // Add in some more dependencies
+ makeDistMappings <<= (makeDistMappings,
+ packageBin in swing in Compile,
+ packageBin in dbc in Compile) map {
+ (dist, s, d) =>
+ dist ++ Seq(s -> "lib/scala-swing.jar", d -> "lib/scala-dbc.jar")
+ },
+ makeDist <<= (makeDistMappings, baseDirectory, streams) map { (maps, dir, s) =>
+ s.log.debug("Map = " + maps.mkString("\n"))
+ val file = dir / "target" / "scala-dist.zip"
+ IO.zip(maps, file)
+ s.log.info("Created " + file.getAbsolutePath)
+ file
+ },
+ makeExplodedDist <<= (makeDistMappings, target, streams) map { (maps, dir, s) =>
+ def sameFile(f: File, f2: File) = f.getCanonicalPath == f2.getCanonicalPath
+ IO.createDirectory(dir)
+ IO.copy(for {
+ (file, name) <- maps
+ val file2 = dir / name
+ if !sameFile(file,file2)
+ } yield (file, file2))
+ // Hack to make binaries be executable. TODO - Fix for JDK 5 and below...
+ maps.values filter (_ startsWith "bin/") foreach (dir / _ setExecutable true)
+ dir
+ }
+ )
+ lazy val scaladist = (
+ Project("dist", file("."))
+ settings (scalaDistSettings: _*)
+ )
+}
+
+/** Matcher to make updated remote project references easier. */
+object ScopedExternalSetting {
+ def unapply[T](s: Setting[_]): Option[(URI, AttributeKey[_], Setting[_])] =
+ s.key.scope.project match {
+ case Select(p @ ProjectRef(uri, _)) => Some((uri, s.key.key, s))
+ case _ => None
+ }
+}
diff --git a/project/Layers.scala b/project/Layers.scala
new file mode 100644
index 0000000000..cdd68554ed
--- /dev/null
+++ b/project/Layers.scala
@@ -0,0 +1,90 @@
+import sbt._
+import Keys._
+/** This trait stores all the helper methods to generate layers in Scala's layered build. */
+trait Layers extends Build {
+ // TODO - Clean this up or use a self-type.
+
+ /** Default SBT overrides needed for layered compilation. */
+ def settingOverrides: Seq[Setting[_]]
+ /** Reference to the jline project */
+ def jline: Project
+ /** Reference to forkjoin library */
+ def forkjoin: Project
+ /** Reference to Fast-Java-Bytecode-Generator library */
+ def fjbg: Project
+ /** Reference to MSIL generator library */
+ def msil: Project
+ /** A setting that adds some external dependencies. */
+ def externalDeps: Setting[_]
+
+ /** Creates a reference Scala version that can be used to build other projects. This takes in the raw
+ * library, compiler and fjbg libraries as well as a string representing the layer name (used for compiling the compile-interface).
+ */
+ def makeScalaReference(layer : String, library: Project, compiler: Project, fjbg: Project) =
+ scalaInstance <<= (appConfiguration in library,
+ version in library,
+ (exportedProducts in library in Compile),
+ (exportedProducts in compiler in Compile),
+ (exportedProducts in fjbg in Compile),
+ (fullClasspath in jline in Runtime)) map {
+ (app, version: String, lib: Classpath, comp: Classpath, fjbg: Classpath, jline: Classpath) =>
+ val launcher = app.provider.scalaProvider.launcher
+ (lib,comp) match {
+ case (Seq(libraryJar), Seq(compilerJar)) =>
+ ScalaInstance(
+ version + "-" + layer + "-",
+ libraryJar.data,
+ compilerJar.data,
+ launcher,
+ ((fjbg.files++jline.files):_*))
+ case _ => error("Cannot build a ScalaReference with more than one classpath element")
+ }
+ }
+
+ /** Creates a "layer" of Scala compilation. That is, this will build the next version of Scala from a previous version.
+ * Returns the library project and compiler project from the next layer.
+ * Note: The library and compiler are not *complete* in the sense that they are missing things like "actors" and "fjbg".
+ */
+ def makeLayer(layer: String, referenceScala: Setting[Task[ScalaInstance]]) : (Project, Project) = {
+ val library = Project(layer + "-library", file(".")) settings( (settingOverrides ++
+ Seq(version := layer,
+ // TODO - use depends on.
+ unmanagedClasspath in Compile <<= (exportedProducts in forkjoin in Compile).identity,
+ managedClasspath in Compile := Seq(),
+ scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "library"),
+ resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "library"),
+ defaultExcludes in unmanagedResources := ("*.scala" | "*.java" | "*.disabled"),
+ // TODO - Allow other scalac option settings.
+ scalacOptions in Compile <++= (scalaSource in Compile) map (src => Seq("-sourcepath", src.getAbsolutePath)),
+ classpathOptions := ClasspathOptions.manual,
+ resourceGenerators in Compile <+= (baseDirectory, version, resourceManaged) map Release.generatePropertiesFile("library.properties"),
+ referenceScala
+ )) :_*)
+
+ // Define the compiler
+ val compiler = Project(layer + "-compiler", file(".")) settings((settingOverrides ++
+ Seq(version := layer,
+ scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "compiler"),
+ resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "compiler"),
+ defaultExcludes in unmanagedResources := "*.scala",
+ resourceGenerators in Compile <+= (baseDirectory, version, resourceManaged) map Release.generatePropertiesFile("compiler.properties"),
+ // Note, we might be able to use the default task, but for some reason ant was filtering files out. Not sure what's up, but we'll
+ // stick with that for now.
+ unmanagedResources in Compile <<= (baseDirectory) map {
+ (bd) =>
+ val dirs = Seq(bd / "src" / "compiler")
+ dirs.descendentsExcept( ("*.xml" | "*.html" | "*.gif" | "*.png" | "*.js" | "*.css" | "*.tmpl" | "*.swf" | "*.properties" | "*.txt"),"*.scala").get
+ },
+ // TODO - Use depends on *and* SBT's magic dependency mechanisms...
+ unmanagedClasspath in Compile <<= Seq(forkjoin, library, fjbg, jline, msil).map(exportedProducts in Compile in _).join.map(_.flatten),
+ classpathOptions := ClasspathOptions.manual,
+ externalDeps,
+ referenceScala
+ )
+ ):_*)
+
+ // Return the generated projects.
+ (library, compiler)
+ }
+
+}
diff --git a/project/Partest.scala b/project/Partest.scala
new file mode 100644
index 0000000000..6fc5e11958
--- /dev/null
+++ b/project/Partest.scala
@@ -0,0 +1,141 @@
+import sbt._
+
+import Build._
+import Keys._
+import Project.Initialize
+import complete._
+import scala.collection.{ mutable, immutable }
+
+/** Helpers and task keys for running the partest test suite from SBT. */
+object partest {
+
+ /** The key for the run-partest task that exists in Scala's test suite. */
+ lazy val runPartest = TaskKey[Unit]("run-partest", "Runs the partest test suite against the quick.")
+ lazy val runPartestSingle = InputKey[Unit]("run-partest-single", "Runs a single partest test against quick.")
+ lazy val runPartestFailed = TaskKey[Unit]("run-partest-failed", "Runs failed partest tests.")
+ lazy val runPartestGrep = InputKey[Unit]("run-partest-grep", "Runs a single partest test against quick.")
+ lazy val partestRunner = TaskKey[PartestRunner]("partest-runner", "Creates a runner that can run partest suites")
+  lazy val partestTests = TaskKey[Map[String, Seq[File]]]("partest-tests", "Creates a map of test-type to a sequence of the test files/directories to test.")
+ lazy val partestDirs = SettingKey[Map[String,File]]("partest-dirs", "The map of partest test type to directory associated with that test type")
+
+ lazy val partestTaskSettings: Seq[Setting[_]] = Seq(
+ javaOptions in partestRunner := Seq("-Xmx512M -Xms256M"),
+ partestDirs <<= baseDirectory apply { bd =>
+ partestTestTypes map (kind => kind -> (bd / "test" / "files" / kind)) toMap
+ },
+ partestRunner <<= partestRunnerTask(fullClasspath in Runtime, javaOptions in partestRunner),
+ partestTests <<= partestTestsTask(partestDirs),
+ runPartest <<= runPartestTask(partestRunner, partestTests, scalacOptions in Test),
+ runPartestSingle <<= runSingleTestTask(partestRunner, partestDirs, scalacOptions in Test),
+ runPartestFailed <<= runPartestTask(partestRunner, partestTests, scalacOptions in Test, Seq("--failed"))
+ )
+
+ // What's fun here is that we want "*.scala" files *and* directories in the base directory...
+ def partestResources(base: File, testType: String): PathFinder = testType match {
+ case "res" => base ** "*.res"
+ case "buildmanager" => base * "*"
+ // TODO - Only allow directories that have "*.scala" children...
+ case _ => base * "*" filter { f => !f.getName.endsWith(".obj") && (f.isDirectory || f.getName.endsWith(".scala")) }
+ }
+ lazy val partestTestTypes = Seq("run", "jvm", "pos", "neg", "buildmanager", "res", "shootout", "scalap", "specialized", "presentation", "scalacheck")
+
+ // TODO - Figure out how to specify only a subset of resources...
+ def partestTestsTask(testDirs: ScopedSetting[Map[String,File]]): Project.Initialize[Task[Map[String, Seq[File]]]] =
+ testDirs map (m => m map { case (kind, dir) => kind -> partestResources(dir, kind).get })
+
+ // TODO - Split partest task into Configurations and build a Task for each Configuration.
+ // *then* mix all of them together for run-testsuite or something clever like this.
+ def runPartestTask(runner: ScopedTask[PartestRunner], testRuns: ScopedTask[Map[String,Seq[File]]], scalacOptions: ScopedTask[Seq[String]], extraArgs: Seq[String] = Seq()): Initialize[Task[Unit]] = {
+ (runner, testRuns, scalacOptions, streams) map {
+ (runner, runs, scalaOpts, s) => runPartestImpl(runner, runs, scalaOpts, s, extraArgs)
+ }
+ }
+ private def runPartestImpl(runner: PartestRunner, runs: Map[String, Seq[File]], scalacOptions: Seq[String], s: TaskStreams, extras: Seq[String] = Seq()): Unit = {
+ val testArgs = runs.toSeq collect { case (kind, files) if files.nonEmpty => Seq("-" + kind, files mkString ",") } flatten
+ val extraArgs = scalacOptions flatMap (opt => Seq("-scalacoption", opt))
+
+ import collection.JavaConverters._
+ val results = runner run Array(testArgs ++ extraArgs ++ extras: _*) asScala
+ // TODO - save results
+ val failures = results collect {
+ case (path, 1) => path + " [FAILED]"
+ case (path, 2) => path + " [TIMEOUT]"
+ }
+
+ if (failures.isEmpty)
+ s.log.info(""+results.size+" tests passed.")
+ else {
+ failures foreach (s.log error _)
+ error("Test Failures! ("+failures.size+" of "+results.size+")")
+ }
+ }
+
+ def convertTestsForAutoComplete(tests: Map[String, Seq[File]]): (Set[String], Set[String]) =
+ (tests.keys.toSet, tests.values flatMap (_ map cleanFileName) toSet)
+
+  /** Takes a test file, as sent to Partest, and cleans it up for auto-complete */
+ def cleanFileName(file: File): String = {
+ // TODO - Something intelligent here
+ val TestPattern = ".*/test/(.*)".r
+ file.getCanonicalPath match {
+ case TestPattern(n) => n
+ case _ => file.getName
+ }
+ }
+
+ // TODO - Allow a filter for the second part of this...
+ def runSingleTestParser(testDirs: Map[String, File]): State => Parser[(String, String)] = {
+ import DefaultParsers._
+ state => {
+ Space ~> token(NotSpace examples testDirs.keys.toSet) flatMap { kind =>
+ val files: Set[String] = testDirs get kind match {
+ case Some(dir) =>
+ partestResources(dir, kind).get flatMap (_ relativeTo dir) map (_ getName) toSet
+ case _ =>
+ Set()
+ }
+ Space ~> token(NotSpace examples files) map (kind -> _)
+ }
+ }
+ }
+
+ def runSingleTestTask(runner: ScopedTask[PartestRunner], testDirs: ScopedSetting[Map[String, File]], scalacOptions: ScopedTask[Seq[String]]) : Initialize[InputTask[Unit]] = {
+ import sbinary.DefaultProtocol._
+
+ InputTask(testDirs apply runSingleTestParser) { result =>
+ (runner, result, testDirs, scalacOptions, streams) map {
+ case (r, (kind, filter), dirs, o, s) =>
+ // TODO - Use partest resources somehow to filter the filter correctly....
+ val files: Seq[File] =
+ if (filter == "*") partestResources(dirs(kind), kind).get
+ else (dirs(kind) * filter).get
+
+ runPartestImpl(r, Map(kind -> files), o, s)
+ }
+ }
+ }
+
+ def partestRunnerTask(classpath: ScopedTask[Classpath], javacOptions: ScopedSetting[Seq[String]]): Project.Initialize[Task[PartestRunner]] =
+ (classpath, javacOptions) map ((cp, opts) => new PartestRunner(Build.data(cp), opts mkString " "))
+}
+
+class PartestRunner(classpath: Seq[File], javaOpts: String) {
+ // Classloader that does *not* have this as parent, for differing Scala version.
+ lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.toURI.toURL).toArray, null)
+ lazy val (mainClass, mainMethod) = try {
+ val c = classLoader.loadClass("scala.tools.partest.nest.SBTRunner")
+ val m = c.getMethod("mainReflect", classOf[Array[String]])
+ (c,m)
+ }
+ lazy val classPathArgs = Seq("-cp", classpath.map(_.getAbsoluteFile).mkString(java.io.File.pathSeparator))
+ def run(args: Array[String]): java.util.Map[String,Int] = try {
+ // TODO - undo this settings after running. Also globals are bad.
+ System.setProperty("partest.java_opts", javaOpts)
+ val allArgs = (classPathArgs ++ args).toArray
+ mainMethod.invoke(null, allArgs).asInstanceOf[java.util.Map[String,Int]]
+ } catch {
+ case e =>
+ //error("Could not run Partest: " + e)
+ throw e
+ }
+}
diff --git a/project/Release.scala b/project/Release.scala
new file mode 100644
index 0000000000..ff1bdfbd31
--- /dev/null
+++ b/project/Release.scala
@@ -0,0 +1,94 @@
+import sbt._
+import Keys._
+
+object Release {
+
+ // TODO - move more of the dist project over here...
+
+
+ lazy val pushStarr = Command.command("push-starr") { (state: State) =>
+ def f(s: Setting[_]): Setting[_] = s.key.key match {
+ case version.key => // TODO - use full version
+ s.asInstanceOf[Setting[String]].mapInit( (_,_) => timeFormat format (new java.util.Date))
+ case organization.key =>
+ s.asInstanceOf[Setting[String]].mapInit( (_,_) => "org.scala-lang.bootstrapp")
+ // TODO - Switch publish repo to be typesafe starr repo.
+ case publishTo.key =>
+ s.asInstanceOf[Setting[Option[Resolver]]].mapInit((_,_) => Some("Starr Repo" at "http://typesafe.artifactoryonline.com/typesafe/starr-releases/"))
+ case _ => s
+ }
+ val extracted = Project.extract(state)
+ import extracted._
+ // Swap version on projects
+ val transformed = session.mergeSettings map ( s => f(s) )
+ val newStructure = Load.reapply(transformed, structure)
+ val newState = Project.setProject(session, newStructure, state)
+ // TODO - Run tasks. Specifically, push scala-compiler + scala-library. *Then* bump the STARR version locally.
+ // The final course of this command should be:
+ // publish-local
+ // Project.evaluateTask(publishLocal, newState)
+ // bump STARR version setting
+ // TODO - Define Task
+ // Rebuild quick + test to ensure it works
+ // Project.evaluateTask(test, newState)
+ // push STARR remotely
+ Project.evaluateTask(publish, newState)
+ // Revert to previous project state.
+ Project.setProject(session, structure, state)
+ }
+
+ // TODO - Autocomplete
+ /*lazy val setStarrHome = Command.single("set-starr-home") { (state: State, homeDir: String) =>
+ def f(s: Setting[_]): Setting[_] =
+ if(s.key.key == scalaInstance.key) {
+ s.asInstanceOf[Setting[ScalaInstance]] mapInit { (key, value) =>
+ if(value.version == "starr")
+ scalaInstance <<= appConfiguration map { app =>
+ val launcher = app.provider.scalaProvider.launcher
+ ScalaInstance("starr", new File(homeDir), launcher)
+ }
+ else value
+ }
+ } else s
+ val extracted = Project.extract(state)
+ import extracted._
+ val transformed = session.mergeSettings map f
+ val newStructure = Load.reapply(transformed, structure)
+ Project.setProject(session, newStructure, state)
+ }*/
+
+ lazy val timeFormat = {
+ val formatter = new java.text.SimpleDateFormat("yyyyMMdd'T'HHmmss")
+ formatter.setTimeZone(java.util.TimeZone.getTimeZone("GMT"))
+ formatter
+ }
+
+ /** This generates a properties file, if it does not already exist, with the maximum lastmodified timestamp
+ * of any source file. */
+ def generatePropertiesFile(name: String)(baseDirectory: File, version: String, dir: File): Seq[File] = {
+ val target = dir / name
+ // TODO - Regenerate on triggers, like recompilation or something...
+ // TODO - also think about pulling git last-commit for this...
+ if (!target.exists) {
+ val ts = getLastModified(baseDirectory)
+ val formatter = new java.text.SimpleDateFormat("yyyyMMdd'T'HHmmss")
+ formatter.setTimeZone(java.util.TimeZone.getTimeZone("GMT"))
+ val fullVersion = version + "." + formatter.format(new java.util.Date(ts))
+ makePropertiesFile(target, fullVersion)
+ }
+ target :: Nil
+ }
+
+ // This creates the *.properties file used to determine the current version of scala at runtime. TODO - move these somewhere utility like.
+ def makePropertiesFile(f: File, version: String): Unit =
+ IO.write(f, "version.number = "+version+"\ncopyright.string = Copyright 2002-2011, LAMP/EPFL")
+
+ def makeFullVersionString(baseDirectory: File, baseVersion: String) = baseVersion+"."+getLastModified(baseDirectory)
+
+ // TODO - Something that doesn't take so long...
+ def allSourceFiles(baseDirectory: File) = (baseDirectory / "src") ** ("*.scala" | "*.java" )
+
+ def getLastModified(baseDirectory: File) =
+ allSourceFiles(baseDirectory).get.map(_.lastModified).max
+
+}
diff --git a/project/Sametest.scala b/project/Sametest.scala
new file mode 100644
index 0000000000..f44fe8ec65
--- /dev/null
+++ b/project/Sametest.scala
@@ -0,0 +1,66 @@
+import sbt._
+
+import Build._
+import Keys._
+
+// This code is adapted from scala.tools.ant.Same by Gilles Dubochet.
+object SameTest {
+ lazy val checkSame: TaskKey[Unit] = TaskKey("check-same-binaries", "checks whether or not the class files generated by scala are the same.")
+  lazy val checkSameLibrary: TaskKey[Unit] = TaskKey("check-same-lib-binaries", "checks whether or not the library class files generated by scala are the same.")
+ lazy val checkSameCompiler: TaskKey[Unit] = TaskKey("check-same-comp-binaries", "checks whether or not the compiler class files generated by scala are the same.")
+
+ def checkSameBinaryProjects(lhs: Project, rhs: Project): Project.Initialize[Task[Unit]] =
+ (classDirectory in Compile in lhs, classDirectory in Compile in rhs,
+ compile in Compile in lhs, compile in Compile in rhs, streams) map { (lhs,rhs, _, _, s) =>
+ // Now we generate a complete set of relative files and then
+ def relativeClasses(dir: File) = (dir ** "*.class").get.flatMap(IO.relativize(dir,_).toList)
+ // This code adapted from SameTask in the compiler.
+ def hasDifferentFiles(filePairs: Seq[(File,File)]): Boolean = {
+ filePairs exists { case (a,b) =>
+ if (!a.canRead || !b.canRead) {
+ s.log.error("Either ["+a+"] or ["+b+"] is missing.")
+ true
+ } else {
+ s.log.debug("Checking for binary differences in ["+a+"] against ["+b+"].")
+ val diff = !checkSingleFilePair(a,b)
+ if(diff) s.log.error("["+a+"] differs from ["+b+"]")
+ diff
+ }
+ }
+ }
+ val allClassMappings = (relativeClasses(lhs) ++ relativeClasses(rhs)).distinct
+ val comparisons = allClassMappings.map(f => new File(lhs, f) -> new File(rhs, f))
+ val result = hasDifferentFiles(comparisons)
+ if (result) error("Binary artifacts differ.")
+ }
+
+ val bufferSize = 1024
+
+ // Tests whether two files are binary equivalents of each other.
+ def checkSingleFilePair(originFile: File, destFile: File): Boolean = {
+ Using.fileInputStream(originFile) { originStream =>
+ Using.fileInputStream(destFile) { destStream =>
+ val originBuffer = new Array[Byte](bufferSize)
+ val destBuffer = new Array[Byte](bufferSize)
+ var equalNow = true
+ var originRemaining = originStream.read(originBuffer)
+ var destRemaining = destStream.read(destBuffer)
+ while (originRemaining > 0 && equalNow) {
+ if (originRemaining == destRemaining) {
+ for (idx <- 0 until originRemaining) {
+ equalNow = equalNow && (originBuffer(idx) == destBuffer(idx))
+ }
+ } else {
+ equalNow = false
+ }
+ originRemaining = originStream.read(originBuffer)
+ destRemaining = destStream.read(destBuffer)
+ }
+ if (destRemaining > 0) equalNow = false
+ equalNow
+ }
+ }
+ }
+
+
+}
diff --git a/project/VerifyClassLoad.scala b/project/VerifyClassLoad.scala
new file mode 100644
index 0000000000..c8eebb1159
--- /dev/null
+++ b/project/VerifyClassLoad.scala
@@ -0,0 +1,46 @@
+import sbt._
+
+import Build._
+import Keys._
+
+// This is helper code to validate that generated class files will succeed in bytecode verification at class-load time.
+object VerifyClassLoad {
+ lazy val checkClassLoad: TaskKey[Unit] = TaskKey("check-class-load", "checks whether or not the class files generated by scala are deemed acceptable by classloaders.")
+ lazy val checkClassRunner: TaskKey[ClassVerifyRunner] = TaskKey("check-class-runner", "A wrapper around reflective calls to the VerifyClass class.")
+
+
+ def settings: Seq[Setting[_]] = Seq(
+ checkClassRunner <<= (fullClasspath in Runtime) map (cp => new ClassVerifyRunner(data(cp))),
+ fullClasspath in checkClassLoad := Seq(),
+ checkClassLoad <<= (checkClassRunner, fullClasspath in checkClassLoad, streams) map { (runner, dirs, s) =>
+ import collection.JavaConverters._
+ val results = runner.run(data(dirs).map(_.getAbsolutePath).toArray).asScala
+
+ s.log.info("Processed " + results.size + " classes.")
+ val errors = results.filter(_._2 != null)
+ for( (name, result) <- results; if result != null) {
+ s.log.error(name + " had error: " + result)
+ }
+ if(errors.size > 0) error("Classload validation errors encountered")
+ ()
+ }
+ )
+
+ // TODO - Use
+ class ClassVerifyRunner(classpath: Seq[File]) {
+ // Classloader that does *not* have this as parent, for differing Scala version.
+ lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.toURI.toURL).toArray, null)
+ lazy val (mainClass, mainMethod) = try {
+ val c = classLoader.loadClass("scala.tools.util.VerifyClass")
+ val m = c.getMethod("run", classOf[Array[String]])
+ (c,m)
+ }
+ def run(args: Array[String]): java.util.Map[String,String] = try {
+ mainMethod.invoke(null, args).asInstanceOf[java.util.Map[String,String]]
+ } catch {
+ case e =>
+ //error("Could not run Partest: " + e)
+ throw e
+ }
+ }
+}
diff --git a/project/build.properties b/project/build.properties
deleted file mode 100644
index 4775404a76..0000000000
--- a/project/build.properties
+++ /dev/null
@@ -1,11 +0,0 @@
-#Project properties
-#Sun Apr 11 14:24:47 CEST 2010
-project.name=scala
-def.scala.version=2.7.7
-sbt.version=0.7.7
-copyright=Copyright 2002-2011, LAMP/EPFL
-build.scala.versions=2.7.7
-project.initialize=false
-project.organization=ch.epfl.lamp
-partest.version.number=0.9.2
-project.version=2.8.1
diff --git a/project/build/AdditionalResources.scala b/project/build/AdditionalResources.scala
deleted file mode 100644
index d83d45b218..0000000000
--- a/project/build/AdditionalResources.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-import sbt._
-import java.util.jar.{Manifest}
-import java.io.{FileInputStream}
-import AdditionalResources._
-/**
- * Additional tasks that are required to obtain a complete compiler and library pair, but that are not part of the
- * compilation task. It copies additional files and generates the properties files
- * @author Grégory Moix
- */
-trait AdditionalResources {
- self : BasicLayer =>
-
- def writeProperties: Option[String] = {
- def write0(steps: List[Step]): Option[String] = steps match {
- case x :: xs => x match {
- case c: PropertiesToWrite => {
- c.writeProperties orElse write0(xs)
- }
- case _ => write0(xs)
- }
- case Nil => None
- }
- write0(allSteps.topologicalSort)
- }
-}
-
-object AdditionalResources {
- /**
- * A FileFilter that defines what are the files that will be copied
- */
- lazy val basicFilter = "*.tmpl" | "*.xml" | "*.js" | "*.css" | "*.properties" | "*.swf" | "*.png"
- implicit def stringToGlob(s: String): NameFilter = GlobFilter(s)
-}
-
-trait ResourcesToCopy {
- self : CompilationStep =>
-
- def getResources(from: Path, filter: FileFilter): PathFinder = (from ##)** filter
- def getResources(from: Path): PathFinder = getResources(from, AdditionalResources.basicFilter)
-
- def copyDestination: Path
- def filesToCopy: PathFinder
-
- def copy = {
- log.info("Copying files for "+name)
- try { FileUtilities.copy(filesToCopy.get, copyDestination, log) }
- catch { case e => Some(e.toString) }
-
- None
- }
-}
-
-trait PropertiesToWrite {
- self : CompilationStep =>
-
- def propertyList: List[(String, String)]
- def propertyDestination: Path
-
- def writeProperties: Option[String] ={
- import java.io._
- import java.util.Properties
-
- val properties = new Properties
-
- def insert(list: List[(String, String)]): Unit =
- list foreach { case (k, v) => properties.setProperty(k, v) }
-
- try {
- insert(propertyList)
- val destFile = propertyDestination.asFile
- val stream = new FileOutputStream(destFile)
- properties.store(stream, null)
- }
- catch {
- case e: Exception => Some(e.toString)
- }
- None
- }
-
-}
-
diff --git a/project/build/BasicLayer.scala b/project/build/BasicLayer.scala
deleted file mode 100644
index b333131d51..0000000000
--- a/project/build/BasicLayer.scala
+++ /dev/null
@@ -1,296 +0,0 @@
-import sbt._
-import xsbt.ScalaInstance
-import ScalaBuildProject._
-
-/**
- * Basic tasks and configuration shared by all layers. This class regroups the configuration and behaviour
- * shared by all layers.
- * @author Grégory Moix
- */
-abstract class BasicLayer(val info: ProjectInfo, val versionNumber: String, previousLayer: Option[BasicLayer])
- extends ScalaBuildProject
- with ReflectiveProject
- with AdditionalResources
- with LayerCompilation
- with BuildInfoEnvironment
- with ForkSBT {
- layer =>
-
- // All path values must be lazy in order to avoid initialization issues (sbt way of doing things)
-
- def buildInfoEnvironmentLocation: Path = outputRootPath / ("build-"+name+".properties")
-
- val forkProperty = "scala.sbt.forked"
- def isDebug = info.logger atLevel Level.Debug
- def isForked = System.getProperty(forkProperty) != null
-
- // Support of triggered execution at project level
- override def watchPaths = info.projectPath / "src" ** ("*.scala" || "*.java" || AdditionalResources.basicFilter)
- override def dependencies = info.dependencies
-
- lazy val copyright = property[String]
- lazy val partestVersionNumber = property[Version]
-
- lazy val nextLayer: Option[BasicLayer] = None
- def packingDestination : Path = layerOutput / "pack"
- lazy val libsDestination = packingDestination/ "lib"
- lazy val packedStarrOutput = outputRootPath / "pasta"
- lazy val requiredPluginsDirForCompilation = layerOutput / "misc" / "scala-devel" / "plugins"
-
- def compilerAdditionalJars: List[Path] = Nil
- def libraryAdditionalJars: List[Path] = Nil
-
- // TASKS
-
- /**
- * Before compiling the layer, we need to check that the previous layer
- * was created correctly and compile it if necessary
- */
- lazy val startLayer = previousLayer match {
- case Some(previous) => task(None) dependsOn previous.finishLayer
- case _ => task(None)
- }
-
- def buildLayer = externalCompilation orElse writeProperties
-
- lazy val build = compile
-
- lazy val compile = task(buildLayer) dependsOn startLayer
-
- /**
- * Finish the compilation and ressources copy and generation
- * It does nothing in itself. As sbt doesn't support conditional dependencies,
- * it permit locker to override it in order to lock the layer when the compilation
- * is finished.
- */
- lazy val finishLayer: ManagedTask = task(None) dependsOn compile
-
- def cleaningList = List(
- layerOutput,
- layerEnvironment.envBackingPath,
- packingDestination
- )
-
- def cleanFiles = FileUtilities.clean(cleaningList, true, log)
-
- // We use super.task, so cleaning is done in every case, even when locked
- lazy val clean: Task = nextLayer match {
- case Some(next) => super.task(cleanFiles) dependsOn next.clean
- case _ => super.task(cleanFiles)
- }
- lazy val cleanBuild = task(cleanFiles orElse buildLayer) dependsOn startLayer
-
- // Utility methods (for quick access)
- def actorsOutput = actorsConfig.outputDirectory
- def actorsSrcDir = actorsConfig.srcDir
- def compilerOutput = compilerConfig.outputDirectory
- def compilerSrcDir = compilerConfig.srcDir
- def dbcOutput = dbcConfig.outputDirectory
- def libraryOutput = libraryConfig.outputDirectory
- def librarySrcDir = libraryConfig.srcDir
- def outputCompilerJar = compilerConfig.packagingConfig.jarDestination
- def outputLibraryJar = libraryWS.packagingConfig.jarDestination
- def outputPartestJar = partestConfig.packagingConfig.jarDestination
- def outputScalapJar = scalapConfig.packagingConfig.jarDestination
- def scalapOutput = scalapConfig.outputDirectory
- def swingOutput = swingConfig.outputDirectory
- def swingSrcDir = swingConfig.srcDir
-
- // CONFIGURATION OF THE COMPILATION STEPS
-
- /**
- * Configuration of the core library compilation
- */
- lazy val libraryConfig = new CompilationStep("library", pathLayout , log) with ResourcesToCopy with PropertiesToWrite {
- def label = "["+layer.name+"] library"
- def options: Seq[String] = Seq("-sourcepath", pathConfig.sources.absolutePath.toString)
- def dependencies = Nil
- override def classpath = super.classpath +++ forkJoinJar
-
- def copyDestination = outputDirectory
- def filesToCopy = getResources(srcDir)
-
- def propertyDestination = outputDirectory / "library.properties"
- def propertyList = ("version.number",versionNumber) :: ("copyright.string", copyright.value) :: Nil
- }
-
- /**
- * Configuration of the compiler
- */
- lazy val compilerConfig = new CompilationStep("compiler", pathLayout, log) with ResourcesToCopy with PropertiesToWrite with Packaging {
- def label = "["+layer.name+"] compiler"
- private def bootClassPath : String = {
- System.getProperty("sun.boot.class.path")
- }
- override def classpath: PathFinder = super.classpath +++ fjbgJar +++ msilJar +++ jlineJar +++ antJar +++ forkJoinJar
- def options = Seq("-bootclasspath", bootClassPath)
- def dependencies = if (minimalCompilation) libraryConfig :: Nil else libraryConfig :: actorsConfig :: dbcConfig :: swingConfig :: Nil
-
- def copyDestination = outputDirectory
- def filesToCopy = getResources(srcDir)
-
- def propertyDestination = outputDirectory / "compiler.properties"
- def propertyList = ("version.number",versionNumber) :: ("copyright.string", copyright.value) :: Nil
-
- lazy val packagingConfig = {
- import java.util.jar.Manifest
- import java.io.FileInputStream
- val manifest = new Manifest(new FileInputStream(manifestPath.asFile))
- new PackagingConfiguration(libsDestination / compilerJarName, List(outputDirectory ##), manifest , compilerAdditionalJars)
- }
- lazy val starrPackagingConfig = new PackagingConfiguration(packedStarrOutput/compilerJarName, List(outputDirectory ##))
-
- }
-
- //// ADDTIONNAL LIBRARIES ////
-
- /**
- * Config of the actors library
- */
- lazy val actorsConfig = new CompilationStep ("actors", pathLayout, log){
- def label = "["+layer.name+"] actors library"
- override def classpath: PathFinder = super.classpath +++ forkJoinJar
- def options: Seq[String] = Seq()
- def dependencies = libraryConfig :: Nil
- }
-
- /**
- * Config of the dbc library
- */
- lazy val dbcConfig = new CompilationStep("dbc", pathLayout, log) with Packaging {
- def label = "["+layer.name+"] dbc library"
- def options: Seq[String] = Seq()
- def dependencies = libraryConfig :: Nil
-
- lazy val packagingConfig = new PackagingConfiguration(
- libsDestination / dbcJarName,
- List(outputDirectory ##)
- )
- }
-
- /**
- * Config of the swing library
- */
- lazy val swingConfig = new CompilationStep("swing", pathLayout, log) with Packaging {
- def label = "["+layer.name+"] swing library"
- def options: Seq[String] = Seq()
- def dependencies = libraryConfig :: actorsConfig :: Nil
-
- lazy val packagingConfig = new PackagingConfiguration(
- libsDestination / swingJarName,
- List(outputDirectory ##)
- )
- }
-
- ///// TOOLS CONFIGURATION ////////
-
- /**
- * Configuration of scalacheck
- */
- lazy val scalacheckConfig = new CompilationStep("scalacheck", pathLayout, log) with Packaging {
- def label = "["+layer.name+"] scalacheck"
- def options: Seq[String] = Seq()
- def dependencies = libraryConfig :: compilerConfig :: actorsConfig :: Nil
-
- lazy val packagingConfig = new PackagingConfiguration(
- libsDestination / scalacheckJarName,
- List(outputDirectory ##)
- )
- }
-
- /**
- * Configuration of scalap tool
- */
- lazy val scalapConfig = new CompilationStep("scalap", pathLayout, log) with Packaging {
- def label = "["+layer.name+"] scalap"
- def options: Seq[String] = Seq()
- def dependencies = libraryConfig :: compilerConfig :: Nil
-
- val decoderProperties = (srcDir ## ) / "decoder.properties"
-
- lazy val packagingConfig = new PackagingConfiguration(
- libsDestination / scalapJarName,
- List(outputDirectory ##, decoderProperties)
- )
- }
-
- /**
- * Configuration of the partest tool
- */
- lazy val partestConfig = new CompilationStep("partest", pathLayout, log) with ResourcesToCopy with PropertiesToWrite with Packaging {
- def label = "["+layer.name+"] partest"
- override def classpath: PathFinder = super.classpath +++ antJar +++ forkJoinJar
- def options: Seq[String] = Seq()
- def dependencies = libraryConfig :: compilerConfig :: scalapConfig :: actorsConfig :: Nil
-
- def copyDestination = outputDirectory
- def filesToCopy = getResources(srcDir)
-
- def propertyDestination = outputDirectory / "partest.properties"
- def propertyList = List(
- ("version.number", partestVersionNumber.value.toString),
- ("copyright.string", copyright.value)
- )
-
- lazy val packagingConfig = new PackagingConfiguration(libsDestination / partestJarName, List(outputDirectory ##))
-
- }
-
- ///// PLUGINS CONFIGURATION ////////
-
- lazy val continuationPluginConfig = {
- val config = new PathConfig {
- def projectRoot: Path = pathLayout.projectRoot
- def sources: Path = pathLayout.srcDir / "continuations" / "plugin"
- def analysis: Path = pathLayout.analysisOutput / "continuations" / "plugin"
- def output: Path = pathLayout.classesOutput / "continuations" / "plugin"
- }
-
- new CompilationStep("continuation-plugin", config, log) with ResourcesToCopy with EarlyPackaging {
- def label = "["+layer.name+"] continuation plugin"
- def dependencies = libraryConfig :: compilerConfig :: Nil
- def options = Seq()
-
- def filesToCopy = (sourceRoots ##) / "scalac-plugin.xml"
- def copyDestination = outputDirectory
- def jarContent = List(outputDirectory ##)
- lazy val packagingConfig = new PackagingConfiguration(
- requiredPluginsDirForCompilation/"continuations.jar",
- List(outputDirectory ##)
- )
- lazy val earlyPackagingConfig = new PackagingConfiguration(
- pathLayout.outputDir / "misc" / "scala-devel" / "plugins" / "continuations.jar",
- List(outputDirectory ##)
- )
- }
- }
-
- lazy val continuationLibraryConfig = {
- val config = new PathConfig {
- def projectRoot: Path = pathLayout.projectRoot
- def sources: Path = pathLayout.srcDir / "continuations" / "library"
- def analysis: Path = pathLayout.analysisOutput / "continuations" / "library"
- def output: Path = pathLayout.classesOutput / "continuations" / "library"
- }
-
- new CompilationStep("continuation-library", config, log) {
- def label = "["+layer.name+"] continuation library"
- def dependencies = libraryConfig :: compilerConfig :: continuationPluginConfig :: Nil
- def options = Seq(
- "-Xpluginsdir",
- requiredPluginsDirForCompilation.absolutePath,
- "-Xplugin-require:continuations",
- "-P:continuations:enable"
- )
- }
- }
-
- // Grouping compilation steps
- def minimalCompilation = false // It must be true for locker because we do not need to compile everything
-
- def libraryWS: WrapperStep with Packaging
- def toolsWS: WrapperStep
-
- lazy val pluginsWS = new WrapperStep(continuationPluginConfig :: continuationLibraryConfig :: Nil)
- lazy val allSteps = new WrapperStep(libraryWS :: compilerConfig :: pluginsWS :: toolsWS :: Nil)
-}
diff --git a/project/build/BuildInfoEnvironment.scala b/project/build/BuildInfoEnvironment.scala
deleted file mode 100644
index fc1c436c33..0000000000
--- a/project/build/BuildInfoEnvironment.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-import sbt._
-trait BuildInfoEnvironment {
- self : Project =>
- def buildInfoEnvironmentLocation: Path
- /**
- * Environment for storing properties that
- * 1) need to be saved across sbt session
- * 2) Are local to a layer
- * Used to save the last version of the compiler used to build the layer (for discarding it's product if necessary)
- */
- lazy val layerEnvironment = new BasicEnvironment {
- // use the project's Logger for any properties-related logging
- def log = self.log
-
- // the properties file will be read/stored
- def envBackingPath = buildInfoEnvironmentLocation
- // define some properties
- lazy val lastCompilerVersion: Property[String] = propertyOptional[String]("")
- }
-
-}
diff --git a/project/build/Comparator.scala b/project/build/Comparator.scala
deleted file mode 100644
index 7400788ba9..0000000000
--- a/project/build/Comparator.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-import sbt._
-import java.io.{File, FileInputStream}
-
-// Based on scala.tools.ant.Same
-object Comparator {
-
- private def getMappedPath(path: Path, baseDirectory: Path): Path = {
- Path.fromString(baseDirectory, path.relativePath)
- }
-
-
- def compare(origin: Path, dest: Path, filter: Path => PathFinder, log: Logger): Option[String] = {
- log.info("Comparing the contents of "+origin.absolutePath+ " with "+dest.absolutePath)
- var allEqualNow = true
-
- def reportDiff(f1: File, f2: File) = {
- allEqualNow = false
- log.error("File '" + f1 + "' is different from correspondant.")
- }
-
- def reportMissing(f1: File) = {
- allEqualNow = false
- log.error("File '" + f1 + "' has no correspondant.")
- }
-
-
-
- val originPaths = filter(origin).get
-
- val bufferSize = 1024
- val originBuffer = new Array[Byte](bufferSize)
- val destBuffer = new Array[Byte](bufferSize)
-
- for (originPath <- originPaths.filter(! _.isDirectory)){
- log.debug("origin :" + originPath.absolutePath)
- val destPath = getMappedPath(originPath, dest)
- log.debug("dest :" + destPath.absolutePath)
- var equalNow = true
- val originFile = originPath.asFile
- val destFile = destPath.asFile
-
- if (originFile.canRead && destFile.canRead) {
-
- val originStream = new FileInputStream(originFile)
- val destStream = new FileInputStream(destFile)
- var originRemaining = originStream.read(originBuffer)
- var destRemaining = destStream.read(destBuffer)
- while (originRemaining > 0 && equalNow) {
- if (originRemaining == destRemaining)
- for (idx <- 0 until originRemaining) {
- equalNow = equalNow && (originBuffer(idx) == destBuffer(idx))}
- else
- equalNow = false
- originRemaining = originStream.read(originBuffer)
- destRemaining = destStream.read(destBuffer)
- }
- if (destRemaining > 0) equalNow = false
-
- if (!equalNow) reportDiff(originFile, destFile)
-
- originStream.close
- destStream.close
-
- }
- else reportMissing(originFile)
-
- }
- if(allEqualNow) None else Some("There were differences between "+origin.absolutePath+ " and "+ dest.absolutePath)
- }
-
-
-}
diff --git a/project/build/Compilation.scala b/project/build/Compilation.scala
deleted file mode 100644
index d581b2b736..0000000000
--- a/project/build/Compilation.scala
+++ /dev/null
@@ -1,104 +0,0 @@
-import sbt._
-import xsbt.{AnalyzingCompiler, ScalaInstance}
-import FileUtilities._
-
-/**
- * This trait define the compilation task.
-* @author Grégory Moix
- */
-trait Compilation {
- self : ScalaBuildProject with BuildInfoEnvironment =>
-
- def lastUsedCompilerVersion = layerEnvironment.lastCompilerVersion
-
- def instantiationCompilerJar: Path
- def instantiationLibraryJar: Path
-
- def instanceScope[A](action: ScalaInstance => A): A = {
- val instance = ScalaInstance(instantiationLibraryJar.asFile, instantiationCompilerJar.asFile, info.launcher, msilJar.asFile, fjbgJar.asFile)
- log.debug("Compiler will be instantiated by :" +instance.compilerJar +" and :" +instance.libraryJar )
- action(instance)
- }
-
- def compile(stepList: Step, clean:() => Option[String]): Option[String] = compile(stepList, Some(clean))
- def compile(stepList: Step): Option[String] = compile(stepList, None)
- /**
- * Execute the different compilation parts one after the others.
- */
- def compile(stepsList: Step, clean: Option[() => Option[String]]): Option[String] ={
-
- instanceScope[Option[String]]{ scala =>
- lazy val analyzing = new AnalyzingCompiler(scala, componentManager, xsbt.ClasspathOptions.manual, log)
-
- def compilerVersionHasChanged = lastUsedCompilerVersion.value != scala.actualVersion
-
- def checkAndClean(cleanFunction:() => Option[String]): Option[String] ={
- if (compilerVersionHasChanged) {
- log.info("The compiler version used to build this layer has changed since last time or this is a clean build.")
- lastUsedCompilerVersion.update(scala.actualVersion)
- layerEnvironment.saveEnvironment
- cleanFunction()
- } else {
- log.debug("The compiler version is unchanged. No need for cleaning.")
- None
- }
- }
-
- def compile0(steps: List[Step]): Option[String] = {
- steps foreach {
- case c: CompilationStep =>
- val conditional = new CompileConditional(c, analyzing)
- log.info("")
- val res = conditional.run orElse copy(c) orElse earlyPackaging(c)
- if (res.isDefined)
- return res
- case _ => ()
- }
- None
- }
-
- /**
- * When we finishe to compile a step we want to jar if necessary in order to
- * be able to load plugins for the associated library
- */
- def earlyPackaging(step: CompilationStep): Option[String] = step match {
- case s: EarlyPackaging => {
- val c = s.earlyPackagingConfig
- log.debug("Creating jar for plugin")
- jar(c.content.flatMap(Packer.jarPattern(_)), c.jarDestination, c.manifest, false, log)
- }
- case _ => None
- }
-
- def copy(step: CompilationStep): Option[String] = step match {
- case s: ResourcesToCopy => s.copy
- case _ => None
- }
-
- def cleanIfNecessary: Option[String] = clean match {
- case None => None
- case Some(f) => checkAndClean(f)
- }
- cleanIfNecessary orElse compile0(stepsList.topologicalSort)
- }
- }
-
-
-}
-
-trait LayerCompilation extends Compilation {
- self : BasicLayer =>
-
- protected def cleanCompilation: Option[String] = {
- log.info("Cleaning the products of the compilation.")
- FileUtilities.clean(layerOutput :: Nil, true, log)
- }
-
- /**
- * Run the actual compilation. Should not be called directly because it is executed on the same jvm and that
- * it could lead to memory issues. It is used only when launching a new sbt process to do the compilation.
- */
- lazy val compilation = task {compile(allSteps, cleanCompilation _)}
-
- def externalCompilation: Option[String] = maybeFork(compilation)
-}
diff --git a/project/build/CompilationStep.scala b/project/build/CompilationStep.scala
deleted file mode 100644
index 000dca0234..0000000000
--- a/project/build/CompilationStep.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-import sbt._
-import AdditionalResources._
-
-trait Step extends Dag[Step] {
- def dependencies: Iterable[Step]
-}
-
-class WrapperStep(contents: List[Step]) extends Step {
- def dependencies = contents
-}
-
-abstract class CompilationStep(val name: String, val pathConfig: PathConfig, logger: Logger) extends CompileConfiguration with Step {
- def this(name: String, layout: PathLayout, logger: Logger) = this(name, layout / name, logger)
-
- // Utility methods (for quick access, ...)
- final def srcDir = pathConfig.sources
-
- // Methods required for the compilation
- def log: Logger = logger
- final def sourceRoots : PathFinder = pathConfig.sources
- def sources: PathFinder = sourceRoots.descendentsExcept("*.java" | "*.scala", ".svn")
- final def projectPath: Path = pathConfig.projectRoot
- final def analysisPath: Path = pathConfig.analysis
- final def outputDirectory: Path = pathConfig.output
- def classpath = {
- def addDependenciesOutputTo(list: List[Step], acc: PathFinder): PathFinder = list match {
- case Nil => acc
- case x :: xs => x match {
- case c: CompilationStep => addDependenciesOutputTo(xs, acc +++ c.outputDirectory)
- case w: WrapperStep => addDependenciesOutputTo(xs, addDependenciesOutputTo(dependencies.toList, acc))
- }
- }
- addDependenciesOutputTo(dependencies.toList, outputDirectory)
- }
- def javaOptions: Seq[String] = "-target 1.5 -source 1.5 -g:none" split ' '
- def maxErrors: Int = 100
- def compileOrder = CompileOrder.JavaThenScala
- def fingerprints = Fingerprints(Nil, Nil)
-}
diff --git a/project/build/ForkSBT.scala b/project/build/ForkSBT.scala
deleted file mode 100644
index b30e35e61f..0000000000
--- a/project/build/ForkSBT.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/** Scala SBT build
- * Copyright 2005-2010 LAMP/EPFL
- * @author Paul Phillips
- */
-
-import sbt._
-
-/** Worked out a way to fork sbt tasks, preserving all sbt command line
- * options and without hardcoding anything.
- */
-trait ForkSBT {
- self: BasicLayer =>
-
- def jvmArguments: List[String] = {
- import scala.collection.jcl.Conversions._
- import java.lang.management.ManagementFactory
- ManagementFactory.getRuntimeMXBean().getInputArguments().toList
- }
-
- private var extraJVMArgs: List[String] = Nil
- def withJVMArgs[T](args: String*)(body: => T): T = {
- val saved = extraJVMArgs
- extraJVMArgs = args.toList
- try { body }
- finally extraJVMArgs = saved
- }
-
- // Set a property in forked sbts to inhibit possible forking cycles.
- def markForked = "-D" + forkProperty + "=true"
-
- /** Forks a new process to run "sbt task task ...":
- */
- def forkTasks(tasks: String*): Boolean = {
- require (!isForked, "Tried to fork but sbt is already forked: " + tasks.mkString(" "))
-
- val sbtJar = System.getProperty("java.class.path")
- val sbtMain = "xsbt.boot.Boot" // ok, much of anything.
- val args = jvmArguments ++ Seq(markForked, "-classpath", sbtJar, sbtMain) ++ tasks
-
- log.info("Forking: " + args.mkString("java ", " ", ""))
- Fork.java(None, args, StdoutOutput) == 0
- }
- def maybeFork(task: TaskManager#Task): Option[String] = maybeFork(task, "Error during external compilation.")
- def maybeFork(task: TaskManager#Task, errorMsg: String): Option[String] = {
- if (isForked) task.run
- else if (forkTasks("project " + this.name, task.name)) None
- else Some(errorMsg)
- }
-}
diff --git a/project/build/Packer.scala b/project/build/Packer.scala
deleted file mode 100644
index 73db5567b6..0000000000
--- a/project/build/Packer.scala
+++ /dev/null
@@ -1,122 +0,0 @@
-import sbt._
-import java.io.{File, FileInputStream}
-import java.util.jar.Manifest
-import AdditionalResources._
-import FileUtilities._
-
-
-
-object Packer {
-
- /**
- * A filter that exclude files that musn't be in a jar file.
- */
- // We must exclude the manifest because we generate it automatically, and when we add multiples other jars, they could have
- // also a manifest files each, resulting in conflicts for the FileUtilities.jar(..) method
- def jarPattern(path: PathFinder) = path.descendentsExcept(AllPassFilter, (".*" - ".") || HiddenFileFilter || new ExactFilter("MANIFEST.MF")).get
-
- def createJar(j: Packaging, log: Logger): Option[String] = createJar(j.packagingConfig, log, jarPattern _, true)
- def createJar(j: PackagingConfiguration, log: Logger): Option[String] = createJar(j, log, jarPattern _, true)
-
-
- /**
- * Create a jar from the packaging trait. Is able to add directly others jars to it
- */
- def createJar(j: PackagingConfiguration, log: Logger, filter:(PathFinder) => Iterable[Path], addIncludedLibs: Boolean): Option[String] = {
- def pack0(content: Iterable[Path])= jar(content.flatMap(filter(_)), j.jarDestination, j.manifest, false, log)
-
- j.jarsToInclude match {
- case Nil => pack0(j.content)
- case list if addIncludedLibs => {
- withTemporaryDirectory(log) { tmp: File =>
- val tmpPath = Path.fromFile(tmp)
- log.debug("List of jars to be added : " +list)
- def unzip0(l: List[Path]): Option[String] = l match {
- case x :: xs => {unzip(x, tmpPath, log);unzip0(xs)} //TODO properly handle failing of unzip
- case Nil => None
- }
- unzip0(list)
- log.debug("Content of temp folder"+ tmpPath.##.**( GlobFilter("*")))
- pack0(j.content ++ Set(tmpPath ##))
- }
- }
- case _ => pack0(j.content)
-
- }
- }
-
-}
-
-/**
- * Create the jars of pack
- * @author Grégory Moix
- */
-trait Packer {
- self: BasicLayer =>
-
- def libraryToCopy: List[Path] = Nil
-
- /**
- * The actual pack task.
- */
- def packF = {
- import Packer._
- def iterate(steps: List[Step]): Option[String] = steps match {
- case x :: xs => x match {
- case c: Packaging => {
- createJar(c, log) orElse iterate(xs)
- }
- case _ => iterate(xs)
- }
- case Nil => None
- }
-
- def copy0 ={
- copyFile(manifestPath,packingDestination/"META-INF"/"MANIFEST.MF", log) orElse {
- copy(libraryToCopy, packingDestination , true, true, log) match {
- case Right(_) => None
- case Left(e) => Some(e)
- }
- }
- }
- iterate(allSteps.topologicalSort) orElse copy0
- }
- lazy val pack = task {packF}.dependsOn(finishLayer)
-}
-
-
-class PackagingConfiguration(val jarDestination: Path, val content: Iterable[Path], val manifest: Manifest, val jarsToInclude: List[Path]){
- def this(jarDestination: Path, content: Iterable[Path])= this(jarDestination, content, new Manifest, Nil)
- def this(jarDestination: Path, content: Iterable[Path], jarsToInclude: List[Path])= this(jarDestination, content, new Manifest, jarsToInclude)
-}
-
-trait Packaging extends Step {
- def packagingConfig: PackagingConfiguration
-}
-
-trait WrapperPackaging extends Packaging {
- self : WrapperStep =>
-
- def jarContent = {
- def getContent(list: List[Step], acc: List[Path]): List[Path] = list match {
- case Nil => acc
- case x :: xs => x match {
- case w: WrapperStep => getContent(xs, getContent(w.dependencies.toList, acc))
- case c: CompilationStep => getContent(xs, (c.outputDirectory ##) :: acc)
- }
- }
- getContent(dependencies.toList, Nil)
- }
-}
-
-/**
- * This trait is here to add the possiblity to have a different packing destination that is used right after the
- * compilation of the step has finished. It permits to have use libraries that are build using a plugin. (The plugin must
- * be a jar in order to be recognised by the compiler.
- */
-trait EarlyPackaging extends Packaging {
- self: CompilationStep =>
- //def earlyPackagingDestination: Path
- //def earlyJarDestination = earlyPackagingDestination / jarName
- def earlyPackagingConfig: PackagingConfiguration
-}
diff --git a/project/build/Partest.scala b/project/build/Partest.scala
deleted file mode 100755
index 7771c6f208..0000000000
--- a/project/build/Partest.scala
+++ /dev/null
@@ -1,370 +0,0 @@
-import sbt._
-import java.io.File
-import java.net.URLClassLoader
-import TestSet.{filter}
-
-class TestSet(val SType: TestSetType.Value, val kind: String, val description: String, val files: Array[File]){
- /**
- * @param a list of file that we want to know wheter they are members of the test set or not
- * @return two lists : the first contains files that are member of the test set, the second contains the files that aren't
- */
- def splitContent(f: List[File]):(List[File], List[File]) = {
- f.partition((f: File) => files.elements.exists((e: File) => f == e))
- }
-}
-
-object TestSet {
- def apply(sType: TestSetType.Value, kind: String, description: String, files: PathFinder)= new TestSet(sType, kind, description, filter(files))
- def filter(p: PathFinder): Array[File] =( p --- p **(HiddenFileFilter || GlobFilter("*.obj")||GlobFilter("*.log"))).getFiles.toArray
-}
-
-object TestSetType extends Enumeration {
- val Std, Continuations = Value
-}
-
-class TestConfiguration(val library: Path, val classpath: Iterable[Path], val testRoot: Path,
- val tests: List[TestSet], val junitReportDir: Option[Path]){
-}
-
-trait PartestRunner {
- self: BasicLayer with Packer =>
-
- import Partest.runTest
- import TestSetType._
-
- lazy val testRoot = projectRoot / "test"
- lazy val testFiles = testRoot / "files" ##
- lazy val testLibs = testFiles / "lib"
-
- lazy val posFilesTest = TestSet(Std,"pos", "Compiling files that are expected to build", testFiles / "pos" * ("*.scala" || DirectoryFilter))
- lazy val negFilesTest = TestSet(Std,"neg", "Compiling files that are expected to fail", testFiles / "neg" * ("*.scala" || DirectoryFilter))
- lazy val runFilesTest = TestSet(Std,"run", "Compiling and running files", testFiles / "run" * ("*.scala" || DirectoryFilter))
- lazy val jvmFilesTest = TestSet(Std,"jvm", "Compiling and running files", testFiles / "jvm" *("*.scala" || DirectoryFilter))
- lazy val resFilesTest = TestSet(Std,"res", "Running resident compiler scenarii", testFiles / "res" * ("*.res"))
- lazy val buildmanagerFilesTest = TestSet(Std,"buildmanager", "Running Build Manager scenarii", testFiles / "buildmanager" * DirectoryFilter)
- // lazy val scalacheckFilesTest = TestSet(Std,"scalacheck", "Running scalacheck tests", testFiles / "scalacheck" * ("*.scala" || DirectoryFilter))
- lazy val scriptFilesTest = TestSet(Std,"script", "Running script files", testFiles / "script" * ("*.scala"))
- lazy val shootoutFilesTest = TestSet(Std,"shootout", "Running shootout tests", testFiles / "shootout" * ("*.scala"))
- lazy val scalapFilesTest = TestSet(Std,"scalap", "Running scalap tests", testFiles / "scalap" * ("*.scala"))
- lazy val specializedFilesTest = TestSet(Std,"specialized", "Running specialized tests", testFiles / "specialized" * ("*.scala"))
-
- // lazy val negContinuationTest = TestSet(Continuations,"neg", "Compiling continuations files that are expected to fail", testFiles / "continuations-neg" * ("*.scala" || DirectoryFilter))
- // lazy val runContinuationTest = TestSet(Continuations,"run", "Compiling and running continuations files", testFiles / "continuations-run" ** ("*.scala" ))
- //
- // lazy val continuationScalaOpts = (
- // "-Xpluginsdir " +
- // continuationPluginConfig.packagingConfig.jarDestination.asFile.getParent +
- // " -Xplugin-require:continuations -P:continuations:enable"
- // )
-
- lazy val testSuiteFiles: List[TestSet] = List(
- posFilesTest, negFilesTest, runFilesTest, jvmFilesTest, resFilesTest,
- buildmanagerFilesTest,
- //scalacheckFilesTest,
- shootoutFilesTest, scalapFilesTest,
- specializedFilesTest
- )
- lazy val testSuiteContinuation: List[TestSet] = Nil // List(negContinuationTest, runContinuationTest)
-
- private lazy val filesTestMap: Map[String, TestSet] =
- Map(testSuiteFiles.map(s => (s.kind,s) ):_*)
- // + (("continuations-neg",negContinuationTest),("continuations-run", runContinuationTest))
-
- private lazy val partestOptions = List("-failed")
-
- private lazy val partestCompletionList: Seq[String] = {
- val len = testFiles.asFile.toString.length + 1
-
- filesTestMap.keys.toList ++ partestOptions ++
- (filesTestMap.values.toList flatMap (_.files) map (_.toString take len))
- }
-
- private def runPartest(tests: List[TestSet], scalacOpts: Option[String], failedOnly: Boolean) = {
-
- val config = new TestConfiguration(
- outputLibraryJar,
- (outputLibraryJar +++ outputCompilerJar +++ outputPartestJar +++ outputScalapJar +++ antJar +++ jlineJar +++ (testLibs * "*.jar")).get,
- testRoot,
- tests,
- None
- )
-
- val javaHome = Path.fromFile(new File(System.getProperty("java.home")))
- val java = Some(javaHome / "bin" / "java" asFile)
- val javac = Some(javaHome / "bin" / "javac" asFile)
- val timeout = Some("2400000")
- val loader = info.launcher.topLoader
-
- log.debug("Ready to run tests")
-
- if (tests.isEmpty) {
- log.debug("Empty test list")
- None
- }
- else runTest(
- loader, config, java, javac,
- scalacOpts, timeout, true, true,
- failedOnly, true, isDebug, log
- )
- }
-
- def partestDebugProp =
- if (isDebug) List("-Dpartest.debug=true")
- else Nil
-
- lazy val externalPartest = task { args =>
- task {
- if (isForked) partest(args).run
- else withJVMArgs(partestDebugProp ++ args: _*) {
- if (forkTasks("partest")) None
- else Some("Some tests failed.")
- }
- } dependsOn pack
- } completeWith partestCompletionList
-
- lazy val partest = task { args =>
- var failedOnly = false
-
- def setOptions(options: List[String], acc: List[String]): List[String] = options match {
- case "-failed" :: xs =>
- failedOnly = true
- log.info("Only tests that failed previously will be run")
- setOptions(xs, acc)
- case x :: xs =>
- setOptions(xs, x :: acc)
- case _ => acc
- }
-
- def resolveSets(l: List[String], rem: List[String], acc: List[TestSet]): (List[String], List[TestSet]) = {
- def searchSet(arg: String): Option[TestSet] = filesTestMap get arg
-
- l match {
- case x :: xs => searchSet(x) match {
- case Some(s) => resolveSets(xs, rem, s :: acc)
- case None => resolveSets(xs, x :: rem, acc)
- }
- case Nil => (rem, acc)
- }
- }
-
- def resolveFiles(l: List[String], sets: List[TestSet]):(List[String], List[TestSet]) = {
- def resolve0(filesToResolve: List[File], setsToSearchIn: List[TestSet], setAcc: List[TestSet]):(List[String], List[TestSet])= {
- filesToResolve match {
- case Nil => (Nil, setAcc) // If we have no files left to resolve, we can return the list of the set we have
- case list => {
- setsToSearchIn match {
- case Nil => (list.map(_.toString), setAcc)// If we already had search all sets to find a match, we return the list of the files that where problematic and the set we have
- case x :: xs => {
- val (found, notFound)= x.splitContent(list)
- if(!found.isEmpty){
- val newSet = new TestSet(x.SType, x.kind, x.description, found.toArray)
- resolve0(notFound, xs, newSet :: setAcc)
- } else {
- resolve0(notFound, xs, setAcc)
- }
- }
- }
- }
- }
-
- }
-
- resolve0(l.map(Path.fromString(testFiles, _).asFile), filesTestMap.values.toList, sets)
- }
-
- val keys = setOptions(args.toList, Nil)
-
- if (keys.isEmpty) {
- task { runPartest(testSuiteFiles, None, failedOnly) }
- }
- else {
- val (fileNames, sets) = resolveSets(keys, Nil, Nil)
- val (notFound, allSets) = resolveFiles(fileNames, sets)
- if (!notFound.isEmpty)
- log.info("Don't know what to do with : \n"+notFound.mkString("\n"))
-
- task { runPartest(allSets, None, failedOnly) }
- }
- // if (keys.length == 0) task {
- // runPartest(testSuiteFiles, None, failedOnly) orElse {
- // runPartest(testSuiteContinuation, None, failedOnly)
- // } // this is the case where there were only config options, we will run the standard test suite
- // }
- // else {
- // val (fileNames, sets) = resolveSets(keys, Nil, Nil)
- // val (notFound, allSets) = resolveFiles(fileNames, sets)
- // if (!notFound.isEmpty)
- // log.info("Don't know what to do with : \n"+notFound.mkString("\n"))
- //
- // val (std, continuations) = allSets partition (_.SType == TestSetType.Std)
- // task {
- // runPartest(std, None, failedOnly) orElse {
- // runPartest(continuations, Some(continuationScalaOpts), failedOnly)
- // }
- // }
- // }
- }.completeWith(partestCompletionList)
-
-}
-
-object Partest {
- def runTest(
- parentLoader: ClassLoader,
- config: TestConfiguration,
- javacmd: Option[File],
- javaccmd: Option[File],
- scalacOpts: Option[String],
- timeout: Option[String],
- showDiff: Boolean,
- showLog: Boolean,
- runFailed: Boolean,
- errorOnFailed: Boolean,
- debug: Boolean,
- log: Logger
- ): Option[String] = {
-
- if (debug)
- log.setLevel(Level.Debug)
-
- if (config.classpath.isEmpty)
- return Some("The classpath is empty")
-
- log.debug("Classpath is "+ config.classpath)
-
- val classloader = new URLClassLoader(
- Array(config.classpath.toSeq.map(_.asURL):_*),
- ClassLoader.getSystemClassLoader.getParent
- )
- val runner: AnyRef =
- classloader.loadClass("scala.tools.partest.nest.SBTRunner").newInstance().asInstanceOf[AnyRef]
- val fileManager: AnyRef =
- runner.getClass.getMethod("fileManager", Array[Class[_]](): _*).invoke(runner, Array[Object](): _*)
-
- val runMethod =
- runner.getClass.getMethod("reflectiveRunTestsForFiles", Array(classOf[Array[File]], classOf[String]): _*)
-
- def runTestsForFiles(kindFiles: Array[File], kind: String) = {
- val result = runMethod.invoke(runner, Array(kindFiles, kind): _*).asInstanceOf[java.util.HashMap[String, Int]]
- scala.collection.jcl.Conversions.convertMap(result)
- }
-
- def setFileManagerBooleanProperty(name: String, value: Boolean) {
- log.debug("Setting partest property :"+name+" to :"+value)
- val setMethod =
- fileManager.getClass.getMethod(name+"_$eq", Array(classOf[Boolean]): _*)
- setMethod.invoke(fileManager, Array(java.lang.Boolean.valueOf(value)).asInstanceOf[Array[Object]]: _*)
- }
-
- def setFileManagerStringProperty(name: String, value: String) {
- log.debug("Setting partest property :"+name+" to :"+value)
- val setMethod =
- fileManager.getClass.getMethod(name+"_$eq", Array(classOf[String]): _*)
- setMethod.invoke(fileManager, Array(value).asInstanceOf[Array[Object]]: _*)
- }
-
- // System.setProperty("partest.srcdir", "files")
-
- setFileManagerBooleanProperty("showDiff", showDiff)
- setFileManagerBooleanProperty("showLog", showLog)
- setFileManagerBooleanProperty("failed", runFailed)
- if (!javacmd.isEmpty)
- setFileManagerStringProperty("JAVACMD", javacmd.get.getAbsolutePath)
- if (!javaccmd.isEmpty)
- setFileManagerStringProperty("JAVAC_CMD", "javac")
- setFileManagerStringProperty("CLASSPATH", (config.classpath.map(_.absolutePath).mkString(File.pathSeparator)))
- setFileManagerStringProperty("LATEST_LIB", config.library.absolutePath)
- setFileManagerStringProperty("SCALAC_OPTS", scalacOpts getOrElse "")
-
- if (!timeout.isEmpty)
- setFileManagerStringProperty("timeout", timeout.get)
-
- type TFSet = (Array[File], String, String)
-
- val testFileSets = config.tests
-
- def resultsToStatistics(results: Iterable[(_, Int)]): (Int, Int) = {
- val (files, failures) = results map (_._2 == 0) partition (_ == true)
- def count(i: Iterable[_]): Int ={
- var c = 0
- for (elem <-i) yield {
- c = c+1
- }
- c
- }
- (count(files), count(failures))
- }
-
-
- def runSet(set: TestSet): (Int, Int, Iterable[String]) = {
- val (files, name, msg) = (set.files, set.kind, set.description)
- log.debug("["+name+"] "+ msg+files.mkString(", files :\n","\n",""))
- if (files.isEmpty) {
- log.debug("No files !")
- (0, 0, List())
- }
- else {
- log.info(name +" : "+ msg)
- val results: Iterable[(String, Int)] = runTestsForFiles(files, name)
- val (succs, fails) = resultsToStatistics(results)
-
- val failed: Iterable[String] = results.filter( _._2!=0) map(_ match {
- case (path, 1) => path + " [FAILED]"
- case (path, 2) => path + " [TIMOUT]"
- })
-
- val r =(succs, fails, failed)
-
- config.junitReportDir match {
- case Some(d) => {
- val report = testReport(name, results, succs, fails)
- scala.xml.XML.save(d/name+".xml", report)
- }
- case None =>
- }
-
- r
- }
- }
-
- val _results = testFileSets map runSet
- val allSuccesses = _results.map (_._1).foldLeft(0)( _ + _ )
- val allFailures = _results.map (_._2).foldLeft(0)( _ + _ )
- val allFailedPaths = _results flatMap (_._3)
-
- def f(msg: String): Option[String] =
- if (errorOnFailed && allFailures > 0) {
- Some(msg)
- }
- else {
- log.info(msg)
- None
- }
- def s = if (allFailures > 1) "s" else ""
- val msg =
- if (allFailures > 0) "Test suite finished with %d case%s failing.\n".format(allFailures, s)+ allFailedPaths.mkString("\n")
- else if (allSuccesses == 0) "There were no tests to run."
- else "Test suite finished with no failures."
-
- f(msg)
-
- }
-
- private def oneResult(res: (String, Int)) =
- <testcase name ={res._1}>{
- res._2 match {
- case 0 => scala.xml.NodeSeq.Empty
- case 1 => <failure message="Test failed"/>
- case 2 => <failure message="Test timed out"/>
- }
- }</testcase>
-
- private def testReport(kind: String, results: Iterable[(String, Int)], succs: Int, fails: Int) =
- <testsuite name ={kind} tests ={(succs + fails).toString} failures ={fails.toString}>
- <properties/>
- {
- results.map(oneResult(_))
- }
- </testsuite>
-
-
-}
diff --git a/project/build/PathConfig.scala b/project/build/PathConfig.scala
deleted file mode 100644
index 3ed56846f9..0000000000
--- a/project/build/PathConfig.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-import sbt._
-
-/**
- * An abstract class for grouping all different paths that are needed to
- * compile the a CompilationStep
- * @author Grégory Moix
- */
-abstract class PathConfig {
- def projectRoot: Path
- def sources: Path
- def analysis: Path
- def output: Path
-}
-
-object PathConfig {
- val classes = "classes"
- val analysis = "analysis"
-}
-
-trait SimpleOutputLayout {
- def outputDir: Path
- lazy val classesOutput = outputDir / PathConfig.classes
- lazy val analysisOutput = outputDir / PathConfig.analysis
-
-}
-
-class PathLayout(val projectRoot: Path, val outputDir: Path) extends SimpleOutputLayout {
- lazy val srcDir = projectRoot / "src"
- /**
- * An utility method to easily create StandardPathConfig from a given path layout
- */
- def /(name: String)= new StandardPathConfig(this, name)
-}
-
-/**
- *
- */
-class StandardPathConfig(layout: PathLayout, name: String) extends PathConfig {
- lazy val projectRoot = layout.projectRoot
- lazy val sources = layout.srcDir / name
- lazy val analysis = layout.analysisOutput / name
- lazy val output = layout.classesOutput / name
-}
diff --git a/project/build/SVN.scala b/project/build/SVN.scala
deleted file mode 100644
index 427469eb64..0000000000
--- a/project/build/SVN.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-import sbt._
-
-/**
- * @param root the root of an svn repository
- * @author Moix Grégory
- */
-class SVN(root: Path) {
- /** Location of tool which parses svn revision in git-svn repository. */
- val GitSvnRevTool = root / "tools" / "get-scala-revision"
- val GitSvnRegex = """^Revision:\s*(\d+).*""".r
-
- /**
- * Gets the revision number of the repository given through the constructor of the class
- * It assumes that svn or git is installed on the running computer. Return 0 if it was not
- * able to found the revision number
- */
- def getRevisionNumber: Int = getSvn orElse getGit getOrElse 0
- def getSvn: Option[Int] = {
- /** Doing this the hard way trying to suppress the svn error message
- * on stderr. Could not figure out how to do it simply in sbt.
- */
- val pb = new java.lang.ProcessBuilder("svn", "info")
- pb directory root.asFile
- pb redirectErrorStream true
-
- Process(pb).lines_! foreach {
- case GitSvnRegex(rev) => return Some(rev.toInt)
- case _ => ()
- }
- None
- }
-
- def getGit: Option[Int] =
- try { Some(Process(GitSvnRevTool.toString, root).!!.trim.toInt) }
- catch { case _: Exception => None }
-}
diff --git a/project/build/ScalaBuildProject.scala b/project/build/ScalaBuildProject.scala
deleted file mode 100644
index 250ad7a429..0000000000
--- a/project/build/ScalaBuildProject.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-import sbt._
-import ScalaBuildProject._
-
-
-abstract class ScalaBuildProject extends Project {
- lazy val projectRoot = info.projectPath
- lazy val layerOutput = outputRootPath / name
- lazy val pathLayout = new PathLayout(projectRoot, layerOutput)
-
- lazy val manifestPath = projectRoot/"META-INF"/"MANIFEST.MF"
-
- lazy val lib = projectRoot / "lib"
- lazy val forkJoinJar = lib / forkjoinJarName
- lazy val jlineJar = lib / jlineJarName
- lazy val antJar = lib / "ant" / "ant.jar"
- lazy val fjbgJar = lib / fjbgJarName
- lazy val msilJar = lib / msilJarName
-
-}
-
-object ScalaBuildProject {
- // Some path definitions related strings
- val compilerJarName = "scala-compiler.jar"
- val libraryJarName = "scala-library.jar"
- val scalacheckJarName = "scalacheck.jar"
- val scalapJarName = "scalap.jar"
- val dbcJarName = "scala-dbc.jar"
- val swingJarName = "scala-swing.jar"
- val partestJarName = "scala-partest.jar"
- val fjbgJarName = "fjbg.jar"
- val msilJarName = "msil.jar"
- val jlineJarName = "jline.jar"
- val forkjoinJarName = "forkjoin.jar"
-
-
-}
diff --git a/project/build/ScalaSBTBuilder.scala b/project/build/ScalaSBTBuilder.scala
deleted file mode 100644
index 81c7860020..0000000000
--- a/project/build/ScalaSBTBuilder.scala
+++ /dev/null
@@ -1,362 +0,0 @@
-import sbt._
-import ScalaBuildProject._
-import ScalaSBTBuilder._
-
-/**
- * This class is the entry point for building scala with SBT.
- * @author Grégory Moix
- */
-class ScalaSBTBuilder(val info: ProjectInfo)
- extends Project
- with ReflectiveProject
- with BasicDependencyProject
- // with IdeaProject
- with MavenStyleScalaPaths {
- /** This secret system property turns off transitive dependencies during change
- * detection. It's a short term measure. BE AWARE! That means you can no longer
- * trust sbt to recompile everything: it's only recompiling changed files.
- * (The alternative is that adding a space to TraversableLike incurs a 10+ minute
- * incremental build, which means sbt doesn't get used at all, so this is better.)
- */
- System.setProperty("sbt.intransitive", "true")
-
- // Required by BasicDependencyProject
- def fullUnmanagedClasspath(config: Configuration) = unmanagedClasspath
-
- override def dependencies: Iterable[Project] = (
- info.dependencies ++
- locker.dependencies ++
- quick.dependencies ++
- strap.dependencies ++
- libs.dependencies
- )
- override def shouldCheckOutputDirectories = false
-
- // Support of triggered execution at top level
- override def watchPaths = info.projectPath / "src" ** ("*.scala" || "*.java" || AdditionalResources.basicFilter)
-
- // Top Level Tasks
- lazy val buildFjbg = libs.buildFjbg.describedAs(buildFjbgTaskDescription)
- lazy val buildForkjoin = libs.buildForkjoin.describedAs(buildForkjoinTaskDescription)
- lazy val buildMsil = libs.buildMsil.describedAs(buildMislTaskDescription)
- lazy val clean = quick.clean.dependsOn(libs.clean).describedAs(cleanTaskDescription)
- lazy val cleanAll = locker.clean.dependsOn(libs.clean).describedAs(cleanAllTaskDescription)
- lazy val compile = task {None}.dependsOn(quick.binPack, quick.binQuick).describedAs(buildTaskDescription)
- lazy val docs = quick.scaladoc.describedAs(docsTaskDescription)
- lazy val newFjbg = libs.newFjbg.describedAs(newFjbgTaskDescription)
- lazy val newForkjoin = libs.newForkjoin.describedAs(newForkjoinTaskDescription)
- lazy val newLocker = locker.newLocker.describedAs(newLockerTaskDescription)
- lazy val newMsil = libs.newMsil.describedAs(newMsilTaskDescription)
- lazy val newStarr = quick.newStarr.describedAs(newStarrTaskDescription)
- lazy val palo = locker.pack.describedAs(paloTaskDescription)
- lazy val pasta = quick.pasta.describedAs(pastaTaskDescription)
- lazy val stabilityTest = strap.stabilityTest.describedAs(stabilityTestTaskDescription)
- lazy val test = quick.externalPartest.describedAs(partestTaskDescription)
-
- // Non-standard names for tasks chosen earlier which I point at the standard ones.
- lazy val build = compile
- lazy val partest = test
-
- // Top level variables
-
- /**
- * The version number of the compiler that will be created by the run of sbt. It is initialised once
- * the first time it is needed, meaning that this number will be kept
- * until sbt quit.
- */
- lazy val versionNumber: String ={
- def getTimeString: String = {
- import java.util.Calendar;
- import java.text.SimpleDateFormat;
- val formatString = "yyyyMMddHHmmss"
- new SimpleDateFormat(formatString) format Calendar.getInstance.getTime
- }
- def getVersion: String = projectVersion.value.toString takeWhile (_ != '-') mkString
- def getRevision: Int = new SVN(info.projectPath) getRevisionNumber
-
- getVersion+".r"+getRevision+"-b"+getTimeString
- }
-
- /* LAYER DEFINITIONS
- * We define here what's specific to each layer are they differ.
- * The common behavior is defined in the BasicLayer class
- * It is important that the class that extends BasicLayer are inner classes of ScalaSBTBuilder. If not, SBT will
- * not know what the main project definition is, as it will find many classes that extends Project
- */
-
- lazy val locker = project(info.projectPath,"locker", new LockerLayer(_))
- lazy val quick = project(info.projectPath,"quick", new QuickLayer(_, locker))
- lazy val strap = project(info.projectPath,"strap", new StrapLayer(_, quick))
- lazy val libs = project(info.projectPath,"libs", new LibsBuilder(_))
-
-
- /**
- * Definition of what is specific to the locker layer. It implements SimplePacker in order to
- * be able to create palo (packed locker)
- */
- class LockerLayer(info: ProjectInfo) extends BasicLayer(info, versionNumber, None) with Packer {
-
-
- override lazy val nextLayer = Some(quick)
- lazy val instantiationCompilerJar = lib / compilerJarName
- lazy val instantiationLibraryJar = lib / libraryJarName
- lazy val lockFile = layerOutput / "locker.lock"
-
- /**
- * We override the definition of the task method in order to make the tasks of this layer
- * be executed only if the layer is not locked. Task of this layer that should be executed
- * whether the layer is locked or not should call super.task instead
- */
- override def task(action : => Option[String])=
- super.task {
- if (lockFile.exists) {
- log.info(name +" is locked")
- None
- }
- else action
- }
-
- def deleteLock = FileUtilities.clean(lockFile, log)
- def createLock = {
- log.info("locking "+name)
- FileUtilities.touch(lockFile, log)
- }
-
- /**
- * Task for locking locker
- */
- lazy val lock = super.task {
- createLock
- }
-
- /**
- * Task for unlocking locker
- */
- lazy val unlock = super.task {
- deleteLock
- }
-
- lazy val newLocker = super.task {
- createNewLocker
- }
- def createNewLocker = {
- deleteLock orElse
- buildLayer orElse
- createLock
- }
-
-
- /**
- * Making locker being locked when it has finished building
- */
- override lazy val finishLayer = lock.dependsOn(build)
-
- override lazy val pack = super.task {packF}.dependsOn(finishLayer)
-
-
- override lazy val packingDestination: Path = outputRootPath /"palo"
-
- override lazy val libraryWS = {
- new WrapperStep(libraryConfig :: Nil) with WrapperPackaging {
- lazy val packagingConfig = new PackagingConfiguration(libsDestination/libraryJarName, jarContent)
- }
- }
- override val minimalCompilation = true
- override lazy val pluginsWS: WrapperStep = new WrapperStep(Nil)
- override lazy val toolsWS = new WrapperStep(Nil)
- }
-
-
- /**
- * Definition of what is specific to the quick layer. It implements Packer in order to create pack, ScalaTools
- * for creating the binaries and Scaladoc to generate the documentation
- */
- class QuickLayer(info: ProjectInfo, previous: BasicLayer) extends BasicLayer(info, versionNumber, Some(previous)) with PartestRunner
- with Packer with ScalaTools with Scaladoc {
-
- override lazy val nextLayer = Some(strap)
-
-
- lazy val instantiationCompilerJar = previous.compilerOutput
- lazy val instantiationLibraryJar = previous.libraryOutput
-
-
- override lazy val packingDestination: Path = outputRootPath/ "pack"
-
- override def libraryToCopy = jlineJar :: Nil
- override def compilerAdditionalJars = msilJar :: fjbgJar :: Nil
- override def libraryAdditionalJars = forkJoinJar :: Nil
-
- override def cleaningList = packedStarrOutput :: super.cleaningList
-
-
- override lazy val libraryWS = new WrapperStep(libraryConfig :: actorsConfig :: dbcConfig :: swingConfig :: Nil) with Packaging {
- def jarContent = List(libraryConfig , actorsConfig, continuationLibraryConfig).map(_.outputDirectory ##)
- lazy val starrJarContent = List(libraryConfig , actorsConfig, dbcConfig, swingConfig, continuationLibraryConfig).map(_.outputDirectory ##)
- lazy val packagingConfig = new PackagingConfiguration(libsDestination/libraryJarName, jarContent, libraryAdditionalJars)
- lazy val starrPackagingConfig = new PackagingConfiguration(packedStarrOutput/libraryJarName, starrJarContent)
-
- }
-
- override lazy val toolsWS = new WrapperStep(scalacheckConfig :: scalapConfig :: partestConfig :: Nil)
-
- // An additional task for building only the library of quick
- // Used for compiling msil
- lazy val compileLibraryOnly = task {
- compile(libraryConfig, cleanCompilation _)
- }
- lazy val externalCompileLibraryOnly = task(maybeFork(compileLibraryOnly)) dependsOn startLayer
-
- def createNewStarrJar: Option[String] ={
- import Packer._
- createJar(libraryWS.starrPackagingConfig, log) orElse
- createJar(compilerConfig.starrPackagingConfig, log)
- }
- lazy val pasta = task {
- createNewStarrJar
- }.dependsOn(build)
-
- lazy val newStarr = task {
- val files = (packedStarrOutput ##) * "*.jar"
- FileUtilities.copy(files.get, lib, true, log) match {
- case Right(_) => None
- case Left(_) => Some("Error occured when copying the new starr to its destination")
- }
-
- }.dependsOn(pasta)
-
- /*
- * Defining here the creation of the binaries for quick and pack
- */
- private lazy val quickBinClasspath = libraryOutput :: actorsOutput :: dbcOutput :: swingOutput :: compilerOutput :: scalapOutput :: forkJoinJar :: fjbgJar :: msilJar :: jlineJar :: Nil
- private lazy val packBinClasspath = Nil
- lazy val binQuick = tools(layerOutput / "bin", quickBinClasspath).dependsOn(finishLayer)
- lazy val binPack = tools(packingDestination / "bin", packBinClasspath).dependsOn(pack)
- }
-
-
- /**
- * Definition of what is specific to the strap layer
- */
- class StrapLayer(info: ProjectInfo, previous: BasicLayer) extends BasicLayer(info, versionNumber, Some(previous)) {
-
- lazy val instantiationCompilerJar = previous.compilerOutput
- lazy val instantiationLibraryJar = previous.libraryOutput
- private val quick = previous
-
- override lazy val libraryWS = new WrapperStep(libraryConfig :: actorsConfig :: dbcConfig :: swingConfig :: Nil) with WrapperPackaging {
- lazy val packagingConfig = new PackagingConfiguration(libsDestination/libraryJarName, Set())
-
- }
-
- override lazy val toolsWS = new WrapperStep(scalacheckConfig :: scalapConfig :: partestConfig :: Nil)
-
-
- def compare = {
- import PathConfig.classes
- def filter(path: Path)= path.descendentsExcept(AllPassFilter, HiddenFileFilter || "*.properties")
- Comparator.compare(quick.pathLayout.outputDir/classes ##, this.pathLayout.outputDir/classes ##, filter _ , log)
- }
-
- lazy val stabilityTest = task {
- log.warn("Stability test must be run on a clean build in order to yield correct results.")
- compare
- }.dependsOn(finishLayer)
- }
-
- /**
- * An additional subproject used to build new version of forkjoin, fjbg and msil
- */
- class LibsBuilder(val info: ProjectInfo) extends ScalaBuildProject with ReflectiveProject with Compilation with BuildInfoEnvironment {
- override def dependencies = info.dependencies
- override def watchPaths = info.projectPath / "src" ** ("*.scala" || "*.java" ||AdditionalResources.basicFilter) // Support of triggered execution at project level
-
-
- def buildInfoEnvironmentLocation: Path = outputRootPath / ("build-"+name+".properties")
-
- def instantiationCompilerJar: Path = locker.compilerOutput
- def instantiationLibraryJar: Path = locker.libraryOutput
-
- def libsDestination = layerOutput
-
- lazy val checkJavaVersion = task {
- val version = System.getProperty("java.version")
- log.debug("java.version ="+version)
- val required = "1.6"
- if (version.startsWith(required)) None else Some("Incompatible java version : required "+required)
- }
-
-
- private def simpleBuild(step: CompilationStep with Packaging)= task {
- import Packer._
- compile(step) orElse createJar(step, log)
- }.dependsOn(locker.finishLayer)
-
- private def copyJar(step: CompilationStep with Packaging, name: String) = task {
- FileUtilities.copyFile(step.packagingConfig.jarDestination, lib/name, log)
- }
-
- lazy val newForkjoin = copyJar(forkJoinConfig, forkjoinJarName).dependsOn(buildForkjoin)
- lazy val buildForkjoin = simpleBuild(forkJoinConfig).dependsOn(checkJavaVersion)
- lazy val newFjbg = copyJar(fjbgConfig, fjbgJarName).dependsOn(buildFjbg)
- lazy val buildFjbg = simpleBuild(fjbgConfig)
- lazy val newMsil = copyJar(msilConfig, msilJarName).dependsOn(buildMsil)
- // TODO As msil contains scala files, maybe needed compile it with an ExternalSBTRunner
- lazy val buildMsil = simpleBuild(msilConfig).dependsOn(quick.externalCompileLibraryOnly)
-
- lazy val forkJoinConfig = new CompilationStep("forkjoin", pathLayout, log) with Packaging {
- def label = "new forkjoin library"
- override def sources: PathFinder = sourceRoots.descendentsExcept("*.java", ".svn")
- def dependencies = Seq()
- def options = Seq()
- override def javaOptions = Seq("-target","1.5","-source","1.5","-g")
- lazy val packagingConfig = new PackagingConfiguration(libsDestination/forkjoinJarName, List(outputDirectory ##))
- }
-
- lazy val fjbgConfig = new CompilationStep("fjbg", pathLayout, log) with Packaging {
- def label = "new fjbg library"
- override def sources: PathFinder = sourceRoots.descendentsExcept("*.java", ".svn")
- def dependencies = Seq()
- def options = Seq()
- override def javaOptions = Seq("-target","1.5","-source","1.4","-g")
- lazy val packagingConfig = new PackagingConfiguration(libsDestination/fjbgJarName, List(outputDirectory ##))
-
- }
-
- lazy val msilConfig = new CompilationStep("msil", pathLayout, log) with Packaging {
- def label = "new msil library"
- override def sources: PathFinder = sourceRoots.descendentsExcept("*.java" |"*.scala", ".svn" |"tests")
- def dependencies = Seq()
- override def classpath = super.classpath +++ quick.libraryOutput
- def options = Seq()
- override def javaOptions = Seq("-target","1.5","-source","1.4","-g")
- lazy val packagingConfig = new PackagingConfiguration(libsDestination/msilJarName, List(outputDirectory ##))
-
- }
-
- def cleaningList = layerOutput :: layerEnvironment.envBackingPath :: Nil
-
- def cleanFiles = FileUtilities.clean(cleaningList, true, log)
-
- lazy val clean: Task = task {cleanFiles}// We use super.task, so cleaning is done in every case, even when locked
-
- }
-}
-object ScalaSBTBuilder {
- val buildTaskDescription = "build locker, lock it, build quick and create pack. It is the equivalent command to 'ant build'."
- val cleanTaskDescription = "clean the outputs of quick and strap. locker remains untouched."
- val cleanAllTaskDescription = "same as clean, but in addition clean locker too."
- val docsTaskDescription = "generate the scaladoc"
- val partestTaskDescription = "run partest"
- val stabilityTestTaskDescription = "run stability testing. It is required to use a clean build (for example, execute the clean-all action) in order to ensure correctness of the result."
- val paloTaskDescription = "create palo"
- val pastaTaskDescription = "create all the jar needed to make a new starr from quick (pasta = packed starr). It does not replace the current library and compiler jars in the libs folder, but the products of the task are instead located in target/pasta"
- val newStarrTaskDescription = "create a new starr and replace the library and compiler jars in the libs folder. It will keep locker locker locked, meaning that if you want to update locker after updating starr, you must run the 'new-locker' command. It will not automatically run partest and stability testing before replacing."
- val newLockerTaskDescription = "replace locker. It will build a new locker. It does not automatically rebuild quick."
- val buildForkjoinTaskDescription = "create all the jar needed to make a new forkjoin. It does not replace the current library and compiler jars in the libs folder, but the products of the task are instead located in target/libs."
- val newForkjoinTaskDescription = "create a new forkjoin and replace the corresponding jar in the libs folder."
- val buildFjbgTaskDescription = "create all the jar needed to make a new fjbg. It does not replace the current library and compiler jars in the libs folder, but the products of the task are instead located in target/libs."
- val newFjbgTaskDescription = "create a new fjbg and replace the corresponding jar in the libs folder."
- val buildMislTaskDescription = "create all the jar needed to make a new msil. It does not replace the current library and compiler jars in the libs folder, but the products of the task are instead located in target/libs."
- val newMsilTaskDescription = "create a msil and replace the corresponding jar in the libs folder."
-}
diff --git a/project/build/ScalaTools.scala b/project/build/ScalaTools.scala
deleted file mode 100644
index d74639d63a..0000000000
--- a/project/build/ScalaTools.scala
+++ /dev/null
@@ -1,179 +0,0 @@
-import java.io.{FileInputStream, File, InputStream, FileWriter}
-import sbt._
-import scala.io._
-
-/**
- * Create the scala binaries
- * Based on scala.tools.ant.ScalaTool
- * @author Grégory Moix (for the sbt adaptation)
- */
-trait ScalaTools {
- self: BasicLayer =>
-
- lazy val templatesLocation = compilerConfig.srcDir/ "scala" / "tools" / "ant" / "templates"
- lazy val unixTemplate = templatesLocation / "tool-unix.tmpl"
- lazy val winTemplate = templatesLocation / "tool-windows.tmpl"
-
-
- // XXX encoding and generalize
- private def getResourceAsCharStream(resource: Path): Stream[Char] = {
- val stream = new FileInputStream(resource.asFile)
- def streamReader(): Stream[Char] = stream.read match {
- case -1 => Stream.empty
- case value => Stream.cons(value.asInstanceOf[Char], streamReader())
-
- }
- if (stream == null) {
- log.debug("Stream was null")
- Stream.empty
- }
-
- //else Stream continually stream.read() takeWhile (_ != -1) map (_.asInstanceOf[Char]) // Does not work in scala 2.7.7
- else streamReader
- }
-
-
- // Converts a variable like @SCALA_HOME@ to ${SCALA_HOME} when pre = "${" and post = "}"
- private def transposeVariableMarkup(text: String, pre: String, post: String) : String = {
- val chars = Source.fromString(text)
- val builder = new StringBuilder()
-
- while (chars.hasNext) {
- val char = chars.next
- if (char == '@') {
- var char = chars.next
- val token = new StringBuilder()
- while (chars.hasNext && char != '@') {
- token.append(char)
- char = chars.next
- }
- if (token.toString == "")
- builder.append('@')
- else
- builder.append(pre + token.toString + post)
- } else builder.append(char)
- }
- builder.toString
- }
-
- private def readAndPatchResource(resource: Path, tokens: Map[String, String]): String = {
- val chars = getResourceAsCharStream(resource).elements
- val builder = new StringBuilder()
-
- while (chars.hasNext) {
- val char = chars.next
- if (char == '@') {
- var char = chars.next
- val token = new StringBuilder()
- while (chars.hasNext && char != '@') {
- token.append(char)
- char = chars.next
- }
- if (tokens.contains(token.toString))
- builder.append(tokens(token.toString))
- else if (token.toString == "")
- builder.append('@')
- else
- builder.append("@" + token.toString + "@")
- } else builder.append(char)
- }
- builder.toString
- }
-
- private def writeFile(file: File, content: String, makeExecutable: Boolean): Option[String] =
- if (file.exists() && !file.canWrite())
- Some("File " + file + " is not writable")
- else {
- val writer = new FileWriter(file, false)
- writer.write(content)
- writer.close()
- file.setExecutable(makeExecutable)
- None
- }
-
- /** Gets the value of the classpath attribute in a Scala-friendly form.
- * @return The class path as a list of files. */
- private def getUnixclasspath(classpath: List[String]): String =
- transposeVariableMarkup(classpath.mkString("", ":", "").replace('\\', '/'), "${", "}")
-
- /** Gets the value of the classpath attribute in a Scala-friendly form.
- * @return The class path as a list of files. */
- private def getWinclasspath(classpath: List[String]): String =
- transposeVariableMarkup(classpath.mkString("", ";", "").replace('/', '\\'), "%", "%")
-
- /** Performs the tool creation of a tool with for a given os
- * @param file
- * @param mainClas
- * @param properties
- * @param javaFlags
- * @param toolFlags
- * @param classPath
- * @param template
- * @param classpathFormater
- */
- private def tool(template: Path, classpathFormater: List[String] => String, file: Path, mainClass: String,
- properties: String, javaFlags: String, toolFlags: String, classPath: List[Path], makeExecutable: Boolean): Option[String] = {
- val patches = Map (
- ("class", mainClass),
- ("properties", properties),
- ("javaflags", javaFlags),
- ("toolflags", toolFlags),
- ("classpath", classpathFormater(classPath.map(_.absolutePath)))
- )
-
- val result = readAndPatchResource(template, patches)
- writeFile(file.asFile, result, makeExecutable)
-
- }
- private def generateTool(config: ToolConfiguration): Option[String] =
- generateTool(config.toolName, config.destination, config.mainClass, config.properties, config.javaFlags, config.toolFlags, config.classPath)
-
- private def generateTool(toolName: String, destination: Path, mainClass: String, properties: String, javaFlags: String, toolFlags: String, classPath: List[Path]): Option[String] ={
- val unixFile = destination / toolName
- val winFile = destination /(toolName + ".bat")
- tool(unixTemplate, getUnixclasspath, unixFile, mainClass, properties, javaFlags, toolFlags, classPath, true) orElse
- tool(winTemplate, getWinclasspath, winFile, mainClass, properties, javaFlags, toolFlags, classPath, false)
- }
-
-
- /*============================================================================*\
- ** Definition of the different tools **
- \*============================================================================*/
- private val defaultJavaFlags = "-Xmx256M -Xms32M"
-
- /**
- * A class that holds the different parameters of a tool
- */
- class ToolConfiguration(val toolName: String, val destination: Path, val mainClass: String, val properties: String, val javaFlags: String, val toolFlags: String, val classPath: List[Path])
-
- /**
- * Generate all tools
- * @param destination Root folder where all the binaries will be written
- * @param classpath Should be specified when you want to use a specific classpath, could be Nil if you want
- * to make the bin use what is in the lib folder of the distribution.
- */
- def tools(destination: Path, classpath: List[Path]) = task {
- val scala = new ToolConfiguration("scala", destination, "scala.tools.nsc.MainGenericRunner", "",defaultJavaFlags, "", classpath)
- val scalac = new ToolConfiguration("scalac", destination, "scala.tools.nsc.Main", "",defaultJavaFlags, "", classpath)
- val scaladoc = new ToolConfiguration("scaladoc",destination,"scala.tools.nsc.ScalaDoc", "",defaultJavaFlags,"", classpath)
- val fsc = new ToolConfiguration("fsc", destination,"scala.tools.nsc.CompileClient", "",defaultJavaFlags, "", classpath)
- val scalap = new ToolConfiguration("scalap",destination, "scala.tools.scalap.Main", "",defaultJavaFlags, "", classpath)
-
-
- val toolList = scala :: scalac :: scaladoc :: fsc :: scalap :: Nil
-
- def process(list: List[ToolConfiguration]): Option[String] = list match {
- case x :: xs => {
- log.debug("Generating "+x.toolName+" bin")
- generateTool(x) orElse process(xs)
- }
- case Nil => None
-
- }
- FileUtilities.createDirectory(destination, log)
- process(toolList)
-
- }
-}
-
-
diff --git a/project/build/Scaladoc.scala b/project/build/Scaladoc.scala
deleted file mode 100644
index 39bcb5226e..0000000000
--- a/project/build/Scaladoc.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-import sbt._
-import xsbt.AnalyzingCompiler
-
-trait Scaladoc {
- self: BasicLayer with Packer =>
-
- lazy val documentationDestination = outputRootPath / "scaladoc"
- lazy val libraryDocumentationDestination = documentationDestination / "library"
- lazy val compilerDocumentationDestination = documentationDestination / "compiler"
- lazy val libraryDoc = {
- val reflect = librarySrcDir / "scala" / "reflect"
- val runtime = librarySrcDir / "scala" / "runtime"
-
- ((librarySrcDir +++ actorsSrcDir +++ swingSrcDir)**("*.scala")---
- reflect / "Code.scala" ---
- reflect / "Manifest.scala" ---
- reflect / "Print.scala" ---
- reflect / "Symbol.scala" ---
- reflect / "Tree.scala" ---
- reflect / "Type.scala" ---
- reflect / "TypedCode.scala" ---
- runtime /"ScalaRunTime.scala" ---
- runtime / "StreamCons.scala" ---
- runtime / "StringAdd.scala" ---
- runtime * ("*$.scala") ---
- runtime *("*Array.scala")
- )
-
- }
- lazy val compilerDoc = {
- compilerSrcDir **("*.scala")
- }
- lazy val classpath ={
- (antJar +++ jlineJar +++ msilJar +++ fjbgJar +++ forkJoinJar +++ outputLibraryJar +++ outputCompilerJar +++ outputPartestJar +++ outputScalapJar ).get
-
- }
- lazy val scaladoc = task(maybeFork(generateScaladoc, "Error generating scaladoc")) dependsOn pack
-
- lazy val generateScaladoc = task {
- instanceScope[Option[String]]{ scala =>
- lazy val compiler = new AnalyzingCompiler(scala, componentManager, xsbt.ClasspathOptions.manual, log)
- val docGenerator = new sbt.Scaladoc(50, compiler)
- docGenerator("Scala "+ versionNumber+" API", libraryDoc.get, classpath, libraryDocumentationDestination, Seq(), log) orElse
- docGenerator("Scala Compiler"+ versionNumber+" API", compilerDoc.get, classpath, compilerDocumentationDestination, Seq(), log)
- }
- }
-
-}
diff --git a/project/plugins/Plugins.scala b/project/plugins/Plugins.scala
deleted file mode 100644
index 15ee162329..0000000000
--- a/project/plugins/Plugins.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-import sbt._
-
-class Plugins(info: ProjectInfo) extends PluginDefinition(info) {
- val sbtIdeaRepo = "sbt-idea-repo" at "http://mpeltonen.github.com/maven/"
- val sbtIdea = "com.github.mpeltonen" % "sbt-idea-plugin" % "0.2.0"
-} \ No newline at end of file
diff --git a/project/plugins/build.sbt b/project/plugins/build.sbt
new file mode 100644
index 0000000000..71bf3b9379
--- /dev/null
+++ b/project/plugins/build.sbt
@@ -0,0 +1,2 @@
+resolvers += Resolver.url("Typesafe nightlies", url("https://typesafe.artifactoryonline.com/typesafe/ivy-snapshots/"))(Resolver.ivyStylePatterns)
+
diff --git a/project/plugins/project/Build.scala b/project/plugins/project/Build.scala
new file mode 100644
index 0000000000..f382cf7015
--- /dev/null
+++ b/project/plugins/project/Build.scala
@@ -0,0 +1,7 @@
+import sbt._
+object PluginDef extends Build {
+ override def projects = Seq(root)
+ lazy val root = Project("plugins", file(".")) dependsOn(proguard)
+ lazy val proguard = uri("git://github.com/jsuereth/xsbt-proguard-plugin.git")
+ //lazy val proguard = uri("git://github.com/siasia/xsbt-proguard-plugin.git")
+}