author     Paul Phillips <paulp@improving.org>  2013-06-13 05:54:44 -0400
committer  Paul Phillips <paulp@improving.org>  2013-06-13 11:47:43 -0400
commit     77bf3a09328709f702bc066fe93fc3e12cd64ba2 (patch)
tree       ee5f6b47e416b2a5c09cef0aa8ae4bdc42757599 /project
parent     5345eb27373953a3c171a7e2f9ff302db9045d33 (diff)
download   scala-77bf3a09328709f702bc066fe93fc3e12cd64ba2.tar.gz
           scala-77bf3a09328709f702bc066fe93fc3e12cd64ba2.tar.bz2
           scala-77bf3a09328709f702bc066fe93fc3e12cd64ba2.zip
Removed sbt build.
Difficult though it may be to accept, it must go. We couldn't keep it working with active maintenance; after eight months of neglect there is no chance. Nobody is working on it or using it. The code will remain in the history if anyone wants it. One of the most annoying experiences one can have when building a new project is finding out one has been fiddling with an abandoned build system which isn't even expected to work. Sometimes I check out a scala project and there is a build.xml, a pom.xml, and a project directory. We should not be among those who sow such build confusion.
Diffstat (limited to 'project')
-rw-r--r--  project/Build.scala               334
-rw-r--r--  project/Layers.scala              116
-rw-r--r--  project/Packaging.scala           129
-rw-r--r--  project/Partest.scala             140
-rw-r--r--  project/Release.scala              30
-rw-r--r--  project/RemoteDependencies.scala   53
-rw-r--r--  project/Sametest.scala             63
-rw-r--r--  project/ScalaBuildKeys.scala       23
-rw-r--r--  project/ScalaToolRunner.scala      21
-rw-r--r--  project/ShaResolve.scala          148
-rw-r--r--  project/Testing.scala              41
-rw-r--r--  project/VerifyClassLoad.scala      46
-rw-r--r--  project/Versions.scala            142
-rw-r--r--  project/plugins.sbt                 9
-rw-r--r--  project/project/Build.scala         7
15 files changed, 0 insertions, 1302 deletions
diff --git a/project/Build.scala b/project/Build.scala
deleted file mode 100644
index efa8a7a038..0000000000
--- a/project/Build.scala
+++ /dev/null
@@ -1,334 +0,0 @@
-import sbt._
-import Keys._
-import partest._
-import ScalaBuildKeys._
-import Release._
-
-
-object ScalaBuild extends Build with Layers with Packaging with Testing {
-
- // Build wide settings:
- override lazy val settings = super.settings ++ Versions.settings ++ Seq(
- autoScalaLibrary := false,
- resolvers += Resolver.url(
- "Typesafe nightlies",
- url("https://typesafe.artifactoryonline.com/typesafe/ivy-snapshots/")
- )(Resolver.ivyStylePatterns),
- resolvers ++= Seq(
- "junit interface repo" at "https://repository.jboss.org/nexus/content/repositories/scala-tools-releases",
- ScalaToolsSnapshots
- ),
- organization := "org.scala-lang",
- version <<= Versions.mavenVersion,
- pomExtra := epflPomExtra
- )
-
- // Collections of projects to run 'compile' on.
- lazy val compiledProjects = Seq(quickLib, quickComp, continuationsLibrary, actors, swing, forkjoin)
- // Collection of projects to 'package' and 'publish' together.
- lazy val packagedBinaryProjects = Seq(scalaLibrary, scalaCompiler, swing, actors, continuationsPlugin, jline, scalap)
- lazy val partestRunProjects = Seq(testsuite, continuationsTestsuite)
-
- private def epflPomExtra = (
- <xml:group>
- <inceptionYear>2002</inceptionYear>
- <licenses>
- <license>
- <name>BSD-like</name>
- <url>http://www.scala-lang.org/downloads/license.html</url>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- </scm>
- <issueManagement>
- <system>jira</system>
- <url>http://issues.scala-lang.org</url>
- </issueManagement>
- </xml:group>
- )
-
- // Settings used to make sure publishing goes smoothly.
- def publishSettings: Seq[Setting[_]] = Seq(
- ivyScala ~= ((is: Option[IvyScala]) => is.map(_.copy(checkExplicit = false))),
- pomIncludeRepository := (_ => false),
- publishMavenStyle := true,
- makePomConfiguration <<= makePomConfiguration apply (_.copy(configurations = Some(Seq(Compile, Default)))),
- pomExtra := epflPomExtra
- )
-
- // Settings for root project. These are aggregate tasks against the rest of the build.
- def projectSettings: Seq[Setting[_]] = publishSettings ++ Seq(
- doc in Compile <<= (doc in documentation in Compile).identity,
- // These next two aggregate commands on several projects and return results that are to be ignored by remaining tasks.
- compile in Compile <<= compiledProjects.map(p => compile in p in Compile).join.map(_.head),
- // TODO - just clean target? i.e. target map IO.deleteRecursively
- clean <<= (compiledProjects ++ partestRunProjects).map(p => clean in p).dependOn,
- packageBin in Compile <<= packagedBinaryProjects.map(p => packageBin in p in Compile).join.map(_.head),
- // TODO - Make sure scalaLibrary has packageDoc + packageSrc from documentation attached...
- publish <<= packagedBinaryProjects.map(p => publish in p).join.map(_.head),
- publishLocal <<= packagedBinaryProjects.map(p => publishLocal in p).join.map(_.head),
- packageDoc in Compile <<= (packageDoc in documentation in Compile).identity,
- packageSrc in Compile <<= (packageSrc in documentation in Compile).identity,
- test in Test <<= (runPartest in testsuite, runPartest in continuationsTestsuite, checkSame in testsuite) map { (a,b,c) => () },
- lockerLock <<= (lockFile in lockerLib, lockFile in lockerComp, compile in Compile in lockerLib, compile in Compile in lockerComp) map { (lib, comp, _, _) =>
- Seq(lib,comp).foreach(f => IO.touch(f))
- },
- lockerUnlock <<= (lockFile in lockerLib, lockFile in lockerComp) map { (lib, comp) =>
- Seq(lib,comp).foreach(IO.delete)
- },
- genBinQuick <<= (genBinQuick in scaladist).identity,
- makeDist <<= (makeDist in scaladist).identity,
- makeExplodedDist <<= (makeExplodedDist in scaladist).identity,
- // Note: We override unmanagedSources so that ~ compile will look at all these sources, then run our aggregated compile...
- unmanagedSourceDirectories in Compile <<= baseDirectory apply (_ / "src") apply { dir =>
- Seq("library/scala","actors","compiler","swing","continuations/library","forkjoin") map (dir / _)
- },
- // TODO - Make exported products == makeDist so we can use this when creating a *real* distribution.
- commands += Release.pushStarr
- )
- // Note: Root project is determined by the lowest-alphabetical project that has baseDirectory as file("."). We use aaa_ to 'win'.
- lazy val aaa_root = Project("scala", file(".")) settings(projectSettings: _*) settings(ShaResolve.settings: _*)
-
- // External dependencies used for various projects
- lazy val externalDeps: Setting[_] = libraryDependencies <<= (sbtVersion)(v =>
- Seq(
- "org.apache.ant" % "ant" % "1.8.2",
- "org.scala-sbt" % "compiler-interface" % v % "provided"
- )
- )
-
- def fixArtifactSrc(dir: File, name: String) = name match {
- case x if x startsWith "scala-" => dir / "src" / (name drop 6)
- case x => dir / "src" / name
- }
-
- // These are setting overrides for most artifacts in the Scala build file.
- def settingOverrides: Seq[Setting[_]] = publishSettings ++ Seq(
- crossPaths := false,
- autoScalaLibrary := false,
- // Work around a bug where scala-library (and forkjoin) is put on classpath for analysis.
- classpathOptions := ClasspathOptions.manual,
- publishArtifact in packageDoc := false,
- publishArtifact in packageSrc := false,
- target <<= (baseDirectory, name) apply (_ / "target" / _),
- (classDirectory in Compile) <<= target(_ / "classes"),
- javacOptions ++= Seq("-target", "1.5", "-source", "1.5"),
- scalaSource in Compile <<= (baseDirectory, name) apply fixArtifactSrc,
- javaSource in Compile <<= (baseDirectory, name) apply fixArtifactSrc,
- unmanagedJars in Compile := Seq(),
- // Most libs in the compiler use this order to build.
- compileOrder in Compile := CompileOrder.JavaThenScala,
- lockFile <<= target(_ / "compile.lock"),
- skip in Compile <<= lockFile map (_.exists),
- lock <<= lockFile map (f => IO.touch(f)),
- unlock <<= lockFile map IO.delete
- )
-
- // --------------------------------------------------------------
- // Libraries used by Scalac that change infrequently
- // (or hopefully so).
- // --------------------------------------------------------------
-
- // Jline nested project. Compile this sucker once and be done.
- lazy val jline = Project("jline", file("src/jline"))
- // Our wrapped version of asm.
- lazy val asm = Project("asm", file(".")) settings(settingOverrides : _*)
- // Forkjoin backport
- lazy val forkjoin = Project("forkjoin", file(".")) settings(settingOverrides : _*)
-
- // --------------------------------------------------------------
- // The magic kingdom.
- // Layered compilation of Scala.
- // Stable Reference -> Locker ('Lockable' dev version) -> Quick -> Strap (Binary compatibility testing)
- // --------------------------------------------------------------
-
- // Need a report on this...
- // TODO - Resolve STARR from a repo..
- lazy val STARR = scalaInstance <<= (appConfiguration, ShaResolve.pullBinaryLibs in ThisBuild) map { (app, _) =>
- val launcher = app.provider.scalaProvider.launcher
- val library = file("lib/scala-library.jar")
- val compiler = file("lib/scala-compiler.jar")
- val libJars = (file("lib") * "*.jar").get filterNot Set(library, compiler)
- ScalaInstance("starr", library, compiler, launcher, libJars: _*)
- }
-
- // Locker is a lockable Scala compiler that can be built from 'current' source to perform rapid development.
- lazy val (lockerLib, lockerReflect, lockerComp) = makeLayer("locker", STARR, autoLock = true)
- lazy val locker = Project("locker", file(".")) aggregate(lockerLib, lockerReflect, lockerComp)
-
- // Quick is the general purpose project layer for the Scala compiler.
- lazy val (quickLib, quickReflect, quickComp) = makeLayer("quick", makeScalaReference("locker", lockerLib, lockerReflect, lockerComp))
- lazy val quick = Project("quick", file(".")) aggregate(quickLib, quickReflect, quickComp)
-
- // Reference to quick scala instance.
- lazy val quickScalaInstance = makeScalaReference("quick", quickLib, quickReflect, quickComp)
- def quickScalaLibraryDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickLib in Compile).identity
- def quickScalaReflectDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickReflect in Compile).identity
- def quickScalaCompilerDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickComp in Compile).identity
-
- // Strapp is used to test binary 'sameness' between things built with locker and things built with quick.
- lazy val (strappLib, strappReflect, strappComp) = makeLayer("strapp", quickScalaInstance)
-
- // --------------------------------------------------------------
- // Projects dependent on layered compilation (quick)
- // --------------------------------------------------------------
- def addCheaterDependency(projectName: String): Setting[_] =
- pomPostProcess <<= (version, organization, pomPostProcess) apply { (v,o,k) =>
- val dependency: scala.xml.Node =
- <dependency>
- <groupId>{o}</groupId>
- <artifactid>{projectName}</artifactid>
- <version>{v}</version>
- </dependency>
- def fixDependencies(node: scala.xml.Node): scala.xml.Node = node match {
- case <dependencies>{nested@_*}</dependencies> => <dependencies>{dependency}{nested}</dependencies>
- case x => x
- }
- // This is a hack to get around issues where \ and \\ don't work if any of the children are `scala.xml.Group`.
- def hasDependencies(root: scala.xml.Node): Boolean =
- (root.child collectFirst {
- case n: scala.xml.Elem if n.label == "dependencies" => n
- } isEmpty)
- // TODO - Keep namespace on project...
- k andThen {
- case n @ <project>{ nested@_*}</project> if hasDependencies(n) =>
- <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0">{nested}<dependencies>{dependency}</dependencies></project>
- case <project>{ nested@_*}</project> =>
- <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0">{ nested map fixDependencies }</project>
- }
- }
-
- // TODO - in sabbus, these all use locker to build... I think this way is better, but let's farm this idea around.
- lazy val dependentProjectSettings = settingOverrides ++ Seq(quickScalaInstance, quickScalaLibraryDependency, addCheaterDependency("scala-library"))
- lazy val actors = Project("scala-actors", file(".")) settings(dependentProjectSettings:_*) dependsOn(forkjoin % "provided")
- lazy val swing = Project("scala-swing", file(".")) settings(dependentProjectSettings:_*) dependsOn(actors % "provided")
- // This project will generate man pages (in man1 and html) for scala.
- lazy val manmakerSettings: Seq[Setting[_]] = dependentProjectSettings :+ externalDeps
- lazy val manmaker = Project("manual", file(".")) settings(manmakerSettings:_*)
-
- // Things that compile against the compiler.
- lazy val compilerDependentProjectSettings = dependentProjectSettings ++ Seq(quickScalaReflectDependency, quickScalaCompilerDependency, addCheaterDependency("scala-compiler"))
-
- lazy val scalacheck = Project("scalacheck", file(".")) settings(compilerDependentProjectSettings:_*) dependsOn(actors % "provided")
- lazy val partestSettings = compilerDependentProjectSettings :+ externalDeps
- lazy val partest = Project("partest", file(".")) settings(partestSettings:_*) dependsOn(actors,forkjoin,scalap,asm)
- lazy val scalapSettings = compilerDependentProjectSettings ++ Seq(
- name := "scalap",
- exportJars := true
- )
- lazy val scalap = Project("scalap", file(".")) settings(scalapSettings:_*)
-
- // --------------------------------------------------------------
- // Continuations plugin + library
- // --------------------------------------------------------------
- lazy val continuationsPluginSettings = compilerDependentProjectSettings ++ Seq(
- scalaSource in Compile <<= baseDirectory(_ / "src/continuations/plugin/"),
- resourceDirectory in Compile <<= baseDirectory(_ / "src/continuations/plugin/"),
- exportJars := true,
- name := "continuations" // Note: This artifact is directly exported.
-
- )
- lazy val continuationsPlugin = Project("continuations-plugin", file(".")) settings(continuationsPluginSettings:_*)
- lazy val continuationsLibrarySettings = dependentProjectSettings ++ Seq(
- scalaSource in Compile <<= baseDirectory(_ / "src/continuations/library/"),
- scalacOptions in Compile <++= (exportedProducts in Compile in continuationsPlugin) map {
- case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
- }
- )
- lazy val continuationsLibrary = Project("continuations-library", file(".")) settings(continuationsLibrarySettings:_*)
-
- // TODO - OSGi Manifest
-
- // --------------------------------------------------------------
- // Real Library Artifact
- // --------------------------------------------------------------
- val allSubpathsCopy = (dir: File) => (dir.*** --- dir) x (relativeTo(dir)|flat)
- def productTaskToMapping(products : Seq[File]) = products flatMap { p => allSubpathsCopy(p) }
- lazy val packageScalaLibBinTask = Seq(quickLib, continuationsLibrary, forkjoin).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
- lazy val scalaLibArtifactSettings: Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaLibBinTask)) ++ Seq(
- name := "scala-library",
- crossPaths := false,
- exportJars := true,
- autoScalaLibrary := false,
- unmanagedJars in Compile := Seq(),
- packageDoc in Compile <<= (packageDoc in documentation in Compile).identity,
- packageSrc in Compile <<= (packageSrc in documentation in Compile).identity,
- fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
- quickScalaInstance,
- target <<= (baseDirectory, name) apply (_ / "target" / _)
- )
- lazy val scalaLibrary = Project("scala-library", file(".")) settings(publishSettings:_*) settings(scalaLibArtifactSettings:_*)
-
- // --------------------------------------------------------------
- // Real Reflect Artifact
- // --------------------------------------------------------------
-
- lazy val packageScalaReflect = Seq(quickReflect).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
- lazy val scalaReflectArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaReflect)) ++ Seq(
- name := "scala-reflect",
- crossPaths := false,
- exportJars := true,
- autoScalaLibrary := false,
- unmanagedJars in Compile := Seq(),
- fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
- quickScalaInstance,
- target <<= (baseDirectory, name) apply (_ / "target" / _)
- )
- lazy val scalaReflect = Project("scala-reflect", file(".")) settings(publishSettings:_*) settings(scalaReflectArtifactSettings:_*) dependsOn(scalaLibrary)
-
-
- // --------------------------------------------------------------
- // Real Compiler Artifact
- // --------------------------------------------------------------
- lazy val packageScalaBinTask = Seq(quickComp, asm).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
- lazy val scalaBinArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaBinTask)) ++ Seq(
- name := "scala-compiler",
- crossPaths := false,
- exportJars := true,
- autoScalaLibrary := false,
- unmanagedJars in Compile := Seq(),
- fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
- quickScalaInstance,
- target <<= (baseDirectory, name) apply (_ / "target" / _)
- )
- lazy val scalaCompiler = Project("scala-compiler", file(".")) settings(publishSettings:_*) settings(scalaBinArtifactSettings:_*) dependsOn(scalaReflect)
- lazy val fullQuickScalaReference = makeScalaReference("pack", scalaLibrary, scalaReflect, scalaCompiler)
-
-
- // --------------------------------------------------------------
- // Generating Documentation.
- // --------------------------------------------------------------
-
- // TODO - Migrate this into the dist project.
- // Scaladocs
- lazy val documentationSettings: Seq[Setting[_]] = dependentProjectSettings ++ Seq(
- // TODO - Make these work for realz.
- defaultExcludes in unmanagedSources in Compile := ((".*" - ".") || HiddenFileFilter ||
- "reflect/Print.scala" ||
- "reflect/Symbol.scala" ||
- "reflect/Tree.scala" ||
- "reflect/Type.scala" ||
- "runtime/*$.scala" ||
- "runtime/ScalaRuntime.scala" ||
- "runtime/StringAdd.scala" ||
- "scala/swing/test/*"),
- sourceFilter in Compile := ("*.scala"),
- unmanagedSourceDirectories in Compile <<= baseDirectory apply { dir =>
- Seq(dir / "src" / "library" / "scala", dir / "src" / "actors", dir / "src" / "swing", dir / "src" / "continuations" / "library")
- },
- compile := inc.Analysis.Empty,
- // scaladocOptions in Compile <++= (baseDirectory) map (bd =>
- // Seq("-sourcepath", (bd / "src" / "library").getAbsolutePath,
- // "-doc-no-compile", (bd / "src" / "library-aux").getAbsolutePath,
- // "-doc-source-url", """https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk/src/€{FILE_PATH}.scala#L1""",
- // "-doc-root-content", (bd / "compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt").getAbsolutePath
- // )),
- classpathOptions in Compile := ClasspathOptions.manual
- )
- lazy val documentation = (
- Project("documentation", file("."))
- settings (documentationSettings: _*)
- dependsOn(quickLib, quickComp, actors, forkjoin, swing, continuationsLibrary)
- )
-}
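The removed Build.scala above is written against the sbt 0.12 key-composition API, in which <<=, <+= and <++= wire keys together via apply and map over tuples. As a rough sketch only (not code from this repository, and assuming the .value macro syntax introduced with sbt 0.13), a few of the settings shown above would read:

    // Sketch, assuming sbt 0.13+ syntax; key names as in the removed file.
    version := Versions.mavenVersion.value                   // was: version <<= Versions.mavenVersion
    target := baseDirectory.value / "target" / name.value    // was: target <<= (baseDirectory, name) apply (_ / "target" / _)
    skip in Compile := lockFile.value.exists                 // was: skip in Compile <<= lockFile map (_.exists)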
diff --git a/project/Layers.scala b/project/Layers.scala
deleted file mode 100644
index 6c939d0ff7..0000000000
--- a/project/Layers.scala
+++ /dev/null
@@ -1,116 +0,0 @@
-import sbt._
-import Keys._
-import com.jsuereth.git.GitKeys.gitRunner
-import ScalaBuildKeys.lock
-
-/** This trait stores all the helper methods to generate layers in Scala's layered build. */
-trait Layers extends Build {
- // TODO - Clean this up or use a self-type.
-
- /** Default SBT overrides needed for layered compilation. */
- def settingOverrides: Seq[Setting[_]]
- /** Reference to the jline project */
- def jline: Project
- /** Reference to forkjoin library */
- def forkjoin: Project
- /** Reference to the ASM wrapped project. */
- def asm: Project
- /** A setting that adds some external dependencies. */
- def externalDeps: Setting[_]
- /** The root project. */
- def aaa_root: Project
-
- /** Creates a reference Scala version that can be used to build other projects. This takes in the raw
- * library, reflect and compiler projects, as well as a string representing the layer name (used for compiling the compiler-interface).
- */
- def makeScalaReference(layer: String, library: Project, reflect: Project, compiler: Project) =
- scalaInstance <<= (appConfiguration in library,
- version in library,
- (exportedProducts in library in Compile),
- (exportedProducts in reflect in Compile),
- (exportedProducts in compiler in Compile),
- (fullClasspath in jline in Runtime),
- (exportedProducts in asm in Runtime)) map {
- (app, version: String, lib: Classpath, reflect: Classpath, comp: Classpath, jline: Classpath, asm: Classpath) =>
- val launcher = app.provider.scalaProvider.launcher
- (lib,comp) match {
- case (Seq(libraryJar), Seq(compilerJar)) =>
- ScalaInstance(
- version + "-" + layer + "-",
- libraryJar.data,
- compilerJar.data,
- launcher,
- ((jline.files ++ asm.files ++ reflect.files):_*))
- case _ => error("Cannot build a ScalaReference with more than one classpath element")
- }
- }
-
- /** Creates a "layer" of Scala compilation. That is, this will build the next version of Scala from a previous version.
- * Returns the library, reflect and compiler projects for the next layer.
- * Note: The library and compiler are not *complete* in the sense that they are missing things like "actors".
- */
- def makeLayer(layer: String, referenceScala: Setting[Task[ScalaInstance]], autoLock: Boolean = false) : (Project, Project, Project) = {
- val autoLockSettings: Seq[Setting[_]] =
- if(autoLock) Seq(compile in Compile <<= (compile in Compile, lock) apply { (c, l) =>
- c flatMapR { cResult =>
- val result = Result.tryValue(cResult)
- l mapR { tx => result }
- }
- })
- else Seq.empty
-
-
- val library = Project(layer + "-library", file(".")) settings(settingOverrides: _*) settings(autoLockSettings:_*) settings(
- version := layer,
- // TODO - use depends on.
- unmanagedClasspath in Compile <<= (exportedProducts in forkjoin in Compile).identity,
- managedClasspath in Compile := Seq(),
- scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "library"),
- resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "library"),
- defaultExcludes in unmanagedResources := ("*.scala" | "*.java" | "*.disabled"),
- // TODO - Allow other scalac option settings.
- scalacOptions in Compile <++= (scalaSource in Compile) map (src => Seq("-sourcepath", src.getAbsolutePath)),
- resourceGenerators in Compile <+= (resourceManaged, Versions.scalaVersions, skip in Compile, streams) map Versions.generateVersionPropertiesFile("library.properties"),
- referenceScala
- )
-
- // Define the reflection
- val reflect = Project(layer + "-reflect", file(".")) settings(settingOverrides:_*) settings(autoLockSettings:_*) settings(
- version := layer,
- scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "reflect"),
- resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "reflect"),
- defaultExcludes := ("tests"),
- defaultExcludes in unmanagedResources := "*.scala",
- resourceGenerators in Compile <+= (resourceManaged, Versions.scalaVersions, skip in Compile, streams) map Versions.generateVersionPropertiesFile("reflect.properties"),
- // TODO - Use depends on *and* SBT's magic dependency mechanisms...
- unmanagedClasspath in Compile <<= Seq(forkjoin, library).map(exportedProducts in Compile in _).join.map(_.flatten),
- externalDeps,
- referenceScala
- )
-
- // Define the compiler
- val compiler = Project(layer + "-compiler", file(".")) settings(settingOverrides:_*) settings(autoLockSettings:_*) settings(
- version := layer,
- scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "compiler"),
- resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "compiler"),
- defaultExcludes := ("tests"),
- defaultExcludes in unmanagedResources := "*.scala",
- resourceGenerators in Compile <+= (resourceManaged, Versions.scalaVersions, skip in Compile, streams) map Versions.generateVersionPropertiesFile("compiler.properties"),
- // Note, we might be able to use the default task, but for some reason ant was filtering files out. Not sure what's up, but we'll
- // stick with that for now.
- unmanagedResources in Compile <<= (baseDirectory) map {
- (bd) =>
- val dirs = Seq(bd / "src" / "compiler")
- dirs.descendentsExcept( ("*.xml" | "*.html" | "*.gif" | "*.png" | "*.js" | "*.css" | "*.tmpl" | "*.swf" | "*.properties" | "*.txt"),"*.scala").get
- },
- // TODO - Use depends on *and* SBT's magic dependency mechanisms...
- unmanagedClasspath in Compile <<= Seq(forkjoin, library, reflect, jline, asm).map(exportedProducts in Compile in _).join.map(_.flatten),
- externalDeps,
- referenceScala
- )
-
- // Return the generated projects.
- (library, reflect, compiler)
- }
-
-}
diff --git a/project/Packaging.scala b/project/Packaging.scala
deleted file mode 100644
index b0060283ac..0000000000
--- a/project/Packaging.scala
+++ /dev/null
@@ -1,129 +0,0 @@
-import sbt._
-import Keys._
-import ScalaBuildKeys._
-
-/** All the settings related to *packaging* the built scala software. */
-trait Packaging { self: ScalaBuild.type =>
-
- // --------------------------------------------------------------
- // Packaging a distro
- // --------------------------------------------------------------
- lazy val scalaDistSettings: Seq[Setting[_]] = Seq(
- crossPaths := false,
- target <<= (baseDirectory, name) apply (_ / "target" / _),
- scalaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
- autoScalaLibrary := false,
- unmanagedJars in Compile := Seq(),
- genBinRunner <<= (fullClasspath in quickComp in Runtime) map (new ScalaToolRunner(_)),
- binDir <<= target(_/"bin"),
- genBin <<= genBinTask(genBinRunner, binDir, fullClasspath in Runtime, false),
- binDir in genBinQuick <<= baseDirectory apply (_ / "target" / "bin"),
- // Configure the classpath this way to avoid having .jar files and previous layers on the classpath.
- fullClasspath in Runtime in genBinQuick <<= Seq(quickComp,quickLib,scalap,actors,swing,jline,forkjoin).map(classDirectory in Compile in _).join.map(Attributed.blankSeq),
- fullClasspath in Runtime in genBinQuick <++= (fullClasspath in Compile in jline),
- genBinQuick <<= genBinTask(genBinRunner, binDir in genBinQuick, fullClasspath in Runtime in genBinQuick, true),
- runManmakerMan <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitManPage", "man1", ".1"),
- runManmakerHtml <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitHtml", "doc", ".html"),
- // TODO - We could *really* clean this up in many ways. Let's look into making a Seq of "direct jars" (scalaLibrary, scalaCompiler, jline, scalap)
- // a seq of "plugin jars" (continuationsPlugin) and "binaries" (genBin) and "documentation" mappings (genBin) that this can aggregate.
- // really need to figure out a better way to pull jline + jansi.
- makeDistMappings <<= (genBin,
- runManmakerMan,
- runManmakerHtml,
- packageBin in scalaLibrary in Compile,
- packageBin in scalaCompiler in Compile,
- packageBin in jline in Compile,
- packageBin in continuationsPlugin in Compile,
- managedClasspath in jline in Compile,
- packageBin in scalap in Compile) map {
- (binaries, man, html, lib, comp, jline, continuations, jlineDeps, scalap) =>
- val jlineDepMap: Seq[(File, String)] = jlineDeps.map(_.data).flatMap(_ x Path.flat) map { case(a,b) => a -> ("lib/"+b) }
- binaries ++ man ++ html ++ jlineDepMap ++ Seq(
- lib -> "lib/scala-library.jar",
- comp -> "lib/scala-compiler.jar",
- jline -> "lib/jline.jar",
- continuations -> "misc/scala-devel/plugins/continuations.jar",
- scalap -> "lib/scalap.jar"
- )
- },
- // Add in some more dependencies
- makeDistMappings <+= (packageBin in swing in Compile) map (s => s -> "lib/scala-swing.jar"),
- makeDistMappings <+= (packageBin in scalaReflect in Compile) map (s => s -> "lib/scala-reflect.jar"),
- makeDist <<= (makeDistMappings, baseDirectory, streams) map { (maps, dir, s) =>
- s.log.debug("Map = " + maps.mkString("\n"))
- val file = dir / "target" / "scala-dist.zip"
- IO.zip(maps, file)
- s.log.info("Created " + file.getAbsolutePath)
- file
- },
- makeExplodedDist <<= (makeDistMappings, target, streams) map { (maps, dir, s) =>
- def sameFile(f: File, f2: File) = f.getCanonicalPath == f2.getCanonicalPath
- IO.createDirectory(dir)
- IO.copy(for {
- (file, name) <- maps
- val file2 = dir / name
- if !sameFile(file,file2)
- } yield (file, file2))
- // Hack to make binaries executable. TODO - Fix for JDK 5 and below...
- maps map (_._2) filter (_ startsWith "bin/") foreach (dir / _ setExecutable true)
- dir
- }
- )
- lazy val scaladist = (
- Project("dist", file("."))
- settings (scalaDistSettings: _*)
- )
-
-
-// Helpers to make a distribution
-
- /** Generates runner scripts for distribution. */
- def genBinTask(
- runner: ScopedTask[ScalaToolRunner],
- outputDir: ScopedSetting[File],
- classpath: ScopedTask[Classpath],
- useClasspath: Boolean
- ): Project.Initialize[sbt.Task[Seq[(File,String)]]] = {
- (runner, outputDir, classpath, streams) map { (runner, outDir, cp, s) =>
- IO.createDirectory(outDir)
- val classToFilename = Seq(
- "scala.tools.nsc.MainGenericRunner" -> "scala",
- "scala.tools.nsc.Main" -> "scalac",
- "scala.tools.nsc.ScalaDoc" -> "scaladoc",
- "scala.tools.nsc.CompileClient" -> "fsc",
- "scala.tools.scalap.Main" -> "scalap"
- )
- if (useClasspath) {
- val classpath = Build.data(cp).map(_.getCanonicalPath).distinct.mkString(",")
- s.log.debug("Setting classpath = " + classpath)
- runner setClasspath classpath
- }
- def genBinFiles(cls: String, dest: File) = {
- runner.setClass(cls)
- runner.setFile(dest)
- runner.execute()
- // TODO - Mark generated files as executable (755 or a+x) in a way that is *not* JDK6-specific...
- dest.setExecutable(true)
- }
- def makeBinMappings(cls: String, binName: String): Seq[(File,String)] = {
- val file = outDir / binName
- val winBinName = binName + ".bat"
- genBinFiles(cls, file)
- Seq( file -> ("bin/"+binName), outDir / winBinName -> ("bin/"+winBinName) )
- }
- classToFilename.flatMap((makeBinMappings _).tupled)
- }
- }
- /** Creates man pages for distribution. */
- def runManmakerTask(classpath: ScopedTask[Classpath], scalaRun: ScopedTask[ScalaRun], mainClass: String, dir: String, ext: String): Project.Initialize[Task[Seq[(File,String)]]] =
- (classpath, scalaRun, streams, target) map { (cp, runner, s, target) =>
- val binaries = Seq("fsc", "scala", "scalac", "scaladoc", "scalap")
- binaries map { bin =>
- val file = target / "man" / dir / (bin + ext)
- val classname = "scala.man1." + bin
- IO.createDirectory(file.getParentFile)
- toError(runner.run(mainClass, Build.data(cp), Seq(classname, file.getAbsolutePath), s.log))
- file -> ("man/" + dir + "/" + bin + ext)
- }
- }
-}
diff --git a/project/Partest.scala b/project/Partest.scala
deleted file mode 100644
index 2ea41ba80b..0000000000
--- a/project/Partest.scala
+++ /dev/null
@@ -1,140 +0,0 @@
-import sbt._
-
-import Build._
-import Keys._
-import Project.Initialize
-import complete._
-import scala.collection.{ mutable, immutable }
-
-/** This object defines the partest task keys and settings used to run the partest suite from sbt. */
-object partest {
-
- /** The key for the run-partest task that exists in Scala's test suite. */
- lazy val runPartest = TaskKey[Unit]("run-partest", "Runs the partest test suite against quick.")
- lazy val runPartestSingle = InputKey[Unit]("run-partest-single", "Runs a single partest test against quick.")
- lazy val runPartestFailed = TaskKey[Unit]("run-partest-failed", "Runs failed partest tests.")
- lazy val runPartestGrep = InputKey[Unit]("run-partest-grep", "Runs a single partest test against quick.")
- lazy val partestRunner = TaskKey[PartestRunner]("partest-runner", "Creates a runner that can run partest suites")
- lazy val partestTests = TaskKey[Map[String, Seq[File]]]("partest-tests", "Creates a map of test-type to a sequence of the test files/directories to test.")
- lazy val partestDirs = SettingKey[Map[String,File]]("partest-dirs", "The map of partest test type to directory associated with that test type")
-
- lazy val partestTaskSettings: Seq[Setting[_]] = Seq(
- javaOptions in partestRunner := Seq("-Xmx512M -Xms256M"),
- partestDirs <<= baseDirectory apply { bd =>
- partestTestTypes map (kind => kind -> (bd / "test" / "files" / kind)) toMap
- },
- partestRunner <<= partestRunnerTask(fullClasspath in Runtime, javaOptions in partestRunner),
- partestTests <<= partestTestsTask(partestDirs),
- runPartest <<= runPartestTask(partestRunner, partestTests, scalacOptions in Test),
- runPartestSingle <<= runSingleTestTask(partestRunner, partestDirs, scalacOptions in Test),
- runPartestFailed <<= runPartestTask(partestRunner, partestTests, scalacOptions in Test, Seq("--failed"))
- )
-
- // What's fun here is that we want "*.scala" files *and* directories in the base directory...
- def partestResources(base: File, testType: String): PathFinder = testType match {
- case "res" => base ** "*.res"
- // TODO - Only allow directories that have "*.scala" children...
- case _ => base * "*" filter { f => !f.getName.endsWith(".obj") && (f.isDirectory || f.getName.endsWith(".scala")) }
- }
- lazy val partestTestTypes = Seq("run", "jvm", "pos", "neg", "res", "shootout", "scalap", "specialized", "presentation", "scalacheck")
-
- // TODO - Figure out how to specify only a subset of resources...
- def partestTestsTask(testDirs: ScopedSetting[Map[String,File]]): Project.Initialize[Task[Map[String, Seq[File]]]] =
- testDirs map (m => m map { case (kind, dir) => kind -> partestResources(dir, kind).get })
-
- // TODO - Split partest task into Configurations and build a Task for each Configuration.
- // *then* mix all of them together for run-testsuite or something clever like this.
- def runPartestTask(runner: ScopedTask[PartestRunner], testRuns: ScopedTask[Map[String,Seq[File]]], scalacOptions: ScopedTask[Seq[String]], extraArgs: Seq[String] = Seq()): Initialize[Task[Unit]] = {
- (runner, testRuns, scalacOptions, streams) map {
- (runner, runs, scalaOpts, s) => runPartestImpl(runner, runs, scalaOpts, s, extraArgs)
- }
- }
- private def runPartestImpl(runner: PartestRunner, runs: Map[String, Seq[File]], scalacOptions: Seq[String], s: TaskStreams, extras: Seq[String] = Seq()): Unit = {
- val testArgs = runs.toSeq collect { case (kind, files) if files.nonEmpty => Seq("-" + kind, files mkString ",") } flatten
- val extraArgs = scalacOptions flatMap (opt => Seq("-scalacoption", opt))
-
- import collection.JavaConverters._
- val results = runner run Array(testArgs ++ extraArgs ++ extras: _*) asScala
- // TODO - save results
- val failures = results collect {
- case (path, "FAIL") => path + " [FAILED]"
- case (path, "TIMEOUT") => path + " [TIMEOUT]"
- }
-
- if (failures.isEmpty)
- s.log.info(""+results.size+" tests passed.")
- else {
- failures foreach (s.log error _)
- error("Test Failures! ("+failures.size+" of "+results.size+")")
- }
- }
-
- def convertTestsForAutoComplete(tests: Map[String, Seq[File]]): (Set[String], Set[String]) =
- (tests.keys.toSet, tests.values flatMap (_ map cleanFileName) toSet)
-
- /** Takes a test file, as sent to Partest, and cleans it up for auto-complete */
- def cleanFileName(file: File): String = {
- // TODO - Something intelligent here
- val TestPattern = ".*/test/(.*)".r
- file.getCanonicalPath match {
- case TestPattern(n) => n
- case _ => file.getName
- }
- }
-
- // TODO - Allow a filter for the second part of this...
- def runSingleTestParser(testDirs: Map[String, File]): State => Parser[(String, String)] = {
- import DefaultParsers._
- state => {
- Space ~> token(NotSpace examples testDirs.keys.toSet) flatMap { kind =>
- val files: Set[String] = testDirs get kind match {
- case Some(dir) =>
- partestResources(dir, kind).get flatMap (_ relativeTo dir) map (_ getName) toSet
- case _ =>
- Set()
- }
- Space ~> token(NotSpace examples files) map (kind -> _)
- }
- }
- }
-
- def runSingleTestTask(runner: ScopedTask[PartestRunner], testDirs: ScopedSetting[Map[String, File]], scalacOptions: ScopedTask[Seq[String]]) : Initialize[InputTask[Unit]] = {
- import sbinary.DefaultProtocol._
-
- InputTask(testDirs apply runSingleTestParser) { result =>
- (runner, result, testDirs, scalacOptions, streams) map {
- case (r, (kind, filter), dirs, o, s) =>
- // TODO - Use partest resources somehow to filter the filter correctly....
- val files: Seq[File] =
- if (filter == "*") partestResources(dirs(kind), kind).get
- else (dirs(kind) * filter).get
-
- runPartestImpl(r, Map(kind -> files), o, s)
- }
- }
- }
-
- def partestRunnerTask(classpath: ScopedTask[Classpath], javacOptions: TaskKey[Seq[String]]): Project.Initialize[Task[PartestRunner]] =
- (classpath, javacOptions) map ((cp, opts) => new PartestRunner(Build.data(cp), opts mkString " "))
-}
-
-class PartestRunner(classpath: Seq[File], javaOpts: String) {
- // Classloader that does *not* have this as parent, for differing Scala version.
- lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.toURI.toURL).toArray, null)
- lazy val (mainClass, mainMethod) = try {
- val c = classLoader.loadClass("scala.tools.partest.nest.SBTRunner")
- val m = c.getMethod("mainReflect", classOf[Array[String]])
- (c,m)
- }
- lazy val classPathArgs = Seq("-cp", classpath.map(_.getAbsoluteFile).mkString(java.io.File.pathSeparator))
- def run(args: Array[String]): java.util.Map[String,String] = try {
- // TODO - undo this setting after running. Also globals are bad.
- System.setProperty("partest.java_opts", javaOpts)
- val allArgs = (classPathArgs ++ args).toArray
- mainMethod.invoke(null, allArgs).asInstanceOf[java.util.Map[String,String]]
- } catch {
- case e =>
- //error("Could not run Partest: " + e)
- throw e
- }
-}
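To make runPartestImpl above concrete: the test map and scalac options are flattened into a single argument array before the reflective call to scala.tools.partest.nest.SBTRunner.mainReflect. A small sketch with hypothetical inputs (the test file names are made up):

    // Sketch only; the inputs below are hypothetical.
    val runs      = Map("pos" -> Seq(file("t100.scala"), file("t200.scala")))
    val scalaOpts = Seq("-deprecation")
    val testArgs  = runs.toSeq collect { case (kind, fs) if fs.nonEmpty => Seq("-" + kind, fs mkString ",") } flatten
    val extraArgs = scalaOpts flatMap (opt => Seq("-scalacoption", opt))
    // testArgs ++ extraArgs == Seq("-pos", "t100.scala,t200.scala", "-scalacoption", "-deprecation"),
    // which is what PartestRunner.run receives as its Array[String].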
diff --git a/project/Release.scala b/project/Release.scala
deleted file mode 100644
index feab8bdc8c..0000000000
--- a/project/Release.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-import sbt._
-import Keys._
-
-object Release {
-
- // TODO - Just make the STARR artifacts and dump the sha1 files.
-
- val starrLibs = Seq("scala-library.jar", "scala-reflect.jar", "scala-compiler.jar", "jline.jar")
-
- val pushStarr = Command.command("new-starr") { (state: State) =>
- /*val extracted = Project.extract(state)
- import extracted._
- // First run tests
- val (s1, result) = runTask(test in Test, state)
- // If successful, package artifacts
- val (s2, distDir) = runTask(makeExplodedDist, s1)
- // Then copy new libs in place
- val bd = extracted get baseDirectory
- for {
- jarName <- starrLibs
- jar = distDir / "lib" / jarName
- if jar.exists
- } IO.copyFile(jar, bd / "lib" / jarName)
- // Invalidate SHA1 files.
- ShaResolve.removeInvalidShaFiles(bd)
- // Now run tests *again*?
- s2*/
- state
- }
-}
diff --git a/project/RemoteDependencies.scala b/project/RemoteDependencies.scala
deleted file mode 100644
index 705b9dc402..0000000000
--- a/project/RemoteDependencies.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-import sbt._
-import Keys._
-import ScalaBuildKeys._
-
-
-object RemoteDependencies {
- def buildSettings(externalProjects: Set[URI], localScala: Setting[_]): Seq[Setting[_]] = Seq(
- commands += Command.command("fix-uri-projects") { (state: State) =>
- if(state.get(buildFixed) getOrElse false) state
- else {
- // TODO - fix up scalacheck's dependencies!
- val extracted = Project.extract(state)
- import extracted._
- val scalaVersionString = extracted get version
-
- def fix(s: Setting[_]): Setting[_] = s match {
- case ScopedExternalSetting(p, scalaInstance.key, setting) if externalProjects(p) => localScala mapKey Project.mapScope(_ => s.key.scope)
- // TODO - Fix Actors dependency...
- //case ScopedExternalSetting(p, libraryDependencies.key, setting) if externalProjects(p) => fixProjectDeps(s)
- case s => s
- }
- val transformed = session.mergeSettings map ( s => fix(s) )
- val scopes = transformed collect { case ScopedExternalSetting(p, _, s) if externalProjects(p) => s.key.scope } toSet
- // Create some fixers so we don't download scala or rely on it.
- // Also add dependencies that disappear in 2.10 for now...
- val fixers = for { scope <- scopes
- setting <- Seq(autoScalaLibrary := false,
- crossPaths := false,
- scalaVersion := scalaVersionString)
- } yield setting mapKey Project.mapScope(_ => scope)
- val newStructure = Load.reapply(transformed ++ fixers, structure)
- Project.setProject(session, newStructure, state).put(buildFixed, true)
- }
- },
- onLoad in Global <<= (onLoad in Global) apply (_ andThen { (state: State) =>
- "fix-uri-projects" :: state
- })
- )
-}
-
-
-
-/** Matcher to make updated remote project references easier. */
-object ScopedExternalSetting {
- def unapply[T](s: Setting[_]): Option[(URI, AttributeKey[_], Setting[_])] =
- s.key.scope.project match {
- case Select(p @ ProjectRef(uri, _)) => Some((uri, s.key.key, s))
- case _ => None
- }
-}
-
-
-
diff --git a/project/Sametest.scala b/project/Sametest.scala
deleted file mode 100644
index 6f12eb24b3..0000000000
--- a/project/Sametest.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-import sbt._
-
-import Build._
-import Keys._
-
-// This code is adapted from scala.tools.ant.Same by Gilles Dubochet.
-object SameTest {
-
- def checkSameBinaryProjects(lhs: Project, rhs: Project): Project.Initialize[Task[Unit]] =
- (classDirectory in Compile in lhs, classDirectory in Compile in rhs,
- compile in Compile in lhs, compile in Compile in rhs, streams) map { (lhs,rhs, _, _, s) =>
- // Now we generate a complete set of relative class file paths and then compare them pairwise.
- def relativeClasses(dir: File) = (dir ** "*.class").get.flatMap(IO.relativize(dir,_).toList)
- // This code adapted from SameTask in the compiler.
- def hasDifferentFiles(filePairs: Seq[(File,File)]): Boolean = {
- filePairs exists { case (a,b) =>
- if (!a.canRead || !b.canRead) {
- s.log.error("Either ["+a+"] or ["+b+"] is missing.")
- true
- } else {
- s.log.debug("Checking for binary differences in ["+a+"] against ["+b+"].")
- val diff = !checkSingleFilePair(a,b)
- if(diff) s.log.error("["+a+"] differs from ["+b+"]")
- diff
- }
- }
- }
- val allClassMappings = (relativeClasses(lhs) ++ relativeClasses(rhs)).distinct
- val comparisons = allClassMappings.map(f => new File(lhs, f) -> new File(rhs, f))
- val result = hasDifferentFiles(comparisons)
- if (result) error("Binary artifacts differ.")
- }
-
- val bufferSize = 1024
-
- // Tests whether two files are binary equivalents of each other.
- def checkSingleFilePair(originFile: File, destFile: File): Boolean = {
- Using.fileInputStream(originFile) { originStream =>
- Using.fileInputStream(destFile) { destStream =>
- val originBuffer = new Array[Byte](bufferSize)
- val destBuffer = new Array[Byte](bufferSize)
- var equalNow = true
- var originRemaining = originStream.read(originBuffer)
- var destRemaining = destStream.read(destBuffer)
- while (originRemaining > 0 && equalNow) {
- if (originRemaining == destRemaining) {
- for (idx <- 0 until originRemaining) {
- equalNow = equalNow && (originBuffer(idx) == destBuffer(idx))
- }
- } else {
- equalNow = false
- }
- originRemaining = originStream.read(originBuffer)
- destRemaining = destStream.read(destBuffer)
- }
- if (destRemaining > 0) equalNow = false
- equalNow
- }
- }
- }
-
-
-}
diff --git a/project/ScalaBuildKeys.scala b/project/ScalaBuildKeys.scala
deleted file mode 100644
index 9e495de19f..0000000000
--- a/project/ScalaBuildKeys.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-import sbt._
-import Keys._
-
-object ScalaBuildKeys {
- val lockerLock = TaskKey[Unit]("locker-lock", "Locks the locker layer of the compiler build such that it won't rebuild on changed source files.")
- val lockerUnlock = TaskKey[Unit]("locker-unlock", "Unlocks the locker layer of the compiler so that it will be recompiled on changed source files.")
- val lockFile = SettingKey[File]("lock-file", "Location of the lock file used when compiling this project.")
- val lock = TaskKey[Unit]("lock", "Locks this project so it won't be recompiled.")
- val unlock = TaskKey[Unit]("unlock", "Unlocks this project so it will be recompiled.")
- val makeDist = TaskKey[File]("make-dist", "Creates a mini-distribution (scala home directory) for this build in a zip file.")
- val makeExplodedDist = TaskKey[File]("make-exploded-dist", "Creates a mini-distribution (scala home directory) for this build in a directory.")
- val makeDistMappings = TaskKey[Seq[(File, String)]]("make-dist-mappings", "Creates distribution mappings for creating zips, jars, directories, etc.")
- val buildFixed = AttributeKey[Boolean]("build-uri-fixed")
- val genBinRunner = TaskKey[ScalaToolRunner]("gen-bin-runner", "Creates a utility to generate script files for Scala.")
- val genBin = TaskKey[Seq[(File,String)]]("gen-bin", "Creates script files for Scala distribution.")
- val binDir = SettingKey[File]("binaries-directory", "Directory where binary scripts will be located.")
- val genBinQuick = TaskKey[Seq[(File,String)]]("gen-quick-bin", "Creates script files for testing against current Scala build classfiles (not local dist).")
- val runManmakerMan = TaskKey[Seq[(File,String)]]("make-man", "Runs the man maker project to generate man pages")
- val runManmakerHtml = TaskKey[Seq[(File,String)]]("make-html", "Runs the man maker project to generate html pages")
- val checkSame = TaskKey[Unit]("check-same-binaries", "checks whether or not the class files generated by scala are the same.")
- val checkSameLibrary = TaskKey[Unit]("check-same-lib-binaries", "checks whether or not the library class files generated by scala are the same.")
- val checkSameCompiler = TaskKey[Unit]("check-same-comp-binaries", "checks whether or not the compiler class files generated by scala are the same.")
-}
diff --git a/project/ScalaToolRunner.scala b/project/ScalaToolRunner.scala
deleted file mode 100644
index d7338a54b3..0000000000
--- a/project/ScalaToolRunner.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-import sbt._
-import Keys._
-
-/** Reflection helper that runs ScalaTool.
- * TODO - When SBT is on 2.10.x try to use Dynamic + Reflection. COULD BE FUN.
- */
-class ScalaToolRunner(classpath: Classpath) {
- // TODO - Don't use the ant task directly...
- lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.data.toURI.toURL).toArray, null)
- lazy val mainClass = classLoader.loadClass("scala.tools.ant.ScalaTool")
- lazy val executeMethod = mainClass.getMethod("execute")
- lazy val setFileMethod = mainClass.getMethod("setFile", classOf[java.io.File])
- lazy val setClassMethod = mainClass.getMethod("setClass", classOf[String])
- lazy val setClasspathMethod = mainClass.getMethod("setClassPath", classOf[String])
- lazy val instance = mainClass.newInstance()
-
- def setClass(cls: String): Unit = setClassMethod.invoke(instance, cls)
- def setFile(file: File): Unit = setFileMethod.invoke(instance, file)
- def setClasspath(cp: String): Unit = setClasspathMethod.invoke(instance, cp)
- def execute(): Unit = executeMethod.invoke(instance)
-}
diff --git a/project/ShaResolve.scala b/project/ShaResolve.scala
deleted file mode 100644
index e5b25a29cf..0000000000
--- a/project/ShaResolve.scala
+++ /dev/null
@@ -1,148 +0,0 @@
-import sbt._
-
-import Build._
-import Keys._
-import Project.Initialize
-import scala.collection.{ mutable, immutable }
-import scala.collection.parallel.CompositeThrowable
-import java.security.MessageDigest
-
-case class Credentials(user: String, pw: String)
-
-/** Helpers to resolve SHA artifacts from typesafe repo. */
-object ShaResolve {
- import dispatch.{Http,url}
- val remote_urlbase="http://typesafe.artifactoryonline.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
-
- val pullBinaryLibs = TaskKey[Unit]("pull-binary-libs", "Pulls binary libs by the SHA key.")
- val pushBinaryLibs = TaskKey[Unit]("push-binary-libs", "Pushes binary libs whose SHA has changed.")
- val binaryLibCache = SettingKey[File]("binary-lib-cache", "Location of the cache of binary libs for this scala build.")
-
- def settings: Seq[Setting[_]] = Seq(
- binaryLibCache in ThisBuild := file(System.getProperty("user.home")) / ".sbt" / "cache" / "scala",
- pullBinaryLibs in ThisBuild <<= (baseDirectory, binaryLibCache, streams) map resolveLibs,
- pushBinaryLibs in ThisBuild <<= (baseDirectory, streams) map getCredentialsAndPushFiles
- )
-
- def resolveLibs(dir: File, cacheDir: File, s: TaskStreams): Unit = loggingParallelExceptions(s) {
- val files = (dir / "test" / "files" ** "*.desired.sha1") +++ (dir / "lib" ** "*.desired.sha1")
- for {
- (file, name) <- (files x relativeTo(dir)).par
- uri = name.dropRight(13).replace('\\', '/')
- jar = dir / uri
- if !jar.exists || !isValidSha(file)
- sha = getShaFromShafile(file)
- } pullFile(jar, sha + "/" + uri, cacheDir, sha, s)
- }
-
- /** This method removes all SHA1 files that don't match their corresponding JAR. */
- def removeInvalidShaFiles(dir: File): Unit = {
- val files = (dir / "test" / "files" ** "*.desired.sha1") +++ (dir / "lib" ** "*.desired.sha1")
- for {
- (file, name) <- (files x relativeTo(dir)).par
- uri = name.dropRight(13).replace('\\', '/')
- jar = dir / uri
- if !jar.exists || !isValidSha(file)
- } IO.delete(jar)
- }
- def getCredentials: Credentials = System.out.synchronized {
- val user = (SimpleReader.readLine("Please enter your STARR username> ") getOrElse error("No username provided."))
- val password = (SimpleReader.readLine("Please enter your STARR password> ", Some('*')) getOrElse error("No password provided."))
- Credentials(user, password)
- }
-
- def getCredentialsAndPushFiles(dir: File, s: TaskStreams): Unit =
- pushFiles(dir, getCredentials, s)
-
- def pushFiles(dir: File, cred: Credentials, s: TaskStreams): Unit = loggingParallelExceptions(s) {
- val files = (dir / "test" / "files" ** "*.jar") +++ (dir / "lib" ** "*.jar")
- for {
- (jar, name) <- (files x relativeTo(dir)).par
- shafile = dir / (name + ".desired.sha1")
- if !shafile.exists || !isValidSha(shafile)
- } pushFile(jar, name, cred, s)
- }
-
- @inline final def loggingParallelExceptions[U](s: TaskStreams)(f: => U): U = try f catch {
- case t: CompositeThrowable =>
- s.log.error("Error during parallel execution, GET READY FOR STACK TRACES!!")
- t.throwables foreach (t2 => s.log.trace(t2))
- throw t
- }
-
- // TODO - Finish this publishing aspect.
-
- def getShaFromShafile(file: File): String = parseShaFile(file)._2
-
- // This should calculate the SHA-1 sum of a file the same way the standard Linux tooling does.
- def calculateSha(file: File): String = {
- val digest = MessageDigest.getInstance("SHA1")
- val in = new java.io.FileInputStream(file);
- val buffer = new Array[Byte](8192)
- try {
- def read(): Unit = in.read(buffer) match {
- case x if x <= 0 => ()
- case size => digest.update(buffer, 0, size); read()
- }
- read()
- } finally in.close()
- val sha = convertToHex(digest.digest())
- sha
- }
-
- def convertToHex(data: Array[Byte]): String = {
- def byteToHex(b: Int) =
- if ((0 <= b) && (b <= 9)) ('0' + b).toChar
- else ('a' + (b-10)).toChar
- val buf = new StringBuffer
- for (i <- 0 until data.length) {
- buf append byteToHex((data(i) >>> 4) & 0x0F)
- buf append byteToHex(data(i) & 0x0F)
- }
- buf.toString
- }
- // Parses a sha file into a file and a sha.
- def parseShaFile(file: File): (File, String) =
- IO.read(file).split("\\s") match {
- case Array(sha, filename) if filename.startsWith("?") => (new File(file.getParentFile, filename.drop(1)), sha)
- case Array(sha, filename) if filename.startsWith("*") => (new File(file.getParentFile, filename.drop(1)), sha)
- case Array(sha, filename) => (new File(file.getParentFile, filename), sha)
- case _ => error(file.getAbsolutePath + " is an invalid sha file")
- }
-
-
- def isValidSha(file: File): Boolean =
- try {
- val (jar, sha) = parseShaFile(file)
- jar.exists && calculateSha(jar) == sha
- } catch {
- case t: Exception => false
- }
-
-
- def pullFile(file: File, uri: String, cacheDir: File, sha: String, s: TaskStreams): Unit = {
- val cachedFile = cacheDir / uri
- if (!cachedFile.exists || calculateSha(cachedFile) != sha) {
- // Ensure the directory for the cache exists.
- cachedFile.getParentFile.mkdirs()
- val url = remote_urlbase + "/" + uri
- val fous = new java.io.FileOutputStream(cachedFile)
- s.log.info("Pulling [" + cachedFile + "] to cache")
- try Http(dispatch.url(url) >>> fous) finally fous.close()
- }
- s.log.info("Pulling [" + file + "] from local cache")
- IO.copyFile(cachedFile, file)
- }
-
- // Pushes a file and writes the new .desired.sha1 for git.
- def pushFile(file: File, uri: String, cred: Credentials, s: TaskStreams): Unit = {
- val sha = calculateSha(file)
- val url = remote_urlbase + "/" + sha + "/" + uri
- val sender = dispatch.url(url).PUT.as(cred.user,cred.pw) <<< (file, "application/java-archive")
- // TODO - output to logger.
- Http(sender >>> System.out)
- val shafile = file.getParentFile / (file.getName + ".desired.sha1")
- IO.touch(shafile)
- IO.write(shafile, sha + " ?" + file.getName)
- }
-}
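The .desired.sha1 handling above boils down to a digest plus a file name: pushFile writes "<sha1> ?<jar-name>" and parseShaFile strips the leading '?' or '*' marker. A minimal sketch of the round trip for a single artifact (digest and jar name are illustrative):

    // Sketch only: how the ShaResolve helpers fit together for one jar.
    val shaFile    = file("lib") / "jline.jar.desired.sha1"             // e.g. "<40-hex-digest> ?jline.jar"
    val (jar, sha) = ShaResolve.parseShaFile(shaFile)
    val upToDate   = jar.exists && ShaResolve.calculateSha(jar) == sha  // the check isValidSha performs
    // When this is false, resolveLibs pulls <sha>/<relative-path> from remote_urlbase into the
    // local cache and copies it over the jar.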
diff --git a/project/Testing.scala b/project/Testing.scala
deleted file mode 100644
index 5b4135a31a..0000000000
--- a/project/Testing.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-import sbt._
-import Keys._
-import partest._
-import SameTest._
-import ScalaBuildKeys._
-
-/** All settings/projects relating to testing. */
-trait Testing { self: ScalaBuild.type =>
-
- lazy val testsuiteSettings: Seq[Setting[_]] = compilerDependentProjectSettings ++ partestTaskSettings ++ VerifyClassLoad.settings ++ Seq(
- unmanagedBase <<= baseDirectory / "test/files/lib",
- fullClasspath in VerifyClassLoad.checkClassLoad <<= (fullClasspath in scalaLibrary in Runtime).identity,
- autoScalaLibrary := false,
- checkSameLibrary <<= checkSameBinaryProjects(quickLib, strappLib),
- checkSameCompiler <<= checkSameBinaryProjects(quickComp, strappComp),
- checkSame <<= (checkSameLibrary, checkSameCompiler) map ((a,b) => ()),
- autoScalaLibrary := false
- )
- lazy val continuationsTestsuiteSettings: Seq[Setting[_]] = testsuiteSettings ++ Seq(
- scalacOptions in Test <++= (exportedProducts in Compile in continuationsPlugin) map {
- case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
- },
- partestDirs <<= baseDirectory apply { bd =>
- def mkFile(name: String) = bd / "test" / "files" / name
- def mkTestType(name: String) = name.drop("continuations-".length).toString
- Seq("continuations-neg", "continuations-run") map (t => mkTestType(t) -> mkFile(t)) toMap
- }
- )
- val testsuite = (
- Project("testsuite", file("."))
- settings (testsuiteSettings:_*)
- dependsOn (scalaLibrary, scalaCompiler, partest, scalacheck)
- )
- val continuationsTestsuite = (
- Project("continuations-testsuite", file("."))
- settings (continuationsTestsuiteSettings:_*)
- dependsOn (partest, scalaLibrary, scalaCompiler)
- )
-
-}
-
diff --git a/project/VerifyClassLoad.scala b/project/VerifyClassLoad.scala
deleted file mode 100644
index c8eebb1159..0000000000
--- a/project/VerifyClassLoad.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-import sbt._
-
-import Build._
-import Keys._
-
-// This is helper code to validate that generated class files will succeed in bytecode verification at class-load time.
-object VerifyClassLoad {
- lazy val checkClassLoad: TaskKey[Unit] = TaskKey("check-class-load", "checks whether or not the class files generated by scala are deemed acceptable by classloaders.")
- lazy val checkClassRunner: TaskKey[ClassVerifyRunner] = TaskKey("check-class-runner", "A wrapper around reflective calls to the VerifyClass class.")
-
-
- def settings: Seq[Setting[_]] = Seq(
- checkClassRunner <<= (fullClasspath in Runtime) map (cp => new ClassVerifyRunner(data(cp))),
- fullClasspath in checkClassLoad := Seq(),
- checkClassLoad <<= (checkClassRunner, fullClasspath in checkClassLoad, streams) map { (runner, dirs, s) =>
- import collection.JavaConverters._
- val results = runner.run(data(dirs).map(_.getAbsolutePath).toArray).asScala
-
- s.log.info("Processed " + results.size + " classes.")
- val errors = results.filter(_._2 != null)
- for( (name, result) <- results; if result != null) {
- s.log.error(name + " had error: " + result)
- }
- if(errors.size > 0) error("Classload validation errors encountered")
- ()
- }
- )
-
- // TODO - Use
- class ClassVerifyRunner(classpath: Seq[File]) {
- // Classloader that does *not* have this as parent, for differing Scala version.
- lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.toURI.toURL).toArray, null)
- lazy val (mainClass, mainMethod) = try {
- val c = classLoader.loadClass("scala.tools.util.VerifyClass")
- val m = c.getMethod("run", classOf[Array[String]])
- (c,m)
- }
- def run(args: Array[String]): java.util.Map[String,String] = try {
- mainMethod.invoke(null, args).asInstanceOf[java.util.Map[String,String]]
- } catch {
- case e =>
- //error("Could not run Partest: " + e)
- throw e
- }
- }
-}
diff --git a/project/Versions.scala b/project/Versions.scala
deleted file mode 100644
index 57e274c15c..0000000000
--- a/project/Versions.scala
+++ /dev/null
@@ -1,142 +0,0 @@
-import sbt._
-import Keys._
-import java.util.Properties
-import scala.util.control.Exception.catching
-import java.lang.{NumberFormatException => NFE}
-import java.io.FileInputStream
-import com.jsuereth.git.GitRunner
-import com.jsuereth.git.GitKeys.gitRunner
-
-case class VersionInfo(canonical: String,
- maven: String,
- osgi: String)
-
-/** This file is responsible for setting up Scala versioning schemes and updating all the necessary bits. */
-object Versions {
- val buildNumberFile = SettingKey[File]("scala-build-number-file")
- // TODO - Make this a setting?
- val buildNumberProps = SettingKey[BaseBuildNumber]("scala-build-number-props")
- val buildRelease = SettingKey[Boolean]("scala-build-release", "This is set to true if we're building a release.")
- val mavenSuffix = SettingKey[String]("scala-maven-suffix", "This is set to whatever maven suffix is required.")
-
- val gitSha = TaskKey[String]("scala-git-sha", "The sha of the current git commit.")
- val gitDate = TaskKey[String]("scala-git-date", "The date of the current git commit.")
-
- val mavenVersion = SettingKey[String]("scala-maven-version", "The maven version number.")
- val osgiVersion = TaskKey[String]("scala-osgi-version", "The OSGi version number.")
- val canonicalVersion = TaskKey[String]("scala-canonical-version", "The canonical version number.")
-
- val scalaVersions = TaskKey[VersionInfo]("scala-version-info", "The scala versions used for this build.")
-
-
-
- def settings: Seq[Setting[_]] = Seq(
- buildNumberFile <<= baseDirectory apply (_ / "build.number"),
- buildNumberProps <<= buildNumberFile apply loadBuildNumberProps,
- buildRelease := Option(System.getProperty("build.release")) map (!_.isEmpty) getOrElse false,
- mavenSuffix <<= buildRelease apply pickMavenSuffix,
- mavenVersion <<= (buildNumberProps, mavenSuffix) apply makeMavenVersion,
- gitSha <<= (gitRunner, baseDirectory, streams) map getGitSha,
- gitDate <<= (gitRunner, baseDirectory, streams) map getGitDate,
- osgiVersion <<= (buildNumberProps, gitDate, gitSha) map makeOsgiVersion,
- canonicalVersion <<= (buildRelease, mavenVersion, buildNumberProps, gitDate, gitSha) map makeCanonicalVersion,
- scalaVersions <<= (canonicalVersion, mavenVersion, osgiVersion) map VersionInfo.apply
- )
-
-
- /** Generates the version properties file, unless it already exists and either `skip` is set
- * or it already records the current version. */
- def generateVersionPropertiesFile(name: String)(dir: File, versions: VersionInfo, skip: Boolean, s: TaskStreams): Seq[File] = {
- // TODO - We could probably clean this up by moving the caching bits elsewhere.
- val target = dir / name
- // TODO - Regenerate on triggers, like recompilation or something...
- def hasSameVersion: Boolean = {
- val props = new java.util.Properties
- val in = new java.io.FileInputStream(target)
- try props.load(in) finally in.close()
- versions.canonical == (props getProperty "version.number")
- }
- if (!target.exists || !(skip || hasSameVersion)) {
- makeVersionPropertiesFile(target, versions)
- }
- target :: Nil
- }
-
- // This creates the *.properties file used to determine the current version of Scala at runtime. TODO - move these helpers somewhere more utility-like.
- def makeVersionPropertiesFile(f: File, versions: VersionInfo): Unit =
- IO.write(f, "version.number = "+versions.canonical+"\n"+
- "osgi.number = "+versions.osgi+"\n"+
- "maven.number = "+versions.maven+"\n"+
- "copyright.string = Copyright 2002-2013, LAMP/EPFL")
-
- def makeCanonicalVersion(isRelease: Boolean, mvnVersion: String, base: BaseBuildNumber, gitDate: String, gitSha: String): String =
- if(isRelease) mvnVersion
- else {
- val suffix = if(base.bnum > 0) "-%d".format(base.bnum) else ""
- "%s.%s.%s%s-%s-%s" format (base.major, base.minor, base.patch, suffix, gitDate, gitSha)
- }
-
- def makeMavenVersion(base: BaseBuildNumber, suffix: String): String = {
- val firstSuffix = if(base.bnum > 0) "-%d".format(base.bnum) else ""
- "%d.%d.%d%s%s" format (base.major, base.minor, base.patch, firstSuffix, suffix)
- }
-
- def makeOsgiVersion(base: BaseBuildNumber, gitDate: String, gitSha: String): String = {
- val suffix = if(base.bnum > 0) "-%d".format(base.bnum) else ""
- "%s.%s.%s.v%s%s-%s" format (base.major, base.minor, base.patch, gitDate, suffix, gitSha)
- }
-
- /** Determines what the Maven suffix should be for this build. */
- def pickMavenSuffix(isRelease: Boolean): String = {
- def default = if(isRelease) "" else "-SNAPSHOT"
- Option(System.getProperty("maven.version.suffix")) getOrElse default
- }
-
- /** Loads the build.number properties file into SBT. */
- def loadBuildNumberProps(file: File): BaseBuildNumber = {
- val fin = new FileInputStream(file)
- try {
- val props = new Properties()
- props.load(fin)
- def getProp(name: String): Int =
- (for {
- v <- Option(props.getProperty(name))
- v2 <- catching(classOf[NFE]) opt v.toInt
- } yield v2) getOrElse sys.error("Could not convert %s to integer!" format (name))
-
- BaseBuildNumber(
- major=getProp("version.major"),
- minor=getProp("version.minor"),
- patch=getProp("version.patch"),
- bnum =getProp("version.bnum")
- )
- } finally fin.close()
- }
-
-
- def getGitDate(git: GitRunner, baseDirectory: File, s: TaskStreams): String = {
- val lines = getGitLines("log","-1","--format=\"%ci\"")(git,baseDirectory, s)
- val line = if(lines.isEmpty) sys.error("Could not retrieve git commit date!") else lines.head
- // Lines *always* start with a quote character for some reason...
- line drop 1 split "\\s+" match {
- case Array(date, time, _*) => "%s-%s" format (date.replaceAll("\\-", ""), time.replaceAll(":",""))
- case _ => sys.error("Could not parse git date: " + line)
- }
- }
-
- def getGitSha(git: GitRunner, baseDirectory: File, s: TaskStreams): String = {
- val lines = getGitLines("log","-1","--format=\"%H\"", "HEAD")(git,baseDirectory, s)
- val line = if(lines.isEmpty) sys.error("Could not retrieve git commit sha!") else lines.head
- val noquote = if(line startsWith "\"") line drop 1 else line
- val nog = if(noquote startsWith "g") noquote drop 1 else noquote
- nog take 10
- }
-
- def getGitLines(args: String*)(git: GitRunner, baseDirectory: File, s: TaskStreams): Seq[String] =
- git(args: _*)(baseDirectory, s.log) split "[\r\n]+"
-}
-
-
-case class BaseBuildNumber(major: Int, minor: Int, patch: Int, bnum: Int) {
- override def toString = "BaseBuildNumber(%d.%d.%d-%d)" format (major, minor, patch, bnum)
-}
diff --git a/project/plugins.sbt b/project/plugins.sbt
deleted file mode 100644
index fdf37e31a6..0000000000
--- a/project/plugins.sbt
+++ /dev/null
@@ -1,9 +0,0 @@
-resolvers += Resolver.url("Typesafe nightlies", url("https://typesafe.artifactoryonline.com/typesafe/ivy-snapshots/"))(Resolver.ivyStylePatterns)
-
-resolvers += Resolver.url("scalasbt", new URL("http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases"))(Resolver.ivyStylePatterns)
-
-resolvers += "jgit-repo" at "http://download.eclipse.org/jgit/maven"
-
-libraryDependencies += "net.databinder" % "dispatch-http_2.9.1" % "0.8.6"
-
-
diff --git a/project/project/Build.scala b/project/project/Build.scala
deleted file mode 100644
index d3a08b62ba..0000000000
--- a/project/project/Build.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-import sbt._
-object PluginDef extends Build {
- override def projects = Seq(root)
- lazy val root = Project("plugins", file(".")) dependsOn(proguard, git)
- lazy val proguard = uri("git://github.com/jsuereth/xsbt-proguard-plugin.git#sbt-0.12")
- lazy val git = uri("git://github.com/sbt/sbt-git.git#scala-build")
-}