summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.gitignore4
-rw-r--r--project/Build.scala554
-rw-r--r--project/Layers.scala92
-rw-r--r--project/Partest.scala141
-rw-r--r--project/Release.scala115
-rw-r--r--project/Sametest.scala66
-rw-r--r--project/ShaResolve.scala117
-rw-r--r--project/VerifyClassLoad.scala46
-rw-r--r--project/plugins.sbt9
-rw-r--r--project/project/Build.scala7
-rw-r--r--src/compiler/scala/reflect/runtime/JavaToScala.scala6
-rw-r--r--src/manual/scala/tools/docutil/EmitHtml.scala154
-rw-r--r--src/manual/scala/tools/docutil/EmitManPage.scala16
-rw-r--r--src/partest/scala/tools/partest/nest/SBTRunner.scala65
14 files changed, 1229 insertions, 163 deletions
diff --git a/.gitignore b/.gitignore
index d392f0e82c..8b8b6978b8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,5 @@
*.jar
+*.obj
+*.log
+*~
+target/
diff --git a/project/Build.scala b/project/Build.scala
new file mode 100644
index 0000000000..297f82e515
--- /dev/null
+++ b/project/Build.scala
@@ -0,0 +1,554 @@
+import sbt._
+import Keys._
+import partest._
+import SameTest._
+
+object ScalaBuild extends Build with Layers {
+ // New tasks/settings specific to the scala build.
+ lazy val lockerLock: TaskKey[Unit] = TaskKey("locker-lock",
+ "Locks the locker layer of the compiler build such that it won't rebuild on changed source files.")
+ lazy val lockerUnlock: TaskKey[Unit] = TaskKey("locker-unlock",
+ "Unlocks the locker layer of the compiler so that it will be recompiled on changed source files.")
+ lazy val lockFile: SettingKey[File] = SettingKey("lock-file",
+ "Location of the lock file compiling this project.")
+ lazy val makeDist: TaskKey[File] = TaskKey("make-dist",
+ "Creates a mini-distribution (scala home directory) for this build in a zip file.")
+ lazy val makeExplodedDist: TaskKey[File] = TaskKey("make-exploded-dist",
+ "Creates a mini-distribution (scala home directory) for this build in a directory.")
+ lazy val makeDistMappings: TaskKey[Map[File, String]] = TaskKey("make-dist-mappings",
+ "Creates distribution mappings for creating zips,jars,directorys,etc.")
+ lazy val buildFixed = AttributeKey[Boolean]("build-uri-fixed")
+
+ // Build wide settings:
+ override lazy val settings = super.settings ++ Seq(
+ autoScalaLibrary := false,
+ resolvers += Resolver.url(
+ "Typesafe nightlies",
+ url("https://typesafe.artifactoryonline.com/typesafe/ivy-snapshots/")
+ )(Resolver.ivyStylePatterns),
+ resolvers ++= Seq(
+ "junit interface repo" at "https://repository.jboss.org/nexus/content/repositories/scala-tools-releases",
+ ScalaToolsSnapshots
+ ),
+ organization := "org.scala-lang",
+ version := "2.10.0-SNAPSHOT",
+ pomExtra := <xml:group>
+ <inceptionYear>2002</inceptionYear>
+ <licenses>
+ <license>
+ <name>BSD-like</name>
+ <url>http://www.scala-lang.org/downloads/license.html</url>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
+ </scm>
+ <issueManagement>
+ <system>jira</system>
+ <url>http://issues.scala-lang.org</url>
+ </issueManagement>
+ </xml:group>,
+ commands += Command.command("fix-uri-projects") { (state: State) =>
+ if(state.get(buildFixed) getOrElse false) state
+ else {
+ // TODO - fix up scalacheck's dependencies!
+ val extracted = Project.extract(state)
+ import extracted._
+ def fix(s: Setting[_]): Setting[_] = s match {
+ case ScopedExternalSetting(`scalacheck`, scalaInstance.key, setting) => fullQuickScalaReference mapKey Project.mapScope(_ => s.key.scope)
+ case s => s
+ }
+ val transformed = session.mergeSettings map ( s => fix(s) )
+ val scopes = transformed collect { case ScopedExternalSetting(`scalacheck`, _, s) => s.key.scope } toSet
+ // Create some fixers so we don't download scala or rely on it.
+ val fixers = for { scope <- scopes
+ setting <- Seq(autoScalaLibrary := false, crossPaths := false)
+ } yield setting mapKey Project.mapScope(_ => scope)
+ val newStructure = Load.reapply(transformed ++ fixers, structure)
+ Project.setProject(session, newStructure, state).put(buildFixed, true)
+ }
+ },
+ onLoad in Global <<= (onLoad in Global) apply (_ andThen { (state: State) =>
+ "fix-uri-projects" :: state
+ })
+ )
+
+ // Collections of projects to run 'compile' on.
+ lazy val compiledProjects = Seq(quickLib, quickComp, continuationsLibrary, actors, swing, dbc, forkjoin, fjbg)
+ // Collection of projects to 'package' and 'publish' together.
+ lazy val packagedBinaryProjects = Seq(scalaLibrary, scalaCompiler, swing, dbc, continuationsPlugin, jline, scalap)
+ lazy val partestRunProjects = Seq(testsuite, continuationsTestsuite)
+
+ private def epflPomExtra = (
+ <xml:group>
+ <inceptionYear>2002</inceptionYear>
+ <licenses>
+ <license>
+ <name>BSD-like</name>
+ <url>http://www.scala-lang.org/downloads/license.html</url>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
+ </scm>
+ <issueManagement>
+ <system>jira</system>
+ <url>http://issues.scala-lang.org</url>
+ </issueManagement>
+ </xml:group>
+ )
+
+ // Settings used to make sure publishing goes smoothly.
+ def publishSettings: Seq[Setting[_]] = Seq(
+ ivyScala ~= ((is: Option[IvyScala]) => is.map(_.copy(checkExplicit = false))),
+ pomIncludeRepository := (_ => false),
+ publishMavenStyle := true,
+ makePomConfiguration <<= makePomConfiguration apply (_.copy(configurations = Some(Seq(Compile, Default)))),
+ pomExtra := epflPomExtra
+ )
+
+ // Settings for root project. These are aggregate tasks against the rest of the build.
+ def projectSettings: Seq[Setting[_]] = publishSettings ++ Seq(
+ doc in Compile <<= (doc in documentation in Compile).identity,
+ // These next two aggregate commands on several projects and return results that are to be ignored by remaining tasks.
+ compile in Compile <<= compiledProjects.map(p => compile in p in Compile).join.map(_.head),
+ // TODO - just clean target? i.e. target map IO.deleteRecursively
+ clean <<= (compiledProjects ++ partestRunProjects).map(p => clean in p).dependOn,
+ packageBin in Compile <<= packagedBinaryProjects.map(p => packageBin in p in Compile).join.map(_.head),
+ // TODO - Make sure scalaLibrary has packageDoc + packageSrc from documentation attached...
+ publish <<= packagedBinaryProjects.map(p => publish in p).join.map(_.head),
+ publishLocal <<= packagedBinaryProjects.map(p => publishLocal in p).join.map(_.head),
+ packageDoc in Compile <<= (packageDoc in documentation in Compile).identity,
+ packageSrc in Compile <<= (packageSrc in documentation in Compile).identity,
+ test in Test <<= (runPartest in testsuite, runPartest in continuationsTestsuite, checkSame in testsuite) map { (a,b,c) => () },
+ lockerLock <<= (lockFile in lockerLib, lockFile in lockerComp, compile in Compile in lockerLib, compile in Compile in lockerComp) map { (lib, comp, _, _) =>
+ Seq(lib,comp).foreach(f => IO.touch(f))
+ },
+ lockerUnlock <<= (lockFile in lockerLib, lockFile in lockerComp) map { (lib, comp) =>
+ Seq(lib,comp).foreach(IO.delete)
+ },
+ genBinQuick <<= (genBinQuick in scaladist).identity,
+ makeDist <<= (makeDist in scaladist).identity,
+ makeExplodedDist <<= (makeExplodedDist in scaladist).identity,
+ // Note: We override unmanagedSources so that ~ compile will look at all these sources, then run our aggregated compile...
+ unmanagedSourceDirectories in Compile <<= baseDirectory apply (_ / "src") apply { dir =>
+ Seq("library/scala","actors","compiler","fjbg","swing","continuations/library","forkjoin") map (dir / _)
+ },
+ // TODO - Make exported products == makeDist so we can use this when creating a *real* distribution.
+ commands += Release.pushStarr
+ //commands += Release.setStarrHome
+ )
+ // Note: Root project is determined by lowest-alphabetical project that has baseDirectory as file("."). we use aaa_ to 'win'.
+ lazy val aaa_root = Project("scala", file(".")) settings(projectSettings: _*) settings(ShaResolve.settings: _*)
+
+ // External dependencies used for various projects
+ lazy val externalDeps: Setting[_] = libraryDependencies <<= (sbtVersion)(v =>
+ Seq(
+ "org.apache.ant" % "ant" % "1.8.2",
+ "org.scala-tools.sbt" % "compiler-interface" % v % "provided"
+ )
+ )
+
+ // These are setting overrides for most artifacts in the Scala build file.
+ def settingOverrides: Seq[Setting[_]] = publishSettings ++ Seq(
+ crossPaths := false,
+ publishArtifact in packageDoc := false,
+ publishArtifact in packageSrc := false,
+ target <<= (baseDirectory, name) apply (_ / "target" / _),
+ (classDirectory in Compile) <<= target(_ / "classes"),
+ javacOptions ++= Seq("-target", "1.5", "-source", "1.5"),
+ scalaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
+ javaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ // Most libs in the compiler use this order to build.
+ compileOrder in Compile := CompileOrder.JavaThenScala,
+ lockFile <<= target(_ / "compile.lock"),
+ skip in Compile <<= lockFile.map(_ exists)
+ )
+
+ // --------------------------------------------------------------
+ // Libraries used by Scalac that change infrequently
+ // (or hopefully so).
+ // --------------------------------------------------------------
+
+ // Jline nested project. Compile this sucker once and be done.
+ lazy val jline = Project("jline", file("src/jline"))
+ // Fast Java Bytecode Generator (nested in every scala-compiler.jar)
+ lazy val fjbg = Project("fjbg", file(".")) settings(settingOverrides : _*)
+ // Forkjoin backport
+ lazy val forkjoin = Project("forkjoin", file(".")) settings(settingOverrides : _*)
+
+ // --------------------------------------------------------------
+ // The magic kingdom.
+ // Layered compilation of Scala.
+ // Stable Reference -> Locker ('Lockable' dev version) -> Quick -> Strap (Binary compatibility testing)
+ // --------------------------------------------------------------
+
+ // Need a report on this...
+ // TODO - Resolve STARR from a repo..
+ lazy val STARR = scalaInstance <<= (appConfiguration, ShaResolve.pullBinaryLibs in ThisBuild) map { (app, _) =>
+ val launcher = app.provider.scalaProvider.launcher
+ val library = file("lib/scala-library.jar")
+ val compiler = file("lib/scala-compiler.jar")
+ val libJars = (file("lib") * "*.jar").get filterNot Set(library, compiler)
+ ScalaInstance("starr", library, compiler, launcher, libJars: _*)
+ }
+
+ // Locker is a lockable Scala compiler that can be built from 'current' source to perform rapid development.
+ lazy val (lockerLib, lockerComp) = makeLayer("locker", STARR)
+ lazy val locker = Project("locker", file(".")) aggregate(lockerLib, lockerComp)
+
+ // Quick is the general purpose project layer for the Scala compiler.
+ lazy val (quickLib, quickComp) = makeLayer("quick", makeScalaReference("locker", lockerLib, lockerComp, fjbg))
+ lazy val quick = Project("quick", file(".")) aggregate(quickLib, quickComp)
+
+ // Reference to quick scala instance.
+ lazy val quickScalaInstance = makeScalaReference("quick", quickLib, quickComp, fjbg)
+ def quickScalaLibraryDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickLib in Compile).identity
+ def quickScalaCompilerDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickComp in Compile).identity
+
+ // Strapp is used to test binary 'sameness' between things built with locker and things built with quick.
+ lazy val (strappLib, strappComp) = makeLayer("strapp", quickScalaInstance)
+
+ // --------------------------------------------------------------
+ // Projects dependent on layered compilation (quick)
+ // --------------------------------------------------------------
+ def addCheaterDependency(projectName: String): Setting[_] =
+ pomPostProcess <<= (version, organization, pomPostProcess) apply { (v,o,k) =>
+ val dependency: scala.xml.Node =
+ <dependency>
+ <groupId>{o}</groupId>
+ <artifactid>{projectName}</artifactid>
+ <version>{v}</version>
+ </dependency>
+ def fixDependencies(node: scala.xml.Node): scala.xml.Node = node match {
+ case <dependencies>{nested@_*}</dependencies> => <dependencies>{dependency}{nested}</dependencies>
+ case x => x
+ }
+ // This is a hack to get around issues where \ and \\ don't work if any of the children are `scala.xml.Group`.
+ def hasDependencies(root: scala.xml.Node): Boolean =
+ (root.child collectFirst {
+ case n: scala.xml.Elem if n.label == "dependencies" => n
+ } isEmpty)
+ // TODO - Keep namespace on project...
+ k andThen {
+ case n @ <project>{ nested@_*}</project> if hasDependencies(n) =>
+ <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0">{nested}<dependencies>{dependency}</dependencies></project>
+ case <project>{ nested@_*}</project> =>
+ <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0">{ nested map fixDependencies }</project>
+ }
+ }
+
+ // TODO - in sabbus, these all use locker to build... I think this way is better, but let's farm this idea around.
+ // TODO - Actors + swing separate jars...
+ lazy val dependentProjectSettings = settingOverrides ++ Seq(quickScalaInstance, quickScalaLibraryDependency, addCheaterDependency("scala-library"))
+ lazy val actors = Project("actors", file(".")) settings(dependentProjectSettings:_*) dependsOn(forkjoin % "provided")
+ lazy val dbc = Project("dbc", file(".")) settings(dependentProjectSettings:_*)
+ // TODO - Remove actors dependency from pom...
+ lazy val swing = Project("swing", file(".")) settings(dependentProjectSettings:_*) dependsOn(actors % "provided")
+ // This project will generate man pages (in man1 and html) for scala.
+ lazy val manmakerSettings: Seq[Setting[_]] = dependentProjectSettings :+ externalDeps
+ lazy val manmaker = Project("manual", file(".")) settings(manmakerSettings:_*)
+
+ // Things that compile against the compiler.
+ lazy val compilerDependentProjectSettings = dependentProjectSettings ++ Seq(quickScalaCompilerDependency, addCheaterDependency("scala-compiler"))
+ lazy val partestSettings = compilerDependentProjectSettings :+ externalDeps
+ lazy val partest = Project("partest", file(".")) settings(partestSettings:_*) dependsOn(actors,forkjoin,scalap)
+ lazy val scalapSettings = compilerDependentProjectSettings ++ Seq(
+ name := "scalap",
+ exportJars := true
+ )
+ lazy val scalap = Project("scalap", file(".")) settings(scalapSettings:_*)
+
+ // --------------------------------------------------------------
+ // Continuations plugin + library
+ // --------------------------------------------------------------
+ lazy val continuationsPluginSettings = compilerDependentProjectSettings ++ Seq(
+ scalaSource in Compile <<= baseDirectory(_ / "src/continuations/plugin/"),
+ resourceDirectory in Compile <<= baseDirectory(_ / "src/continuations/plugin/"),
+ exportJars := true,
+ name := "continuations" // Note: This artifact is directly exported.
+
+ )
+ lazy val continuationsPlugin = Project("continuations-plugin", file(".")) settings(continuationsPluginSettings:_*)
+ lazy val continuationsLibrarySettings = dependentProjectSettings ++ Seq(
+ scalaSource in Compile <<= baseDirectory(_ / "src/continuations/library/"),
+ scalacOptions in Compile <++= (exportedProducts in Compile in continuationsPlugin) map {
+ case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
+ }
+ )
+ lazy val continuationsLibrary = Project("continuations-library", file(".")) settings(continuationsLibrarySettings:_*)
+
+ // TODO - OSGi Manifest
+
+ // --------------------------------------------------------------
+ // Real Library Artifact
+ // --------------------------------------------------------------
+ val allSubpathsCopy = (dir: File) => (dir.*** --- dir) x (relativeTo(dir)|flat)
+ def productTaskToMapping(products : Seq[File]) = products flatMap { p => allSubpathsCopy(p) }
+ lazy val packageScalaLibBinTask = Seq(quickLib, continuationsLibrary, forkjoin, actors).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
+ lazy val scalaLibArtifactSettings: Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaLibBinTask)) ++ Seq(
+ name := "scala-library",
+ crossPaths := false,
+ exportJars := true,
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ packageDoc in Compile <<= (packageDoc in documentation in Compile).identity,
+ packageSrc in Compile <<= (packageSrc in documentation in Compile).identity,
+ fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
+ quickScalaInstance,
+ target <<= (baseDirectory, name) apply (_ / "target" / _)
+ )
+ lazy val scalaLibrary = Project("scala-library", file(".")) settings(publishSettings:_*) settings(scalaLibArtifactSettings:_*)
+
+ // --------------------------------------------------------------
+ // Real Compiler Artifact
+ // --------------------------------------------------------------
+ lazy val packageScalaBinTask = Seq(quickComp, fjbg).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
+ lazy val scalaBinArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaBinTask)) ++ Seq(
+ name := "scala-compiler",
+ crossPaths := false,
+ exportJars := true,
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
+ quickScalaInstance,
+ target <<= (baseDirectory, name) apply (_ / "target" / _)
+ )
+ lazy val scalaCompiler = Project("scala-compiler", file(".")) settings(publishSettings:_*) settings(scalaBinArtifactSettings:_*) dependsOn(scalaLibrary)
+ lazy val fullQuickScalaReference = makeScalaReference("pack", scalaLibrary, scalaCompiler, fjbg)
+
+ // --------------------------------------------------------------
+ // Testing
+ // --------------------------------------------------------------
+ /* lazy val scalacheckSettings: Seq[Setting[_]] = Seq(fullQuickScalaReference, crossPaths := false)*/
+ lazy val scalacheck = uri("git://github.com/rickynils/scalacheck.git")
+
+ lazy val testsuiteSettings: Seq[Setting[_]] = compilerDependentProjectSettings ++ partestTaskSettings ++ VerifyClassLoad.settings ++ Seq(
+ unmanagedBase <<= baseDirectory / "test/files/lib",
+ fullClasspath in VerifyClassLoad.checkClassLoad <<= (fullClasspath in scalaLibrary in Runtime).identity,
+ autoScalaLibrary := false,
+ checkSameLibrary <<= checkSameBinaryProjects(quickLib, strappLib),
+ checkSameCompiler <<= checkSameBinaryProjects(quickComp, strappComp),
+ checkSame <<= (checkSameLibrary, checkSameCompiler) map ((a,b) => ()),
+ autoScalaLibrary := false
+ )
+ lazy val continuationsTestsuiteSettings: Seq[Setting[_]] = testsuiteSettings ++ Seq(
+ scalacOptions in Test <++= (exportedProducts in Compile in continuationsPlugin) map {
+ case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
+ },
+ partestDirs <<= baseDirectory apply { bd =>
+ def mkFile(name: String) = bd / "test" / "files" / name
+ def mkTestType(name: String) = name.drop("continuations-".length).toString
+ Seq("continuations-neg", "continuations-run") map (t => mkTestType(t) -> mkFile(t)) toMap
+ }
+ )
+ val testsuite = (
+ Project("testsuite", file("."))
+ settings (testsuiteSettings:_*)
+ dependsOn (swing, scalaLibrary, scalaCompiler, fjbg, partest, scalacheck)
+ )
+ val continuationsTestsuite = (
+ Project("continuations-testsuite", file("."))
+ settings (continuationsTestsuiteSettings:_*)
+ dependsOn (partest, swing, scalaLibrary, scalaCompiler, fjbg)
+ )
+
+ // --------------------------------------------------------------
+ // Generating Documentation.
+ // --------------------------------------------------------------
+
+ // TODO - Migrate this into the dist project.
+ // Scaladocs
+ def distScalaInstance = makeScalaReference("dist", scalaLibrary, scalaCompiler, fjbg)
+ lazy val documentationSettings: Seq[Setting[_]] = dependentProjectSettings ++ Seq(
+ // TODO - Make these work for realz.
+ defaultExcludes in unmanagedSources in Compile := ((".*" - ".") || HiddenFileFilter ||
+ "reflect/Print.scala" ||
+ "reflect/Symbol.scala" ||
+ "reflect/Tree.scala" ||
+ "reflect/Type.scala" ||
+ "runtime/*$.scala" ||
+ "runtime/ScalaRuntime.scala" ||
+ "runtime/StringAdd.scala" ||
+ "scala/swing/test/*"),
+ sourceFilter in Compile := ("*.scala"),
+ unmanagedSourceDirectories in Compile <<= baseDirectory apply { dir =>
+ Seq(dir / "src" / "library" / "scala", dir / "src" / "actors", dir / "src" / "swing", dir / "src" / "continuations" / "library")
+ },
+ compile := inc.Analysis.Empty,
+ scaladocOptions in Compile in doc <++= (baseDirectory) map (bd =>
+ Seq("-sourcepath", (bd / "src" / "library").getAbsolutePath,
+ "-doc-no-compile", (bd / "src" / "library-aux").getAbsolutePath,
+ "-doc-source-url", """https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk/src/€{FILE_PATH}.scala#L1""",
+ "-doc-root-content", (bd / "compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt").getAbsolutePath
+ )),
+ classpathOptions in Compile := ClasspathOptions.manual
+ )
+ lazy val documentation = (
+ Project("documentation", file("."))
+ settings (documentationSettings: _*)
+ dependsOn(quickLib, quickComp, actors, fjbg, forkjoin, swing, continuationsLibrary)
+ )
+
+ // --------------------------------------------------------------
+ // Packaging a distro
+ // --------------------------------------------------------------
+
+ class ScalaToolRunner(classpath: Classpath) {
+ // TODO - Don't use the ant task directly...
+ lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.data.toURI.toURL).toArray, null)
+ lazy val mainClass = classLoader.loadClass("scala.tools.ant.ScalaTool")
+ lazy val executeMethod = mainClass.getMethod("execute")
+ lazy val setFileMethod = mainClass.getMethod("setFile", classOf[java.io.File])
+ lazy val setClassMethod = mainClass.getMethod("setClass", classOf[String])
+ lazy val setClasspathMethod = mainClass.getMethod("setClassPath", classOf[String])
+ lazy val instance = mainClass.newInstance()
+
+ def setClass(cls: String): Unit = setClassMethod.invoke(instance, cls)
+ def setFile(file: File): Unit = setFileMethod.invoke(instance, file)
+ def setClasspath(cp: String): Unit = setClasspathMethod.invoke(instance, cp)
+ def execute(): Unit = executeMethod.invoke(instance)
+ }
+
+ def genBinTask(
+ runner: ScopedTask[ScalaToolRunner],
+ outputDir: ScopedSetting[File],
+ classpath: ScopedTask[Classpath],
+ useClasspath: Boolean
+ ): Project.Initialize[sbt.Task[Map[File,String]]] = {
+ (runner, outputDir, classpath, streams) map { (runner, outDir, cp, s) =>
+ IO.createDirectory(outDir)
+ val classToFilename = Map(
+ "scala.tools.nsc.MainGenericRunner" -> "scala",
+ "scala.tools.nsc.Main" -> "scalac",
+ "scala.tools.nsc.ScalaDoc" -> "scaladoc",
+ "scala.tools.nsc.CompileClient" -> "fsc",
+ "scala.tools.scalap.Main" -> "scalap"
+ )
+ if (useClasspath) {
+ val classpath = Build.data(cp).map(_.getCanonicalPath).distinct.mkString(",")
+ s.log.debug("Setting classpath = " + classpath)
+ runner setClasspath classpath
+ }
+ def genBinFiles(cls: String, dest: File) = {
+ runner.setClass(cls)
+ runner.setFile(dest)
+ runner.execute()
+ // TODO - Mark generated files as executable (755 or a+x) that is *not* JDK6 specific...
+ dest.setExecutable(true)
+ }
+ def makeBinMappings(cls: String, binName: String): Map[File,String] = {
+ val file = outDir / binName
+ val winBinName = binName + ".bat"
+ genBinFiles(cls, file)
+ Map( file -> ("bin/"+binName), outDir / winBinName -> ("bin/"+winBinName) )
+ }
+ classToFilename.flatMap((makeBinMappings _).tupled).toMap
+ }
+ }
+ def runManmakerTask(classpath: ScopedTask[Classpath], scalaRun: ScopedTask[ScalaRun], mainClass: String, dir: String, ext: String): Project.Initialize[Task[Map[File,String]]] =
+ (classpath, scalaRun, streams, target) map { (cp, runner, s, target) =>
+ val binaries = Seq("fsc", "scala", "scalac", "scaladoc", "scalap")
+ binaries map { bin =>
+ val file = target / "man" / dir / (bin + ext)
+ val classname = "scala.man1." + bin
+ IO.createDirectory(file.getParentFile)
+ toError(runner.run(mainClass, Build.data(cp), Seq(classname, file.getAbsolutePath), s.log))
+ file -> ("man/" + dir + "/" + bin + ext)
+ } toMap
+ }
+
+ val genBinRunner = TaskKey[ScalaToolRunner]("gen-bin-runner",
+ "Creates a utility to generate script files for Scala.")
+ val genBin = TaskKey[Map[File,String]]("gen-bin",
+ "Creates script files for Scala distribution.")
+ val binDir = SettingKey[File]("binaries-directory",
+ "Directory where binary scripts will be located.")
+ val genBinQuick = TaskKey[Map[File,String]]("gen-quick-bin",
+ "Creates script files for testing against current Scala build classfiles (not local dist).")
+ val runManmakerMan = TaskKey[Map[File,String]]("make-man",
+ "Runs the man maker project to generate man pages")
+ val runManmakerHtml = TaskKey[Map[File,String]]("make-html",
+ "Runs the man maker project to generate html pages")
+
+ lazy val scalaDistSettings: Seq[Setting[_]] = Seq(
+ crossPaths := false,
+ target <<= (baseDirectory, name) apply (_ / "target" / _),
+ scalaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ genBinRunner <<= (fullClasspath in quickComp in Runtime) map (new ScalaToolRunner(_)),
+ binDir <<= target(_/"bin"),
+ genBin <<= genBinTask(genBinRunner, binDir, fullClasspath in Runtime, false),
+ binDir in genBinQuick <<= baseDirectory apply (_ / "target" / "bin"),
+ // Configure the classpath this way to avoid having .jar files and previous layers on the classpath.
+ fullClasspath in Runtime in genBinQuick <<= Seq(quickComp,quickLib,scalap,actors,swing,dbc,fjbg,jline,forkjoin).map(classDirectory in Compile in _).join.map(Attributed.blankSeq),
+ fullClasspath in Runtime in genBinQuick <++= (fullClasspath in Compile in jline),
+ genBinQuick <<= genBinTask(genBinRunner, binDir in genBinQuick, fullClasspath in Runtime in genBinQuick, true),
+ runManmakerMan <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitManPage", "man1", ".1"),
+ runManmakerHtml <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitHtml", "doc", ".html"),
+ // TODO - We could *really* clean this up in many ways. Let's look into making a Seq of "direct jars" (scalaLibrary, scalaCompiler, jline, scalap)
+ // a seq of "plugin jars" (continuationsPlugin) and "binaries" (genBin) and "documentation" mappings (genBin) that this can aggregate.
+ // really need to figure out a better way to pull jline + jansi.
+ makeDistMappings <<= (genBin,
+ runManmakerMan,
+ runManmakerHtml,
+ packageBin in scalaLibrary in Compile,
+ packageBin in scalaCompiler in Compile,
+ packageBin in jline in Compile,
+ packageBin in continuationsPlugin in Compile,
+ managedClasspath in jline in Compile,
+ packageBin in scalap in Compile) map {
+ (binaries, man, html, lib, comp, jline, continuations, jlineDeps, scalap) =>
+ val jlineDepMap: Seq[(File, String)] = jlineDeps.map(_.data).flatMap(_ x Path.flat) map { case(a,b) => a -> ("lib/"+b) }
+ binaries ++ man ++ html ++ jlineDepMap ++ Seq(
+ lib -> "lib/scala-library.jar",
+ comp -> "lib/scala-compiler.jar",
+ jline -> "lib/jline.jar",
+ continuations -> "misc/scala-devel/plugins/continuations.jar",
+ scalap -> "lib/scalap.jar"
+ ) toMap
+ },
+ // Add in some more dependencies
+ makeDistMappings <<= (makeDistMappings,
+ packageBin in swing in Compile,
+ packageBin in dbc in Compile) map {
+ (dist, s, d) =>
+ dist ++ Seq(s -> "lib/scala-swing.jar", d -> "lib/scala-dbc.jar")
+ },
+ makeDist <<= (makeDistMappings, baseDirectory, streams) map { (maps, dir, s) =>
+ s.log.debug("Map = " + maps.mkString("\n"))
+ val file = dir / "target" / "scala-dist.zip"
+ IO.zip(maps, file)
+ s.log.info("Created " + file.getAbsolutePath)
+ file
+ },
+ makeExplodedDist <<= (makeDistMappings, target, streams) map { (maps, dir, s) =>
+ def sameFile(f: File, f2: File) = f.getCanonicalPath == f2.getCanonicalPath
+ IO.createDirectory(dir)
+ IO.copy(for {
+ (file, name) <- maps
+ val file2 = dir / name
+ if !sameFile(file,file2)
+ } yield (file, file2))
+ // Hack to make binaries be executable. TODO - Fix for JDK 5 and below...
+ maps.values filter (_ startsWith "bin/") foreach (dir / _ setExecutable true)
+ dir
+ }
+ )
+ lazy val scaladist = (
+ Project("dist", file("."))
+ settings (scalaDistSettings: _*)
+ )
+}
+
+/** Matcher to make updated remote project references easier. */
+object ScopedExternalSetting {
+ def unapply[T](s: Setting[_]): Option[(URI, AttributeKey[_], Setting[_])] =
+ s.key.scope.project match {
+ case Select(p @ ProjectRef(uri, _)) => Some((uri, s.key.key, s))
+ case _ => None
+ }
+}
diff --git a/project/Layers.scala b/project/Layers.scala
new file mode 100644
index 0000000000..d39e58014c
--- /dev/null
+++ b/project/Layers.scala
@@ -0,0 +1,92 @@
+import sbt._
+import Keys._
+import com.jsuereth.git.GitKeys.gitRunner
+
+/** This trait stores all the helper methods to generate layers in Scala's layered build. */
+trait Layers extends Build {
+ // TODO - Clean this up or use a self-type.
+
+ /** Default SBT overrides needed for layered compilation. */
+ def settingOverrides: Seq[Setting[_]]
+ /** Reference to the jline project */
+ def jline: Project
+ /** Reference to forkjoin library */
+ def forkjoin: Project
+ /** Reference to Fast-Java-Bytecode-Generator library */
+ def fjbg: Project
+ /** A setting that adds some external dependencies. */
+ def externalDeps: Setting[_]
+
+ /** Creates a reference Scala version that can be used to build other projects. This takes in the raw
+ * library, compiler and fjbg libraries as well as a string representing the layer name (used for compiling the compile-interface).
+ */
+ def makeScalaReference(layer : String, library: Project, compiler: Project, fjbg: Project) =
+ scalaInstance <<= (appConfiguration in library,
+ version in library,
+ (exportedProducts in library in Compile),
+ (exportedProducts in compiler in Compile),
+ (exportedProducts in fjbg in Compile),
+ (fullClasspath in jline in Runtime)) map {
+ (app, version: String, lib: Classpath, comp: Classpath, fjbg: Classpath, jline: Classpath) =>
+ val launcher = app.provider.scalaProvider.launcher
+ (lib,comp) match {
+ case (Seq(libraryJar), Seq(compilerJar)) =>
+ ScalaInstance(
+ version + "-" + layer + "-",
+ libraryJar.data,
+ compilerJar.data,
+ launcher,
+ ((fjbg.files++jline.files):_*))
+ case _ => error("Cannot build a ScalaReference with more than one classpath element")
+ }
+ }
+
+ /** Creates a "layer" of Scala compilation. That is, this will build the next version of Scala from a previous version.
+ * Returns the library project and compiler project from the next layer.
+ * Note: The library and compiler are not *complete* in the sense that they are missing things like "actors" and "fjbg".
+ */
+ def makeLayer(layer: String, referenceScala: Setting[Task[ScalaInstance]]) : (Project, Project) = {
+ val library = Project(layer + "-library", file(".")) settings(settingOverrides: _*) settings(
+ version := layer,
+ // TODO - use depends on.
+ unmanagedClasspath in Compile <<= (exportedProducts in forkjoin in Compile).identity,
+ managedClasspath in Compile := Seq(),
+ scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "library"),
+ resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "library"),
+ defaultExcludes in unmanagedResources := ("*.scala" | "*.java" | "*.disabled"),
+ // TODO - Allow other scalac option settings.
+ scalacOptions in Compile <++= (scalaSource in Compile) map (src => Seq("-sourcepath", src.getAbsolutePath)),
+ classpathOptions := ClasspathOptions.manual,
+ resourceGenerators in Compile <+= (baseDirectory, version, resourceManaged, gitRunner, streams) map Release.generatePropertiesFile("library.properties"),
+ referenceScala
+ )
+
+ // Define the compiler
+ val compiler = Project(layer + "-compiler", file(".")) settings(settingOverrides:_*) settings(
+ version := layer,
+ scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "compiler"),
+ resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "compiler"),
+ unmanagedSourceDirectories in Compile <+= (baseDirectory) apply (_ / "src" / "msil"),
+ defaultExcludes := ("tests"),
+ javacOptions ++= Seq("-source", "1.4"),
+ defaultExcludes in unmanagedResources := "*.scala",
+ resourceGenerators in Compile <+= (baseDirectory, version, resourceManaged, gitRunner, streams) map Release.generatePropertiesFile("compiler.properties"),
+ // Note, we might be able to use the default task, but for some reason ant was filtering files out. Not sure what's up, but we'll
+ // stick with that for now.
+ unmanagedResources in Compile <<= (baseDirectory) map {
+ (bd) =>
+ val dirs = Seq(bd / "src" / "compiler")
+ dirs.descendentsExcept( ("*.xml" | "*.html" | "*.gif" | "*.png" | "*.js" | "*.css" | "*.tmpl" | "*.swf" | "*.properties" | "*.txt"),"*.scala").get
+ },
+ // TODO - Use depends on *and* SBT's magic dependency mechanisms...
+ unmanagedClasspath in Compile <<= Seq(forkjoin, library, fjbg, jline).map(exportedProducts in Compile in _).join.map(_.flatten),
+ classpathOptions := ClasspathOptions.manual,
+ externalDeps,
+ referenceScala
+ )
+
+ // Return the generated projects.
+ (library, compiler)
+ }
+
+}
diff --git a/project/Partest.scala b/project/Partest.scala
new file mode 100644
index 0000000000..6fc5e11958
--- /dev/null
+++ b/project/Partest.scala
@@ -0,0 +1,141 @@
+import sbt._
+
+import Build._
+import Keys._
+import Project.Initialize
+import complete._
+import scala.collection.{ mutable, immutable }
+
+/** This object defines the partest tasks and settings used by the Scala build. */
+object partest {
+
+ /** The key for the run-partest task that exists in Scala's test suite. */
+ lazy val runPartest = TaskKey[Unit]("run-partest", "Runs the partest test suite against the quick.")
+ lazy val runPartestSingle = InputKey[Unit]("run-partest-single", "Runs a single partest test against quick.")
+ lazy val runPartestFailed = TaskKey[Unit]("run-partest-failed", "Runs failed partest tests.")
+ lazy val runPartestGrep = InputKey[Unit]("run-partest-grep", "Runs a single partest test against quick.")
+ lazy val partestRunner = TaskKey[PartestRunner]("partest-runner", "Creates a runner that can run partest suites")
+ lazy val partestTests = TaskKey[Map[String, Seq[File]]]("partest-tests", "Creates a map of test-type to a sequence of the test files/directoryies to test.")
+ lazy val partestDirs = SettingKey[Map[String,File]]("partest-dirs", "The map of partest test type to directory associated with that test type")
+
+ lazy val partestTaskSettings: Seq[Setting[_]] = Seq(
+ javaOptions in partestRunner := Seq("-Xmx512M -Xms256M"),
+ partestDirs <<= baseDirectory apply { bd =>
+ partestTestTypes map (kind => kind -> (bd / "test" / "files" / kind)) toMap
+ },
+ partestRunner <<= partestRunnerTask(fullClasspath in Runtime, javaOptions in partestRunner),
+ partestTests <<= partestTestsTask(partestDirs),
+ runPartest <<= runPartestTask(partestRunner, partestTests, scalacOptions in Test),
+ runPartestSingle <<= runSingleTestTask(partestRunner, partestDirs, scalacOptions in Test),
+ runPartestFailed <<= runPartestTask(partestRunner, partestTests, scalacOptions in Test, Seq("--failed"))
+ )
+
+ // What's fun here is that we want "*.scala" files *and* directories in the base directory...
+ def partestResources(base: File, testType: String): PathFinder = testType match {
+ case "res" => base ** "*.res"
+ case "buildmanager" => base * "*"
+ // TODO - Only allow directories that have "*.scala" children...
+ case _ => base * "*" filter { f => !f.getName.endsWith(".obj") && (f.isDirectory || f.getName.endsWith(".scala")) }
+ }
+ lazy val partestTestTypes = Seq("run", "jvm", "pos", "neg", "buildmanager", "res", "shootout", "scalap", "specialized", "presentation", "scalacheck")
+
+ // TODO - Figure out how to specify only a subset of resources...
+ def partestTestsTask(testDirs: ScopedSetting[Map[String,File]]): Project.Initialize[Task[Map[String, Seq[File]]]] =
+ testDirs map (m => m map { case (kind, dir) => kind -> partestResources(dir, kind).get })
+
+ // TODO - Split partest task into Configurations and build a Task for each Configuration.
+ // *then* mix all of them together for run-testsuite or something clever like this.
+ def runPartestTask(runner: ScopedTask[PartestRunner], testRuns: ScopedTask[Map[String,Seq[File]]], scalacOptions: ScopedTask[Seq[String]], extraArgs: Seq[String] = Seq()): Initialize[Task[Unit]] = {
+ (runner, testRuns, scalacOptions, streams) map {
+ (runner, runs, scalaOpts, s) => runPartestImpl(runner, runs, scalaOpts, s, extraArgs)
+ }
+ }
+ private def runPartestImpl(runner: PartestRunner, runs: Map[String, Seq[File]], scalacOptions: Seq[String], s: TaskStreams, extras: Seq[String] = Seq()): Unit = {
+ val testArgs = runs.toSeq collect { case (kind, files) if files.nonEmpty => Seq("-" + kind, files mkString ",") } flatten
+ val extraArgs = scalacOptions flatMap (opt => Seq("-scalacoption", opt))
+
+ import collection.JavaConverters._
+ val results = runner run Array(testArgs ++ extraArgs ++ extras: _*) asScala
+ // TODO - save results
+ val failures = results collect {
+ case (path, 1) => path + " [FAILED]"
+ case (path, 2) => path + " [TIMEOUT]"
+ }
+
+ if (failures.isEmpty)
+ s.log.info(""+results.size+" tests passed.")
+ else {
+ failures foreach (s.log error _)
+ error("Test Failures! ("+failures.size+" of "+results.size+")")
+ }
+ }
+
+ def convertTestsForAutoComplete(tests: Map[String, Seq[File]]): (Set[String], Set[String]) =
+ (tests.keys.toSet, tests.values flatMap (_ map cleanFileName) toSet)
+
+  /** Takes a test file, as sent to Partest, and cleans it up for auto-complete. */
+ def cleanFileName(file: File): String = {
+ // TODO - Something intelligent here
+ val TestPattern = ".*/test/(.*)".r
+ file.getCanonicalPath match {
+ case TestPattern(n) => n
+ case _ => file.getName
+ }
+ }
+
+ // TODO - Allow a filter for the second part of this...
+ def runSingleTestParser(testDirs: Map[String, File]): State => Parser[(String, String)] = {
+ import DefaultParsers._
+ state => {
+ Space ~> token(NotSpace examples testDirs.keys.toSet) flatMap { kind =>
+ val files: Set[String] = testDirs get kind match {
+ case Some(dir) =>
+ partestResources(dir, kind).get flatMap (_ relativeTo dir) map (_ getName) toSet
+ case _ =>
+ Set()
+ }
+ Space ~> token(NotSpace examples files) map (kind -> _)
+ }
+ }
+ }
+
+ def runSingleTestTask(runner: ScopedTask[PartestRunner], testDirs: ScopedSetting[Map[String, File]], scalacOptions: ScopedTask[Seq[String]]) : Initialize[InputTask[Unit]] = {
+ import sbinary.DefaultProtocol._
+
+ InputTask(testDirs apply runSingleTestParser) { result =>
+ (runner, result, testDirs, scalacOptions, streams) map {
+ case (r, (kind, filter), dirs, o, s) =>
+ // TODO - Use partest resources somehow to filter the filter correctly....
+ val files: Seq[File] =
+ if (filter == "*") partestResources(dirs(kind), kind).get
+ else (dirs(kind) * filter).get
+
+ runPartestImpl(r, Map(kind -> files), o, s)
+ }
+ }
+ }
+
+ def partestRunnerTask(classpath: ScopedTask[Classpath], javacOptions: ScopedSetting[Seq[String]]): Project.Initialize[Task[PartestRunner]] =
+ (classpath, javacOptions) map ((cp, opts) => new PartestRunner(Build.data(cp), opts mkString " "))
+}
+
+class PartestRunner(classpath: Seq[File], javaOpts: String) {
+ // Classloader that does *not* have this as parent, for differing Scala version.
+ lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.toURI.toURL).toArray, null)
+ lazy val (mainClass, mainMethod) = try {
+ val c = classLoader.loadClass("scala.tools.partest.nest.SBTRunner")
+ val m = c.getMethod("mainReflect", classOf[Array[String]])
+ (c,m)
+ }
+ lazy val classPathArgs = Seq("-cp", classpath.map(_.getAbsoluteFile).mkString(java.io.File.pathSeparator))
+ def run(args: Array[String]): java.util.Map[String,Int] = try {
+    // TODO - undo these settings after running.  Also globals are bad.
+ System.setProperty("partest.java_opts", javaOpts)
+ val allArgs = (classPathArgs ++ args).toArray
+ mainMethod.invoke(null, allArgs).asInstanceOf[java.util.Map[String,Int]]
+ } catch {
+ case e =>
+ //error("Could not run Partest: " + e)
+ throw e
+ }
+}
diff --git a/project/Release.scala b/project/Release.scala
new file mode 100644
index 0000000000..5ed77548fc
--- /dev/null
+++ b/project/Release.scala
@@ -0,0 +1,115 @@
+import sbt._
+import Keys._
+import _root_.com.jsuereth.git.GitRunner
+
+object Release {
+
+ // TODO - move more of the dist project over here...
+
+
+ lazy val pushStarr = Command.command("push-starr") { (state: State) =>
+ def f(s: Setting[_]): Setting[_] = s.key.key match {
+ case version.key => // TODO - use full version
+ s.asInstanceOf[Setting[String]].mapInit( (_,_) => timeFormat format (new java.util.Date))
+ case organization.key =>
+ s.asInstanceOf[Setting[String]].mapInit( (_,_) => "org.scala-lang.bootstrapp")
+ // TODO - Switch publish repo to be typesafe starr repo.
+ case publishTo.key =>
+ s.asInstanceOf[Setting[Option[Resolver]]].mapInit((_,_) => Some("Starr Repo" at "http://typesafe.artifactoryonline.com/typesafe/starr-releases/"))
+ case _ => s
+ }
+ val extracted = Project.extract(state)
+ import extracted._
+ // Swap version on projects
+ val transformed = session.mergeSettings map ( s => f(s) )
+ val newStructure = Load.reapply(transformed, structure)
+ val newState = Project.setProject(session, newStructure, state)
+ // TODO - Run tasks. Specifically, push scala-compiler + scala-library. *Then* bump the STARR version locally.
+ // The final course of this command should be:
+ // publish-local
+ // Project.evaluateTask(publishLocal, newState)
+ // bump STARR version setting
+ // TODO - Define Task
+ // Rebuild quick + test to ensure it works
+ // Project.evaluateTask(test, newState)
+ // push STARR remotely
+ Project.evaluateTask(publish, newState)
+ // Revert to previous project state.
+ Project.setProject(session, structure, state)
+ }
+
+ // TODO - Autocomplete
+ /*lazy val setStarrHome = Command.single("set-starr-home") { (state: State, homeDir: String) =>
+ def f(s: Setting[_]): Setting[_] =
+ if(s.key.key == scalaInstance.key) {
+ s.asInstanceOf[Setting[ScalaInstance]] mapInit { (key, value) =>
+ if(value.version == "starr")
+ scalaInstance <<= appConfiguration map { app =>
+ val launcher = app.provider.scalaProvider.launcher
+ ScalaInstance("starr", new File(homeDir), launcher)
+ }
+ else value
+ }
+ } else s
+ val extracted = Project.extract(state)
+ import extracted._
+ val transformed = session.mergeSettings map f
+ val newStructure = Load.reapply(transformed, structure)
+ Project.setProject(session, newStructure, state)
+ }*/
+
+ lazy val timeFormat = {
+ val formatter = new java.text.SimpleDateFormat("yyyyMMdd'T'HHmmss")
+ formatter.setTimeZone(java.util.TimeZone.getTimeZone("GMT"))
+ formatter
+ }
+
+ /** This generates a properties file, if it does not already exist, with the maximum lastmodified timestamp
+ * of any source file. */
+ def generatePropertiesFile(name: String)(baseDirectory: File, version: String, dir: File, git: GitRunner, s: TaskStreams): Seq[File] = {
+ // TODO - We can probably clean this up by moving caching bits elsewhere perhaps....
+ val target = dir / name
+ // TODO - Regenerate on triggers, like recompilation or something...
+ val fullVersion = makeFullVersionString(baseDirectory, version, git, s)
+ def hasSameVersion: Boolean = {
+ val props = new java.util.Properties
+ val in = new java.io.FileInputStream(target)
+ try props.load(in) finally in.close()
+ def withoutDate(s: String): String = s.reverse.dropWhile (_ != '.').reverse
+ withoutDate(fullVersion) == withoutDate(props getProperty "version.number")
+ }
+ if (!target.exists || !hasSameVersion) {
+ makePropertiesFile(target, fullVersion)
+ }
+ target :: Nil
+ }
+
+ // This creates the *.properties file used to determine the current version of scala at runtime. TODO - move these somewhere utility like.
+ def makePropertiesFile(f: File, version: String): Unit =
+ IO.write(f, "version.number = "+version+"\ncopyright.string = Copyright 2002-2011, LAMP/EPFL")
+
+ def makeFullVersionString(baseDirectory: File, baseVersion: String, git: GitRunner, s: TaskStreams) = baseVersion+"."+getGitRevision(baseDirectory, git, currentDay, s)
+
+ // TODO - do we want this in the build number?
+ def currentDay = (new java.text.SimpleDateFormat("yyyyMMdd'T'HHmmss")) format (new java.util.Date)
+
+
+
+ def getGitRevision(baseDirectory: File, git: GitRunner, date: String, s: TaskStreams) = {
+
+ val mergeBase = {
+ // TODO - Cache this value.
+ // git("merge-base","v2.8.2","v2.9.1","master")(baseDirectory, s.log)
+ "df13e31bbb"
+ }
+ // current commit sha
+ val sha =
+ git("rev-list", "-n", "1", "HEAD")(baseDirectory, s.log)
+
+ val commits =
+ git("--no-pager", "log", "--pretty=oneline", mergeBase +"..HEAD")(baseDirectory, s.log) split "[\r\n]+" size
+
+ "rdev-%d-%s-g%s" format (commits, date, sha.substring(0,7))
+ }
+
+}
diff --git a/project/Sametest.scala b/project/Sametest.scala
new file mode 100644
index 0000000000..f44fe8ec65
--- /dev/null
+++ b/project/Sametest.scala
@@ -0,0 +1,66 @@
+import sbt._
+
+import Build._
+import Keys._
+
+// This code is adapted from scala.tools.ant.Same by Gilles Dubochet.
+object SameTest {
+ lazy val checkSame: TaskKey[Unit] = TaskKey("check-same-binaries", "checks whether or not the class files generated by scala are the same.")
+ lazy val checkSameLibrary: TaskKey[Unit] = TaskKey("check-same-lib-binaries", "checks whether or not the librayr class files generated by scala are the same.")
+ lazy val checkSameCompiler: TaskKey[Unit] = TaskKey("check-same-comp-binaries", "checks whether or not the compiler class files generated by scala are the same.")
+
+ def checkSameBinaryProjects(lhs: Project, rhs: Project): Project.Initialize[Task[Unit]] =
+ (classDirectory in Compile in lhs, classDirectory in Compile in rhs,
+ compile in Compile in lhs, compile in Compile in rhs, streams) map { (lhs,rhs, _, _, s) =>
+ // Now we generate a complete set of relative files and then
+ def relativeClasses(dir: File) = (dir ** "*.class").get.flatMap(IO.relativize(dir,_).toList)
+ // This code adapted from SameTask in the compiler.
+ def hasDifferentFiles(filePairs: Seq[(File,File)]): Boolean = {
+ filePairs exists { case (a,b) =>
+ if (!a.canRead || !b.canRead) {
+ s.log.error("Either ["+a+"] or ["+b+"] is missing.")
+ true
+ } else {
+ s.log.debug("Checking for binary differences in ["+a+"] against ["+b+"].")
+ val diff = !checkSingleFilePair(a,b)
+ if(diff) s.log.error("["+a+"] differs from ["+b+"]")
+ diff
+ }
+ }
+ }
+ val allClassMappings = (relativeClasses(lhs) ++ relativeClasses(rhs)).distinct
+ val comparisons = allClassMappings.map(f => new File(lhs, f) -> new File(rhs, f))
+ val result = hasDifferentFiles(comparisons)
+ if (result) error("Binary artifacts differ.")
+ }
+
+ val bufferSize = 1024
+
+ // Tests whether two files are binary equivalents of each other.
+ def checkSingleFilePair(originFile: File, destFile: File): Boolean = {
+ Using.fileInputStream(originFile) { originStream =>
+ Using.fileInputStream(destFile) { destStream =>
+ val originBuffer = new Array[Byte](bufferSize)
+ val destBuffer = new Array[Byte](bufferSize)
+ var equalNow = true
+ var originRemaining = originStream.read(originBuffer)
+ var destRemaining = destStream.read(destBuffer)
+ while (originRemaining > 0 && equalNow) {
+ if (originRemaining == destRemaining) {
+ for (idx <- 0 until originRemaining) {
+ equalNow = equalNow && (originBuffer(idx) == destBuffer(idx))
+ }
+ } else {
+ equalNow = false
+ }
+ originRemaining = originStream.read(originBuffer)
+ destRemaining = destStream.read(destBuffer)
+ }
+ if (destRemaining > 0) equalNow = false
+ equalNow
+ }
+ }
+ }
+
+
+}
diff --git a/project/ShaResolve.scala b/project/ShaResolve.scala
new file mode 100644
index 0000000000..f54e96c0c6
--- /dev/null
+++ b/project/ShaResolve.scala
@@ -0,0 +1,117 @@
+import sbt._
+
+import Build._
+import Keys._
+import Project.Initialize
+import scala.collection.{ mutable, immutable }
+import scala.collection.parallel.CompositeThrowable
+import java.security.MessageDigest
+
+
+/** Helpers to resolve SHA artifacts from typesafe repo. */
+object ShaResolve {
+ import dispatch.{Http,url}
+ val remote_urlbase="http://typesafe.artifactoryonline.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
+
+ val pullBinaryLibs = TaskKey[Unit]("pull-binary-libs", "Pulls binary libs by the SHA key.")
+ val pushBinaryLibs = TaskKey[Unit]("push-binary-libs", "Pushes binary libs whose SHA has changed.")
+ val binaryLibCache = SettingKey[File]("binary-lib-cache", "Location of the cache of binary libs for this scala build.")
+
+ def settings: Seq[Setting[_]] = Seq(
+ binaryLibCache in ThisBuild := file(System.getProperty("user.home")) / ".sbt" / "cache" / "scala",
+ pullBinaryLibs in ThisBuild <<= (baseDirectory, binaryLibCache, streams) map resolveLibs
+ )
+
+ def resolveLibs(dir: File, cacheDir: File, s: TaskStreams): Unit = loggingParallelExceptions(s) {
+ val files = (dir / "test" / "files" ** "*.desired.sha1") +++ (dir / "lib" ** "*.desired.sha1")
+ for {
+ (file, name) <- (files x relativeTo(dir)).par
+ uri = name.dropRight(13).replace('\\', '/')
+ jar = dir / uri
+ if !jar.exists || !isValidSha(file)
+ sha = getShaFromShafile(file)
+ } pullFile(jar, sha + "/" + uri, cacheDir, sha, s)
+ }
+
+ @inline final def loggingParallelExceptions[U](s: TaskStreams)(f: => U): U = try f catch {
+ case t: CompositeThrowable =>
+ s.log.error("Error during parallel execution, GET READY FOR STACK TRACES!!")
+ t.throwables foreach (t2 => s.log.trace(t2))
+ throw t
+ }
+
+ def getShaFromShafile(file: File): String = parseShaFile(file)._2
+
+ // This should calculate the SHA sum of a file the same as the linux process.
+ def calculateSha(file: File): String = {
+ val digest = MessageDigest.getInstance("SHA1")
+ val in = new java.io.FileInputStream(file);
+ val buffer = new Array[Byte](8192)
+ try {
+ def read(): Unit = in.read(buffer) match {
+ case x if x <= 0 => ()
+ case size => digest.update(buffer, 0, size); read()
+ }
+ read()
+ } finally in.close()
+ val sha = convertToHex(digest.digest())
+ sha
+ }
+
+ // TODO - Prettier way of doing this...
+ private def convertToHex(data: Array[Byte]): String = {
+ val buf = new StringBuffer
+ for (i <- 0 until data.length) {
+ var halfbyte = (data(i) >>> 4) & 0x0F;
+ var two_halfs = 0;
+ while(two_halfs < 2) {
+ if ((0 <= halfbyte) && (halfbyte <= 9))
+ buf.append(('0' + halfbyte).toChar)
+ else
+ buf.append(('a' + (halfbyte - 10)).toChar);
+ halfbyte = data(i) & 0x0F;
+ two_halfs += 1
+ }
+ }
+ return buf.toString
+ }
+
+ // Parses a sha file into a file and a sha.
+ def parseShaFile(file: File): (File, String) =
+ IO.read(file).split("\\s") match {
+ case Array(sha, filename) if filename.startsWith("?") => (new File(file.getParentFile, filename.drop(1)), sha)
+ case Array(sha, filename) => (new File(file.getParentFile, filename), sha)
+ case _ => error(file.getAbsolutePath + " is an invalid sha file")
+ }
+
+
+ def isValidSha(file: File): Boolean =
+ try {
+ val (jar, sha) = parseShaFile(file)
+ jar.exists && calculateSha(jar) == sha
+ } catch {
+ case t: Exception => false
+ }
+
+
+ def pullFile(file: File, uri: String, cacheDir: File, sha: String, s: TaskStreams): Unit = {
+ val cachedFile = cacheDir / uri
+ if (!cachedFile.exists || calculateSha(cachedFile) != sha) {
+ // Ensure the directory for the cache exists.
+ cachedFile.getParentFile.mkdirs()
+ val url = remote_urlbase + "/" + uri
+ val fous = new java.io.FileOutputStream(cachedFile)
+ s.log.info("Pulling [" + cachedFile + "] to cache")
+ try Http(dispatch.url(url) >>> fous) finally fous.close()
+ }
+ s.log.info("Pulling [" + file + "] from local cache")
+ IO.copyFile(cachedFile, file)
+ }
+
+ def pushFile(file: File, uri: String, user: String, pw: String): Unit = {
+ val url = remote_urlbase + "/" + uri
+ val sender = dispatch.url(url).PUT.as(user,pw) <<< (file, "application/java-archive")
+ // TODO - output to logger.
+ Http(sender >>> System.out)
+ }
+}
diff --git a/project/VerifyClassLoad.scala b/project/VerifyClassLoad.scala
new file mode 100644
index 0000000000..c8eebb1159
--- /dev/null
+++ b/project/VerifyClassLoad.scala
@@ -0,0 +1,46 @@
+import sbt._
+
+import Build._
+import Keys._
+
+// This is helper code to validate that generated class files will succeed in bytecode verification at class-load time.
+object VerifyClassLoad {
+ lazy val checkClassLoad: TaskKey[Unit] = TaskKey("check-class-load", "checks whether or not the class files generated by scala are deemed acceptable by classloaders.")
+ lazy val checkClassRunner: TaskKey[ClassVerifyRunner] = TaskKey("check-class-runner", "A wrapper around reflective calls to the VerifyClass class.")
+
+
+ def settings: Seq[Setting[_]] = Seq(
+ checkClassRunner <<= (fullClasspath in Runtime) map (cp => new ClassVerifyRunner(data(cp))),
+ fullClasspath in checkClassLoad := Seq(),
+ checkClassLoad <<= (checkClassRunner, fullClasspath in checkClassLoad, streams) map { (runner, dirs, s) =>
+ import collection.JavaConverters._
+ val results = runner.run(data(dirs).map(_.getAbsolutePath).toArray).asScala
+
+ s.log.info("Processed " + results.size + " classes.")
+ val errors = results.filter(_._2 != null)
+ for( (name, result) <- results; if result != null) {
+ s.log.error(name + " had error: " + result)
+ }
+ if(errors.size > 0) error("Classload validation errors encountered")
+ ()
+ }
+ )
+
+ // TODO - Use
+ class ClassVerifyRunner(classpath: Seq[File]) {
+ // Classloader that does *not* have this as parent, for differing Scala version.
+ lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.toURI.toURL).toArray, null)
+ lazy val (mainClass, mainMethod) = try {
+ val c = classLoader.loadClass("scala.tools.util.VerifyClass")
+ val m = c.getMethod("run", classOf[Array[String]])
+ (c,m)
+ }
+ def run(args: Array[String]): java.util.Map[String,String] = try {
+ mainMethod.invoke(null, args).asInstanceOf[java.util.Map[String,String]]
+ } catch {
+ case e =>
+ //error("Could not run Partest: " + e)
+ throw e
+ }
+ }
+}
diff --git a/project/plugins.sbt b/project/plugins.sbt
new file mode 100644
index 0000000000..b49ece7527
--- /dev/null
+++ b/project/plugins.sbt
@@ -0,0 +1,9 @@
+resolvers += Resolver.url("Typesafe nightlies", url("https://typesafe.artifactoryonline.com/typesafe/ivy-snapshots/"))(Resolver.ivyStylePatterns)
+
+resolvers += Resolver.url("scalasbt", new URL("http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases"))(Resolver.ivyStylePatterns)
+
+resolvers += "jgit-repo" at "http://download.eclipse.org/jgit/maven"
+
+libraryDependencies += "net.databinder" %% "dispatch-http" % "0.8.6"
+
+
diff --git a/project/project/Build.scala b/project/project/Build.scala
new file mode 100644
index 0000000000..1ceb7e2ef2
--- /dev/null
+++ b/project/project/Build.scala
@@ -0,0 +1,7 @@
+import sbt._
+object PluginDef extends Build {
+ override def projects = Seq(root)
+ lazy val root = Project("plugins", file(".")) dependsOn(proguard, git)
+ lazy val proguard = uri("git://github.com/jsuereth/xsbt-proguard-plugin.git")
+ lazy val git = uri("git://github.com/sbt/sbt-git-plugin.git#scala-build")
+}
diff --git a/src/compiler/scala/reflect/runtime/JavaToScala.scala b/src/compiler/scala/reflect/runtime/JavaToScala.scala
index 5297ea6db4..afd623b833 100644
--- a/src/compiler/scala/reflect/runtime/JavaToScala.scala
+++ b/src/compiler/scala/reflect/runtime/JavaToScala.scala
@@ -34,8 +34,10 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
val global: JavaToScala.this.type = self
}
- protected def defaultReflectiveClassLoader(): JClassLoader =
- Thread.currentThread.getContextClassLoader
+ protected def defaultReflectiveClassLoader(): JClassLoader = {
+ val cl = Thread.currentThread.getContextClassLoader
+ if (cl == null) getClass.getClassLoader else cl
+ }
/** Paul: It seems the default class loader does not pick up root classes, whereas the system classloader does.
* Can you check with your newly acquired classloader fu whether this implementation makes sense?
diff --git a/src/manual/scala/tools/docutil/EmitHtml.scala b/src/manual/scala/tools/docutil/EmitHtml.scala
index ddfb8cb0f2..43d097a137 100644
--- a/src/manual/scala/tools/docutil/EmitHtml.scala
+++ b/src/manual/scala/tools/docutil/EmitHtml.scala
@@ -201,164 +201,26 @@ object EmitHtml {
out println "</body>"
out println "</html>"
}
-/* */
-/*
- private def group(ns: Iterable[NodeSeq]): NodeSeq = {
- val zs = new NodeBuffer
- for (z <- ns) { zs &+ z }
- zs
- }
-
- def emitSection(section: Section, depth: int): NodeSeq = {
- def emitText(text: AbstractText): NodeSeq = text match {
- case seq:SeqText =>
- group(seq.components.toList.map(item => emitText(item)))
-
- case Text(text) =>
- scala.xml.Text(escape(text))
-
- case MDash =>
- scala.xml.Text("&#8212;")
-
- case NDash =>
- scala.xml.Text("&#8211;")
-
- case Bold(text) =>
- <b>{emitText(text)}</b>
-
- case Italic(text) =>
- <i>{emitText(text)}</i>
-
- case Emph(text) =>
- <em>{emitText(text)}</em>
-
- case Mono(text) =>
- <code>{emitText(text)}</code>
-
- case Quote(text) =>
- emitText("\"" & text & "\"")
-
- case DefinitionList(definitions @ _*) =>
- <ins><dl>
- {definitions.toList.map(d =>
- <dt>{emitText(d.term)}</dt>
- <dd>{emitText(d.description)}</dd>
- )}
- </dl></ins>
-
- case Link(label, url) =>
- <a href={url}>{emitText(label)}</a>
-
- case _ =>
- error("unknown text node " + text)
- }
-
- def emitParagraph(para: Paragraph): NodeSeq = para match {
- case TextParagraph(text) =>
- <p>{emitText(text)}</p>
- case BlockQuote(text) =>
- <blockquote>{emitText(text)}</blockquote>
-
- case CodeSample(text) =>
- <blockquote><pre>{escape(text)}</pre></blockquote>
-
- case lst:BulletList =>
- <ul>
- {lst.items.toList.map(item => <li>{emitText(item)}</li>)}
- </ul>
-
- case lst:NumberedList =>
- <ol>
- {lst.items.toList.map(item => <li>{emitText(item)}</li>)}
- </ol>
-
- case TitledPara(title, text) =>
- <p><strong>{escape(title)}</strong></p>
- {emitText(text)}
-
- case EmbeddedSection(sect) =>
- {emitSection(sect, depth + 1)}
-
- case _ =>
- error("unknown paragraph node " + para)
- }
-
- val name = section.title.replaceAll("\\p{Space}", "_").toLowerCase()
- <h3 id={name}>{section.title}</h3>.concat(
- group(section.paragraphs.toList.map(p => emitParagraph(p))))
- }
-
- private def emit3columns(col1: String, col2: String, col3: String): NodeSeq =
- <div style="float:left;">{col1}</div>
- <div style="float:right;">{col3}</div>
- <div style="text-align:center;">{col2}</div>
- <div style="clear:both;"></div>
-
- private def emitHeader(col1: String, col2: String, col3: String): NodeSeq =
- <div style="margin: 0 0 2em 0;">
- {emit3columns(col1, col2, col3)}
- </div>
-
- private def emitFooter(col1: String, col2: String, col3: String): NodeSeq = {
- scala.xml.Comment("footer")
- <div style="margin: 2em 0 0 0;">
- {emit3columns(col1, col2, col3)}
- </div>
+ def main(args: Array[String]) = args match{
+ case Array(classname) => emitHtml(classname)
+ case Array(classname, file, _*) => emitHtml(classname, new java.io.FileOutputStream(file))
+ case _ => sys.exit(1)
}
- def emitDocument(document: Document, addDocType: Boolean) = {
- val name = document.title + "(" + document.category.id + ")"
- val doc =
- <html xml:lang="en">
- <head>
- <title>{document.title}</title>
- <meta http-equiv="Content-Language" content="en"/>
- <meta http-equiv="Content-Type" content={"text/html; charset=" + document.encoding}/>
- <meta name="Author" content={document.author}/>
- <style type="text/css">
- {" blockquote, pre { margin:1em 4em 1em 4em; }\n" +
- " p { margin:1em 2em 1em 2em; text-align:justify; }\n"}
- </style>
- </head>
- <body>
- {emitHeader(name, "" + document.category, name)}
- {document.sections.map(s => emitSection(s, 2))}
- {emitFooter("version " + document.version, document.date, name)}
- </body>
- </html>
- out.println(doc)
-/*
- val w = new java.io.StringWriter
- val id = scala.xml.dtd.PublicID("PUBLIC", null)
- val dtd = null //scala.xml.dtd.DEFAULT(true, "")
- val doctype = scala.xml.dtd.DocType("html", id, null) //List(dtd))
- XML.write(w, doc, document.encoding, true/ *xmlDecl* /, doctype)
- out.println(w.toString())
-*/
- }
-*/
- def main(args: Array[String]) {
- if (args.length < 1) {
- System.err println "usage: EmitHtml <classname>"
- sys.exit(1)
- }
+ def emitHtml(classname: String, outStream: java.io.OutputStream = out.out) {
+ if(outStream != out.out) out setOut outStream
try {
val cl = this.getClass.getClassLoader()
- val clasz = cl loadClass args(0)
+ val clasz = cl loadClass classname
val meth = clasz getDeclaredMethod "manpage"
val doc = meth.invoke(null).asInstanceOf[Document]
emitDocument(doc)
} catch {
case ex: Exception =>
ex.printStackTrace()
- System.err println "Error in EmitHtml"
+ System.err println "Error in EmitManPage"
sys.exit(1)
}
}
-
- def emitHtml(classname: String, outStream: java.io.OutputStream) {
- out setOut outStream
- main(Array(classname))
- }
}
diff --git a/src/manual/scala/tools/docutil/EmitManPage.scala b/src/manual/scala/tools/docutil/EmitManPage.scala
index 4a66e2ed07..3e0b02a415 100644
--- a/src/manual/scala/tools/docutil/EmitManPage.scala
+++ b/src/manual/scala/tools/docutil/EmitManPage.scala
@@ -163,10 +163,17 @@ object EmitManPage {
doc.sections foreach (s => emitSection(s, 1))
}
- def main(args: Array[String]) {
+ def main(args: Array[String]) = args match{
+ case Array(classname) => emitManPage(classname)
+ case Array(classname, file, _*) => emitManPage(classname, new java.io.FileOutputStream(file))
+ case _ => sys.exit(1)
+ }
+
+ def emitManPage(classname: String, outStream: java.io.OutputStream = out.out) {
+ if(outStream != out.out) out setOut outStream
try {
val cl = this.getClass.getClassLoader()
- val clasz = cl loadClass args(0)
+ val clasz = cl loadClass classname
val meth = clasz getDeclaredMethod "manpage"
val doc = meth.invoke(null).asInstanceOf[Document]
emitDocument(doc)
@@ -177,9 +184,4 @@ object EmitManPage {
sys.exit(1)
}
}
-
- def emitManPage(classname: String, outStream: java.io.OutputStream) {
- out setOut outStream
- main(Array(classname))
- }
}
diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala
index 299296b01d..ae54e51761 100644
--- a/src/partest/scala/tools/partest/nest/SBTRunner.scala
+++ b/src/partest/scala/tools/partest/nest/SBTRunner.scala
@@ -3,21 +3,21 @@ package nest
import java.io.File
import scala.tools.nsc.io.{ Directory }
+import scala.util.Properties.setProp
-class SBTRunner extends DirectRunner {
-
+object SBTRunner extends DirectRunner {
+
val fileManager = new FileManager {
var JAVACMD: String = "java"
var JAVAC_CMD: String = "javac"
var CLASSPATH: String = _
var LATEST_LIB: String = _
- val testRootPath: String = PathSettings.testRoot.path
- val testRootDir: Directory = PathSettings.testRoot
+ val testRootPath: String = "test"
+ val testRootDir: Directory = Directory(testRootPath)
}
-
+
def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String):java.util.HashMap[String,Int] = {
-
def convert(scalaM:scala.collection.immutable.Map[String,Int]):java.util.HashMap[String,Int] = {
val javaM = new java.util.HashMap[String,Int]()
for(elem <- scalaM) yield {javaM.put(elem._1,elem._2)}
@@ -25,11 +25,60 @@ class SBTRunner extends DirectRunner {
}
def failedOnlyIfRequired(files:List[File]):List[File]={
- if (fileManager.failed) files filter (x => fileManager.logFileExists(x, kind)) else files
+ if (fileManager.failed) files filter (x => fileManager.logFileExists(x, kind)) else files
}
+ convert(runTestsForFiles(failedOnlyIfRequired(kindFiles.toList), kind))
+ }
- convert(runTestsForFiles(failedOnlyIfRequired(kindFiles.toList), kind))
+ case class CommandLineOptions(classpath: Option[String] = None,
+ tests: Map[String, Array[File]] = Map(),
+ scalacOptions: Seq[String] = Seq(),
+ justFailedTests: Boolean = false)
+
+ def mainReflect(args: Array[String]): java.util.Map[String,Int] = {
+ setProp("partest.debug", "true")
+ setProperties()
+
+ val Argument = new scala.util.matching.Regex("-(.*)")
+ def parseArgs(args: Seq[String], data: CommandLineOptions): CommandLineOptions = args match {
+ case Seq("--failed", rest @ _*) => parseArgs(rest, data.copy(justFailedTests = true))
+ case Seq("-cp", cp, rest @ _*) => parseArgs(rest, data.copy(classpath=Some(cp)))
+ case Seq("-scalacoption", opt, rest @ _*) => parseArgs(rest, data.copy(scalacOptions= data.scalacOptions :+ opt))
+ case Seq(Argument(name), runFiles, rest @ _*) => parseArgs(rest, data.copy(tests=data.tests + (name -> runFiles.split(",").map(new File(_)))))
+ case Seq() => data
+ case x => sys.error("Unknown command line options: " + x)
+ }
+ val config = parseArgs(args, CommandLineOptions())
+ fileManager.SCALAC_OPTS = config.scalacOptions
+ fileManager.CLASSPATH = config.classpath getOrElse error("No classpath set")
+ // Find scala library jar file...
+ val lib: Option[String] = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches ".*scala-library.*\\.jar")).headOption
+ fileManager.LATEST_LIB = lib getOrElse error("No scala-library found! Classpath = " + fileManager.CLASSPATH)
+ // TODO - Do something useful here!!!
+ fileManager.JAVAC_CMD = "javac"
+ fileManager.failed = config.justFailedTests
+ // TODO - Make this a flag?
+ //fileManager.updateCheck = true
+ // Now run and report...
+ val runs = config.tests.filterNot(_._2.isEmpty)
+ // This next bit uses java maps...
+ import collection.JavaConverters._
+ (for {
+ (testType, files) <- runs
+ (path, result) <- reflectiveRunTestsForFiles(files,testType).asScala
+ } yield (path, result)).seq asJava
+ }
+ def main(args: Array[String]): Unit = {
+ import collection.JavaConverters._
+ val failures = for {
+ (path, result) <- mainReflect(args).asScala
+ if result == 1 || result == 2
+ val resultName = (if(result == 1) " [FAILED]" else " [TIMEOUT]")
+ } yield path + resultName
+ // Re-list all failures so we can go figure out what went wrong.
+ failures foreach System.err.println
+ if(!failures.isEmpty) sys.exit(1)
}
}