-rw-r--r--  .gitignore | 6
-rw-r--r--  README.md | 9
-rw-r--r--  build.sbt | 443
-rwxr-xr-x  build.xml | 8
-rw-r--r--  compare-build-dirs-ignore-patterns | 8
-rwxr-xr-x  compare-build-dirs.sh | 5
-rw-r--r--  project/ScalaTool.scala | 44
-rw-r--r--  project/build.properties | 1
-rw-r--r--  project/plugins.sbt | 1
-rwxr-xr-x  scripts/jobs/integrate/bootstrap | 32
-rw-r--r--  scripts/repositories-scala-release | 7
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Delambdafy.scala | 115
-rw-r--r--  src/reflect/scala/reflect/api/Quasiquotes.scala | 2
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/JavapClass.scala | 34
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/Index.scala | 2
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala | 2
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/html/page/DeprecatedIndex.scala | 58
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala | 7
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala | 8
-rw-r--r--  test/files/run/repl-javap-lambdas.scala | 2
-rw-r--r--  test/files/run/t3368-b.check | 89
-rw-r--r--  test/files/run/t3368-b.scala | 26
-rw-r--r--  test/files/run/t3368-c.check | 85
-rw-r--r--  test/files/run/t3368-c.scala | 26
-rw-r--r--  test/files/run/t3368-d.check | 89
-rw-r--r--  test/files/run/t3368-d.scala | 26
-rw-r--r--  test/files/run/t3368.check | 43
-rw-r--r--  test/files/run/t3368.scala | 8
-rw-r--r--  test/files/run/t9268.check | 5
-rw-r--r--  test/files/run/t9268/Java.java | 12
-rw-r--r--  test/files/run/t9268/Test.scala | 40
-rw-r--r--  test/junit/scala/collection/mutable/VectorTest.scala | 1
-rw-r--r--  test/scaladoc/resources/SI-4476.scala | 9
-rw-r--r--  test/scaladoc/scalacheck/DeprecatedIndexTest.scala | 50
-rw-r--r--  test/scaladoc/scalacheck/IndexTest.scala | 8
38 files changed, 1188 insertions(+), 142 deletions(-)
diff --git a/.gitignore b/.gitignore
index 20d700dd12..d6571a377f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -48,3 +48,9 @@
# Standard symbolic link to build/quick/bin
/qbin
+
+# Sbt's target directories
+/target/
+/project/target/
+/project/project/target
+/build-sbt/
diff --git a/README.md b/README.md
index 830dfa8d6c..1651333188 100644
--- a/README.md
+++ b/README.md
@@ -138,6 +138,15 @@ The Scala build system is based on Apache Ant. Most required pre-compiled
libraries are part of the repository (in 'lib/'). The following however is
assumed to be installed on the build machine:
+## Building with Sbt (EXPERIMENTAL)
+
+The experimental sbt-based build definition has arrived! Run `sbt package`
+to build the compiler, `sbt test` to run the unit (JUnit) tests, and
+`sbt test/it:test` to run the integration (partest) tests.
+
+We would like to migrate to the sbt build as quickly as possible. If you would
+like to help, please contact the scala-internals@ mailing list to discuss your
+ideas and coordinate your effort with others.
### Tips and tricks
diff --git a/build.sbt b/build.sbt
new file mode 100644
index 0000000000..0df2e6a800
--- /dev/null
+++ b/build.sbt
@@ -0,0 +1,443 @@
+/*
+ * The new, sbt-based build definition for Scala.
+ *
+ * What you see below is very much a work in progress. Basics like compiling and packaging jars
+ * (into the right location) work. Everything else is missing:
+ * building docs, placing shell scripts in the right locations (so you can run the compiler easily),
+ * running partest tests, compiling and running JUnit tests, and many, many other things.
+ *
+ * You'll notice that this build definition is much more complicated than your typical sbt build.
+ * The main reason is that we are not benefiting from sbt's conventions when it comes to project
+ * layout. For that reason we have to configure a lot more explicitly. I've tried to explain the
+ * less obvious settings in comments.
+ *
+ * This nicely leads me to explaining the goals and non-goals of this build definition. Goals are:
+ *
+ * - to be easy to tweak in case a bug or small inconsistency is found
+ * - to mimic Ant's behavior as closely as possible
+ * - to be super explicit about any departure from standard sbt settings
+ * - to achieve functional parity with the Ant build as quickly as possible
+ * - to be readable and not necessarily succinct
+ * - to provide the nicest development experience for people hacking on Scala
+ *
+ * Non-goals are:
+ *
+ * - to have the shortest sbt build definition possible; we'll easily beat the Ant
+ * definition and that alone is thrilling enough
+ * - to remove irregularities from our build process right away
+ * - to modularize the Scala compiler or library further
+ *
+ * It boils down to simple rules:
+ *
+ * - project layout is set in stone for now
+ * - if you need to work on convincing sbt to follow a non-standard layout then
+ * explain everything you did in comments
+ * - constantly check where the Ant build produces class files, artifacts and other
+ * kinds of files, and port all of that here
+ *
+ * Note on bootstrapping:
+ *
+ * Let's start with a reminder of what bootstrapping means in our context. It's an answer
+ * to this question: which version of Scala are we using to compile Scala? The fact that
+ * the question sounds circular suggests trickiness. Indeed, bootstrapping the Scala
+ * compiler is a tricky process.
+ *
+ * The Ant build uses an involved system for bootstrapping Scala. It consists of
+ * three layers: starr, locker and quick. The sbt build for Scala ditches the layering
+ * and strives to be as standard an sbt project as possible. This means that we are simply
+ * building Scala with the latest stable release of Scala.
+ * See this discussion for more details behind this decision:
+ * https://groups.google.com/d/topic/scala-internals/gp5JsM1E0Fo/discussion
+ */
+
+val bootstrapScalaVersion = "2.11.5"
+
+def withoutScalaLang(moduleId: ModuleID): ModuleID = moduleId exclude("org.scala-lang", "*")
+
+// exclusion of the scala-library transitive dependency avoids eviction warnings during `update`.
+val scalaParserCombinatorsDep = withoutScalaLang("org.scala-lang.modules" %% "scala-parser-combinators" % versionNumber("scala-parser-combinators"))
+val scalaXmlDep = withoutScalaLang("org.scala-lang.modules" %% "scala-xml" % versionNumber("scala-xml"))
+val partestDep = withoutScalaLang("org.scala-lang.modules" %% "scala-partest" % versionNumber("partest"))
+val partestInterfaceDep = withoutScalaLang("org.scala-lang.modules" %% "scala-partest-interface" % "0.5.0")
+val junitDep = "junit" % "junit" % "4.11"
+val junitInterfaceDep = "com.novocode" % "junit-interface" % "0.11" % "test"
+val jlineDep = "jline" % "jline" % versionProps("jline.version")
+val antDep = "org.apache.ant" % "ant" % "1.9.4"
+val scalacheckDep = withoutScalaLang("org.scalacheck" %% "scalacheck" % "1.11.4")
+
+lazy val commonSettings = clearSourceAndResourceDirectories ++ Seq[Setting[_]](
+ organization := "org.scala-lang",
+ version := "2.11.6-SNAPSHOT",
+ scalaVersion := bootstrapScalaVersion,
+ // we don't cross build Scala itself
+ crossPaths := false,
+ // do not add Scala library jar as a dependency automatically
+ autoScalaLibrary := false,
+  // we also do not add the scala instance automatically because it introduces
+  // a circular dependency, see: https://github.com/sbt/sbt/issues/1872
+ managedScalaInstance := false,
+  // this is a way to work around the issue described in https://github.com/sbt/sbt/issues/1872
+ // check it out for more details
+ scalaInstance := ScalaInstance(scalaVersion.value, appConfiguration.value.provider.scalaProvider.launcher getScala scalaVersion.value),
+ // we always assume that Java classes are standalone and do not have any dependency
+ // on Scala classes
+ compileOrder := CompileOrder.JavaThenScala,
+ javacOptions in Compile ++= Seq("-g", "-source", "1.5", "-target", "1.6"),
+  // we don't want any unmanaged jars; as a reminder: an unmanaged jar is a jar stored
+  // directly on the file system and not resolved through Ivy
+  // the Ant build stores unmanaged jars in the `lib/` directory
+ unmanagedJars in Compile := Seq.empty,
+ sourceDirectory in Compile := baseDirectory.value,
+ unmanagedSourceDirectories in Compile := List(baseDirectory.value),
+ scalaSource in Compile := (sourceDirectory in Compile).value,
+ javaSource in Compile := (sourceDirectory in Compile).value,
+ // resources are stored along source files in our current layout
+ resourceDirectory in Compile := (sourceDirectory in Compile).value,
+  // each subproject has to ask specifically for the files it wants to include
+ includeFilter in unmanagedResources in Compile := NothingFilter,
+ target := (baseDirectory in ThisBuild).value / "target" / thisProject.value.id,
+ target in Compile in doc := buildDirectory.value / "scaladoc" / thisProject.value.id,
+ classDirectory in Compile := buildDirectory.value / "quick/classes" / thisProject.value.id,
+  // given that classDirectory is overridden to be _outside_ of the target directory, we have
+  // to make sure it's being cleaned properly
+ cleanFiles += (classDirectory in Compile).value,
+ fork in run := true
+)
+
+// Disable various tasks that are not needed for projects that are used
+// only for compiling code and not for publishing it as a standalone artifact.
+// We disable those tasks by overriding them and returning bogus files when
+// needed. This is a bit sketchy but I haven't found any better way.
+val disableDocsAndPublishingTasks = Seq[Setting[_]](
+ doc := file("!!! NO DOCS !!!"),
+ publishLocal := {},
+ publish := {},
+ packageBin in Compile := file("!!! NO PACKAGING !!!")
+)
+
+lazy val setJarLocation: Setting[_] =
+ artifactPath in packageBin in Compile := {
+ // two lines below are copied over from sbt's sources:
+ // https://github.com/sbt/sbt/blob/0.13/main/src/main/scala/sbt/Defaults.scala#L628
+ //val resolvedScalaVersion = ScalaVersion((scalaVersion in artifactName).value, (scalaBinaryVersion in artifactName).value)
+ //val resolvedArtifactName = artifactName.value(resolvedScalaVersion, projectID.value, artifact.value)
+    // if you would like to get a jar with the version number embedded in it (as sbt normally does),
+    // uncomment the other definition of `resolvedArtifactName` above
+ val resolvedArtifact = artifact.value
+ val resolvedArtifactName = s"${resolvedArtifact.name}.${resolvedArtifact.extension}"
+ buildDirectory.value / "pack/lib" / resolvedArtifactName
+ }
+lazy val scalaSubprojectSettings: Seq[Setting[_]] = commonSettings :+ setJarLocation
+
+lazy val generatePropertiesFileSettings = Seq[Setting[_]](
+ copyrightString := "Copyright 2002-2013, LAMP/EPFL",
+ resourceGenerators in Compile += generateVersionPropertiesFile.map(file => Seq(file)).taskValue,
+ generateVersionPropertiesFile := generateVersionPropertiesFileImpl.value
+)
+
+val libIncludes: FileFilter = "*.tmpl" | "*.xml" | "*.js" | "*.css" | "rootdoc.txt"
+
+lazy val library = configureAsSubproject(project)
+ .settings(generatePropertiesFileSettings: _*)
+ .settings(
+ name := "scala-library",
+ scalacOptions in Compile ++= Seq[String]("-sourcepath", (scalaSource in Compile).value.toString),
+    // Workaround for a bug in `scaladoc`: it seems not to respect the `-sourcepath` option.
+    // As a result of this bug, the compiler cannot even initialize Definitions without
+    // binaries of the library on the classpath. Specifically, we get this error:
+    // (library/compile:doc) scala.reflect.internal.FatalError: package class scala does not have a member Int
+    // The Ant build always does the same thing: it puts binaries for documented classes on the classpath.
+    // sbt never does this by default (which seems like a good default).
+ dependencyClasspath in Compile in doc += (classDirectory in Compile).value,
+ scalacOptions in Compile in doc ++= {
+ val libraryAuxDir = (baseDirectory in ThisBuild).value / "src/library-aux"
+ Seq("-doc-no-compile", libraryAuxDir.toString)
+ },
+ includeFilter in unmanagedResources in Compile := libIncludes)
+ .dependsOn (forkjoin)
+
+lazy val reflect = configureAsSubproject(project)
+ .settings(generatePropertiesFileSettings: _*)
+ .settings(name := "scala-reflect")
+ .dependsOn(library)
+
+val compilerIncludes: FileFilter =
+ "*.tmpl" | "*.xml" | "*.js" | "*.css" | "*.html" | "*.properties" | "*.swf" |
+ "*.png" | "*.gif" | "*.gif" | "*.txt"
+
+lazy val compiler = configureAsSubproject(project)
+ .settings(generatePropertiesFileSettings: _*)
+ .settings(
+ name := "scala-compiler",
+ libraryDependencies += antDep,
+    // this is a way to make sure that classes from the interactive and scaladoc projects
+    // end up in the compiler jar (that's what the Ant build does)
+ // we need to use LocalProject references (with strings) to deal with mutual recursion
+ mappings in Compile in packageBin :=
+ (mappings in Compile in packageBin).value ++
+ (mappings in Compile in packageBin in LocalProject("interactive")).value ++
+ (mappings in Compile in packageBin in LocalProject("scaladoc")).value ++
+ (mappings in Compile in packageBin in LocalProject("repl")).value,
+ includeFilter in unmanagedResources in Compile := compilerIncludes)
+ .dependsOn(library, reflect, asm)
+
+lazy val interactive = configureAsSubproject(project)
+ .settings(disableDocsAndPublishingTasks: _*)
+ .dependsOn(compiler)
+
+lazy val repl = configureAsSubproject(project)
+ .settings(libraryDependencies += jlineDep)
+ .settings(disableDocsAndPublishingTasks: _*)
+ .dependsOn(compiler)
+
+lazy val scaladoc = configureAsSubproject(project)
+ .settings(
+ libraryDependencies ++= Seq(scalaXmlDep, scalaParserCombinatorsDep, partestDep)
+ )
+ .settings(disableDocsAndPublishingTasks: _*)
+ .dependsOn(compiler)
+
+lazy val scalap = configureAsSubproject(project).
+ dependsOn(compiler)
+
+// deprecated Scala Actors project
+// TODO: it packages into actors.jar but it should be scala-actors.jar
+lazy val actors = configureAsSubproject(project)
+ .settings(generatePropertiesFileSettings: _*)
+ .settings(name := "scala-actors")
+ .dependsOn(library)
+
+lazy val forkjoin = configureAsForkOfJavaProject(project)
+
+lazy val asm = configureAsForkOfJavaProject(project)
+
+lazy val partestExtras = configureAsSubproject(Project("partest-extras", file(".") / "src" / "partest-extras"))
+ .dependsOn(repl)
+ .settings(clearSourceAndResourceDirectories: _*)
+ .settings(
+ libraryDependencies += partestDep,
+ unmanagedSourceDirectories in Compile := List(baseDirectory.value)
+ )
+
+lazy val junit = project.in(file("test") / "junit")
+ .dependsOn(library, reflect, compiler, partestExtras, scaladoc)
+ .settings(clearSourceAndResourceDirectories: _*)
+ .settings(commonSettings: _*)
+ .settings(
+ fork in Test := true,
+    libraryDependencies ++= Seq(junitDep, junitInterfaceDep),
+ testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v"),
+ unmanagedSourceDirectories in Test := List(baseDirectory.value)
+ )
+
+lazy val partestJavaAgent = (project in file(".") / "src" / "partest-javaagent").
+ dependsOn(asm).
+ settings(commonSettings: _*).
+ settings(
+ doc := file("!!! NO DOCS !!!"),
+ publishLocal := {},
+ publish := {},
+ // Setting name to "scala-partest-javaagent" so that the jar file gets that name, which the Runner relies on
+ name := "scala-partest-javaagent",
+ // writing jar file to $buildDirectory/pack/lib because that's where it's expected to be found
+ setJarLocation,
+ // add required manifest entry - previously included from file
+ packageOptions in (Compile, packageBin) +=
+ Package.ManifestAttributes( "Premain-Class" -> "scala.tools.partest.javaagent.ProfilingAgent" ),
+ // we need to build this to a JAR
+ exportJars := true
+ )
+
+lazy val test = project.
+ dependsOn(compiler, interactive, actors, repl, scalap, partestExtras, partestJavaAgent, asm, scaladoc).
+ configs(IntegrationTest).
+ settings(disableDocsAndPublishingTasks: _*).
+ settings(commonSettings: _*).
+ settings(Defaults.itSettings: _*).
+ settings(
+ libraryDependencies ++= Seq(partestDep, scalaXmlDep, partestInterfaceDep, scalacheckDep),
+ unmanagedBase in Test := baseDirectory.value / "files" / "lib",
+ unmanagedJars in Test <+= (unmanagedBase) (j => Attributed.blank(j)) map(identity),
+ // no main sources
+ sources in Compile := Seq.empty,
+ // test sources are compiled in partest run, not here
+ sources in IntegrationTest := Seq.empty,
+ fork in IntegrationTest := true,
+ javaOptions in IntegrationTest += "-Xmx1G",
+ testFrameworks += new TestFramework("scala.tools.partest.Framework"),
+ testOptions in IntegrationTest += Tests.Setup( () => root.base.getAbsolutePath + "/pull-binary-libs.sh" ! ),
+ definedTests in IntegrationTest += (
+ new sbt.TestDefinition(
+ "partest",
+ // marker fingerprint since there are no test classes
+ // to be discovered by sbt:
+ new sbt.testing.AnnotatedFingerprint {
+ def isModule = true
+ def annotationName = "partest"
+ }, true, Array())
+ )
+ )
+
+lazy val root = (project in file(".")).
+ aggregate(library, forkjoin, reflect, compiler, asm, interactive, repl,
+ scaladoc, scalap, actors, partestExtras, junit).settings(
+ sources in Compile := Seq.empty,
+ onLoadMessage := """|*** Welcome to the sbt build definition for Scala! ***
+ |This build definition has an EXPERIMENTAL status. If you are not
+ |interested in testing or working on the build itself, please use
+ |the Ant build definition for now. Check README.md for more information.""".stripMargin
+ )
+
+lazy val dist = (project in file("dist")).settings(
+ mkBin := mkBinImpl.value
+)
+
+/**
+ * Configures passed project as a subproject (e.g. compiler or repl)
+ * with common settings attached to it.
+ *
+ * Typical usage is:
+ *
+ * lazy val mySubproject = configureAsSubproject(project)
+ *
+ * We pass `project` as an argument which is in fact a macro call. This macro determines
+ * project.id based on the name of the lazy val on the left-hand side.
+ */
+def configureAsSubproject(project: Project): Project = {
+ val base = file(".") / "src" / project.id
+ (project in base).settings(scalaSubprojectSettings: _*)
+}
+
+/**
+ * Configuration for subprojects that are forks of some Java projects
+ * we depend on. At the moment there are just two: asm and forkjoin.
+ *
+ * We do not publish artifacts for those projects but we package their
+ * binaries in the jar of another project (compiler or library).
+ *
+ * For that reason we disable docs generation, packaging and publishing.
+ */
+def configureAsForkOfJavaProject(project: Project): Project = {
+ val base = file(".") / "src" / project.id
+ (project in base).
+ settings(commonSettings: _*).
+ settings(disableDocsAndPublishingTasks: _*).
+ settings(
+ sourceDirectory in Compile := baseDirectory.value,
+ javaSource in Compile := (sourceDirectory in Compile).value,
+ sources in Compile in doc := Seq.empty,
+ classDirectory in Compile := buildDirectory.value / "libs/classes" / thisProject.value.id
+ )
+}
+
+lazy val buildDirectory = settingKey[File]("The directory where all build products go. By default ./build")
+lazy val copyrightString = settingKey[String]("Copyright string.")
+lazy val generateVersionPropertiesFile = taskKey[File]("Generating version properties file.")
+lazy val mkBin = taskKey[Seq[File]]("Generate shell script (bash or Windows batch).")
+
+lazy val generateVersionPropertiesFileImpl: Def.Initialize[Task[File]] = Def.task {
+ val propFile = (resourceManaged in Compile).value / s"${thisProject.value.id}.properties"
+ val props = new java.util.Properties
+
+ /**
+   * Regexp that splits a version number into two parts: version and suffix.
+ * Examples of how the split is performed:
+ *
+ * "2.11.5": ("2.11.5", null)
+ * "2.11.5-acda7a": ("2.11.5", "-acda7a")
+ * "2.11.5-SNAPSHOT": ("2.11.5", "-SNAPSHOT")
+ *
+ */
+ val versionSplitted = """([\w+\.]+)(-[\w+\.]+)??""".r
+
+ val versionSplitted(ver, suffixOrNull) = version.value
+ val osgiSuffix = suffixOrNull match {
+ case null => "-VFINAL"
+ case "-SNAPSHOT" => ""
+ case suffixStr => suffixStr
+ }
+
+ def executeTool(tool: String) = {
+ val cmd =
+ if (System.getProperty("os.name").toLowerCase.contains("windows"))
+ s"cmd.exe /c tools\\$tool.bat -p"
+ else s"tools/$tool"
+ Process(cmd).lines.head
+ }
+
+ val commitDate = executeTool("get-scala-commit-date")
+ val commitSha = executeTool("get-scala-commit-sha")
+
+ props.put("version.number", s"${version.value}-$commitDate-$commitSha")
+ props.put("maven.version.number", s"${version.value}")
+ props.put("osgi.version.number", s"$ver.v$commitDate$osgiSuffix-$commitSha")
+ props.put("copyright.string", copyrightString.value)
+
+  // unfortunately, this will write the properties in arbitrary order,
+  // which makes it harder to test for the stability of generated artifacts;
+  // consider using https://github.com/etiennestuder/java-ordered-properties
+ // instead of java.util.Properties
+ IO.write(props, null, propFile)
+
+ propFile
+}
+
+// Defining these settings is somewhat redundant as we also redefine settings that depend on them.
+// However, IntelliJ's project import works better when these are set correctly.
+def clearSourceAndResourceDirectories = Seq(Compile, Test).flatMap(config => inConfig(config)(Seq(
+ unmanagedSourceDirectories := Nil,
+ managedSourceDirectories := Nil,
+ unmanagedResourceDirectories := Nil,
+ managedResourceDirectories := Nil
+)))
+
+lazy val mkBinImpl: Def.Initialize[Task[Seq[File]]] = Def.task {
+ def mkScalaTool(mainCls: String, classpath: Seq[Attributed[File]]): ScalaTool =
+ ScalaTool(mainClass = mainCls,
+ classpath = classpath.toList.map(_.data.getAbsolutePath),
+ properties = Map.empty,
+ javaOpts = "-Xmx256M -Xms32M",
+ toolFlags = "")
+ val rootDir = (classDirectory in Compile in compiler).value
+ def writeScripts(scalaTool: ScalaTool, file: String, outDir: File): Seq[File] =
+ Seq(
+ scalaTool.writeScript(file, "unix", rootDir, outDir),
+ scalaTool.writeScript(file, "windows", rootDir, outDir)
+ )
+ def mkQuickBin(file: String, mainCls: String, classpath: Seq[Attributed[File]]): Seq[File] = {
+ val scalaTool = mkScalaTool(mainCls, classpath)
+ val outDir = buildDirectory.value / "quick/bin"
+ writeScripts(scalaTool, file, outDir)
+ }
+
+ def mkPackBin(file: String, mainCls: String): Seq[File] = {
+ val scalaTool = mkScalaTool(mainCls, classpath = Nil)
+ val outDir = buildDirectory.value / "pack/bin"
+ writeScripts(scalaTool, file, outDir)
+ }
+
+ def mkBin(file: String, mainCls: String, classpath: Seq[Attributed[File]]): Seq[File] =
+ mkQuickBin(file, mainCls, classpath) ++ mkPackBin(file, mainCls)
+
+ mkBin("scala" , "scala.tools.nsc.MainGenericRunner", (fullClasspath in Compile in repl).value) ++
+ mkBin("scalac" , "scala.tools.nsc.Main", (fullClasspath in Compile in compiler).value) ++
+ mkBin("fsc" , "scala.tools.nsc.CompileClient", (fullClasspath in Compile in compiler).value) ++
+ mkBin("scaladoc" , "scala.tools.nsc.ScalaDoc", (fullClasspath in Compile in scaladoc).value) ++
+ mkBin("scalap" , "scala.tools.scalap.Main", (fullClasspath in Compile in scalap).value)
+}
+
+buildDirectory in ThisBuild := (baseDirectory in ThisBuild).value / "build-sbt"
+
+lazy val versionProps: Map[String, String] = {
+ import java.io.FileInputStream
+ import java.util.Properties
+ val props = new Properties()
+ val in = new FileInputStream(file("versions.properties"))
+ try props.load(in)
+ finally in.close()
+ import scala.collection.JavaConverters._
+ props.asScala.toMap
+}
+
+def versionNumber(name: String): String =
+ versionProps(s"$name.version.number")
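As a usage note, the helpers above make adding a module fairly mechanical. Below is a hypothetical sketch (the `mytool` project and `scala-mytool` artifact name are made up for illustration) of how a new subproject could be wired with `configureAsSubproject`:

```scala
// Hypothetical subproject wired through the helpers defined in build.sbt above.
// Its sources would be expected under src/mytool, the jar would land in
// build-sbt/pack/lib/mytool.jar and classes under build-sbt/quick/classes/mytool.
lazy val mytool = configureAsSubproject(project)
  .settings(generatePropertiesFileSettings: _*)
  .settings(name := "scala-mytool")
  .dependsOn(library)
```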
diff --git a/build.xml b/build.xml
index 4b79b68a02..0a67f8a563 100755
--- a/build.xml
+++ b/build.xml
@@ -165,7 +165,7 @@ TODO:
<property name="build.dir" value="${basedir}/build"/>
<property name="build-deps.dir" value="${build.dir}/deps"/>
<property name="build-libs.dir" value="${build.dir}/libs"/>
- <property name="build-asm.dir" value="${build.dir}/asm"/>
+ <property name="build-asm.dir" value="${build-libs.dir}"/>
<property name="build-forkjoin.dir" value="${build-libs.dir}"/>
<property name="build-locker.dir" value="${build.dir}/locker"/>
<property name="build-quick.dir" value="${build.dir}/quick"/>
@@ -588,8 +588,8 @@ TODO:
</propertyfile>
</then></if>
- <path id="forkjoin.classpath" path="${build-libs.dir}/classes/forkjoin"/>
- <path id="asm.classpath" path="${build-asm.dir}/classes"/>
+ <path id="forkjoin.classpath" path="${build-forkjoin.dir}/classes/forkjoin"/>
+ <path id="asm.classpath" path="${build-asm.dir}/classes/asm"/>
<property name="forkjoin-classes" refid="forkjoin.classpath"/>
<property name="asm-classes" refid="asm.classpath"/>
@@ -1061,7 +1061,7 @@ TODO:
============================================================================ -->
<target name="asm.done" depends="init"> <simple-javac project="asm" jar="no"/> </target>
- <target name="forkjoin.done" depends="init"> <simple-javac project="forkjoin" args="-XDignore.symbol.file"/></target>
+ <target name="forkjoin.done" depends="init"> <simple-javac project="forkjoin" args="-XDignore.symbol.file" jar="no"/></target>
<!-- For local development only. We only allow released versions of Scala for STARR.
This builds quick (core only) and publishes it with a generated version number,
diff --git a/compare-build-dirs-ignore-patterns b/compare-build-dirs-ignore-patterns
new file mode 100644
index 0000000000..8c8160ba15
--- /dev/null
+++ b/compare-build-dirs-ignore-patterns
@@ -0,0 +1,8 @@
+.DS_Store
+*.complete
+locker
+deps
+scala-continuations-*.jar
+scala-parser-combinators*.jar
+scala-swing*.jar
+scala-xml*.jar
diff --git a/compare-build-dirs.sh b/compare-build-dirs.sh
new file mode 100755
index 0000000000..f6806dd422
--- /dev/null
+++ b/compare-build-dirs.sh
@@ -0,0 +1,5 @@
+# Compares the build directories generated by the Ant and sbt build definitions.
+# This lets us see how far we are from achieving perfect parity
+# between the builds.
+
+diff -X compare-build-dirs-ignore-patterns -qr build/ build-sbt/
diff --git a/project/ScalaTool.scala b/project/ScalaTool.scala
new file mode 100644
index 0000000000..559b215c18
--- /dev/null
+++ b/project/ScalaTool.scala
@@ -0,0 +1,44 @@
+import sbt._
+import org.apache.commons.lang3.StringUtils.replaceEach
+
+/**
+ * A class that generates a shell or batch script to execute a Scala program.
+ *
+ * This is a simplified copy of the Ant task (see scala.tools.ant.ScalaTool).
+ */
+case class ScalaTool(mainClass: String,
+ classpath: List[String],
+ properties: Map[String, String],
+ javaOpts: String,
+ toolFlags: String) {
+ // For classpath, the platform specific
+ // demarcation of any script variables (e.g. `${SCALA_HOME}` or
+ // `%SCALA_HOME%`) can be specified in a platform independent way (e.g.
+ // `@SCALA_HOME@`) and automatically translated for you.
+ def patchedToolScript(template: String, platform: String) = {
+ val varRegex = """@(\w+)@""" // the group should be able to capture each of the keys of the map below
+
+ val variables = Map(
+ ("@@" -> "@"), // for backwards compatibility
+ ("@class@" -> mainClass),
+ ("@properties@" -> (properties map { case (k, v) => s"""-D$k="$v""""} mkString " ")),
+ ("@javaflags@" -> javaOpts),
+ ("@toolflags@" -> toolFlags),
+ ("@classpath@" -> (platform match {
+ case "unix" => classpath.mkString(":").replace('\\', '/').replaceAll(varRegex, """\${$1}""")
+ case "windows" => classpath.mkString(";").replace('/', '\\').replaceAll(varRegex, "%$1%")
+ }))
+ )
+
+ val (from, to) = variables.unzip
+ replaceEach(template, from.toArray, to.toArray)
+ }
+
+ def writeScript(file: String, platform: String, rootDir: File, outDir: File): File = {
+ val templatePath = s"scala/tools/ant/templates/tool-$platform.tmpl"
+ val suffix = platform match { case "windows" => ".bat" case _ => "" }
+ val scriptFile = outDir / s"$file$suffix"
+ IO.write(scriptFile, patchedToolScript(IO.read(rootDir / templatePath), platform))
+ scriptFile
+ }
+}
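For illustration, a hedged sketch of how this helper is meant to be driven, mirroring `mkBinImpl` in build.sbt above; the literal paths and classpath entries are assumptions, not values from the build:

```scala
import sbt._

// Writes build/quick/bin/scalac and build/quick/bin/scalac.bat, reading the
// tool-unix/tool-windows templates from under the compiler's class directory.
val scalacTool = ScalaTool(
  mainClass  = "scala.tools.nsc.Main",
  classpath  = List("@SCALA_HOME@/lib/scala-library.jar", "@SCALA_HOME@/lib/scala-compiler.jar"),
  properties = Map.empty,
  javaOpts   = "-Xmx256M -Xms32M",
  toolFlags  = "")

val rootDir = file("build/quick/classes/compiler") // assumed template location
val outDir  = file("build/quick/bin")
scalacTool.writeScript("scalac", "unix", rootDir, outDir)
scalacTool.writeScript("scalac", "windows", rootDir, outDir)
```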
diff --git a/project/build.properties b/project/build.properties
new file mode 100644
index 0000000000..748703f770
--- /dev/null
+++ b/project/build.properties
@@ -0,0 +1 @@
+sbt.version=0.13.7
diff --git a/project/plugins.sbt b/project/plugins.sbt
new file mode 100644
index 0000000000..dc266a8db1
--- /dev/null
+++ b/project/plugins.sbt
@@ -0,0 +1 @@
+libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.3.2" \ No newline at end of file
diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap
index d0a5b452a8..7944ab3fd3 100755
--- a/scripts/jobs/integrate/bootstrap
+++ b/scripts/jobs/integrate/bootstrap
@@ -108,8 +108,22 @@ rm -rf $baseDir/resolutionScratch_
mkdir -p $baseDir/resolutionScratch_
# repo used to publish "locker" scala to (to start the bootstrap)
-privateCred="private-repo"
-privateRepo="http://private-repo.typesafe.com/typesafe/scala-release-temp/"
+releaseTempRepoCred="private-repo"
+releaseTempRepoUrl=${releaseTempRepoUrl-"http://private-repo.typesafe.com/typesafe/scala-release-temp/"}
+
+# Used below in sbtArgs since we use a dedicated repository to share artifacts between jobs,
+# so we need to configure SBT to use these repositories rather than its default, Maven Central.
+# See http://www.scala-sbt.org/0.13/docs/Proxy-Repositories.html
+sbtRepositoryConfig="$scriptsDir/repositories-scala-release"
+cat > "$sbtRepositoryConfig" << EOF
+[repositories]
+ plugins: http://dl.bintray.com/sbt/sbt-plugin-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
+ private-repo: $releaseTempRepoUrl
+ typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly
+ sbt-plugin-releases: http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
+ maven-central
+ local
+EOF
##### git
gfxd() {
@@ -158,7 +172,7 @@ function st_stagingRepoClose() {
# the old version (on jenkins, and I don't want to upgrade for risk of breaking other builds) honors -sbt-dir
# the new version of sbt-extras ignores sbt-dir, so we pass it in as -Dsbt.global.base
# need to set sbt-dir to one that has the gpg.sbt plugin config
-sbtArgs="-no-colors -ivy $baseDir/ivy2 -Dsbt.override.build.repos=true -Dsbt.repository.config=$scriptsDir/repositories-scala-release -Dsbt.global.base=$HOME/.sbt/0.13 -sbt-dir $HOME/.sbt/0.13"
+sbtArgs="-no-colors -ivy $baseDir/ivy2 -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig -Dsbt.global.base=$HOME/.sbt/0.13 -sbt-dir $HOME/.sbt/0.13"
sbtBuild() {
echo "### sbtBuild: "$sbtCmd $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@"
@@ -457,8 +471,8 @@ bootstrap() {
# in sabbus lingo, the resulting Scala build will be used as starr to build the released Scala compiler
ant -Dmaven.version.number=$SCALA_VER\
-Dremote.snapshot.repository=NOPE\
- -Dremote.release.repository=$privateRepo\
- -Drepository.credentials.id=$privateCred\
+ -Dremote.release.repository=$releaseTempRepoUrl\
+ -Drepository.credentials.id=$releaseTempRepoCred\
-Dscalac.args.optimise=-optimise\
-Ddocs.skip=1\
-Dlocker.skip=1\
@@ -471,7 +485,7 @@ bootstrap() {
# publish to our internal repo (so we can resolve the modules in the scala build below)
# we only need to build the modules necessary to build Scala itself
# since the version of locker and quick are the same
- publishTasks=('set credentials += Credentials(Path.userHome / ".credentials-private-repo")' "set every publishTo := Some(\"private-repo\" at \"$privateRepo\")")
+ publishTasks=('set credentials += Credentials(Path.userHome / ".credentials-private-repo")' "set every publishTo := Some(\"private-repo\" at \"$releaseTempRepoUrl\")")
buildTasks=($publishPrivateTask)
buildModules
@@ -496,14 +510,14 @@ bootstrap() {
# which is fully cross-versioned (for $SCALA_VER, the version we're releasing)
ant -Dstarr.version=$SCALA_VER\
-Dscala.full.version=$SCALA_VER\
- -Dextra.repo.url=$privateRepo\
+ -Dextra.repo.url=$releaseTempRepoUrl\
-Dmaven.version.suffix=$SCALA_VER_SUFFIX\
${updatedModuleVersions[@]} \
-Dupdate.versions=1\
-Dscaladoc.git.commit=$SCALADOC_SOURCE_LINKS_VER\
-Dremote.snapshot.repository=NOPE\
- -Dremote.release.repository=$privateRepo\
- -Drepository.credentials.id=$privateCred\
+ -Dremote.release.repository=$releaseTempRepoUrl\
+ -Drepository.credentials.id=$releaseTempRepoCred\
-Dscalac.args.optimise=-optimise\
$antBuildTask $publishPrivateTask
diff --git a/scripts/repositories-scala-release b/scripts/repositories-scala-release
deleted file mode 100644
index 00538a08ff..0000000000
--- a/scripts/repositories-scala-release
+++ /dev/null
@@ -1,7 +0,0 @@
-[repositories]
- plugins: http://dl.bintray.com/sbt/sbt-plugin-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
- private-repo: http://private-repo.typesafe.com/typesafe/scala-release-temp/
- typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly
- sbt-plugin-releases: http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
- maven-central
- local \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index 99399e363f..67241ef639 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -126,10 +126,9 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
def makeTextPat(txt: Tree) = Apply(_scala_xml__Text, List(txt))
def makeText1(txt: Tree) = New(_scala_xml_Text, LL(txt))
def comment(pos: Position, text: String) = atPos(pos)( Comment(const(text)) )
- def charData(pos: Position, txt: String) = atPos(pos) {
- val t = if (isPattern) Apply(_scala_xml(xmlterms._PCData), List(const(txt)))
- else New(_scala_xml(_PCData), LL(const(txt)))
- if (coalescing) t updateAttachment TextAttache(pos, txt) else t
+ def charData(pos: Position, txt: String) = if (coalescing) text(pos, txt) else atPos(pos) {
+ if (isPattern) Apply(_scala_xml(xmlterms._PCData), List(const(txt)))
+ else New(_scala_xml(_PCData), LL(const(txt)))
}
def procInstr(pos: Position, target: String, txt: String) =
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index f217d21c35..630276e412 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -142,7 +142,7 @@ trait ScalaSettings extends AbsScalaSettings
// XML parsing options
object XxmlSettings extends MultiChoiceEnumeration {
val coalescing = Choice("coalescing", "Convert PCData to Text and coalesce sibling nodes")
- def isCoalescing = Xxml contains coalescing
+ def isCoalescing = (Xxml contains coalescing) || (!isScala212 && !Xxml.isSetByUser)
}
val Xxml = MultiChoiceSetting(
name = "-Xxml",
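For context, a hedged illustration of what the coalescing default controls, mirroring the t3368 tests added below: with coalescing on (the default here unless targeting 2.12 or `-Xxml` is set explicitly), CDATA sections are folded into adjacent `Text` nodes; with `-Xxml:-coalescing` they stay `PCData` nodes.

```scala
// Sketch of the observable difference (compare t3368-c.check with t3368-b.check):
trait Demo {
  def f = <foo>x<![CDATA[hello, world]]></foo>
  // coalescing (default):  children are a single Text("xhello, world")
  // -Xxml:-coalescing:     children are Text("x") followed by PCData("hello, world")
}
```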
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 994bcd8359..a59b9d3f48 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -1141,16 +1141,12 @@ abstract class ClassfileParser {
private def innerSymbol(entry: InnerClassEntry): Symbol = {
val name = entry.originalName.toTypeName
val enclosing = entry.enclosing
- def getMember = (
+ val member = (
if (enclosing == clazz) entry.scope lookup name
else lookupMemberAtTyperPhaseIfPossible(enclosing, name)
)
- getMember
- /* There used to be an assertion that this result is not NoSymbol; changing it to an error
- * revealed it had been going off all the time, but has been swallowed by a catch t: Throwable
- * in Repository.scala. Since it has been accomplishing nothing except misleading anyone who
- * thought it wasn't triggering, I removed it entirely.
- */
+ def newStub = enclosing.newStubSymbol(name, s"Unable to locate class corresponding to inner class entry for $name in owner ${entry.outerName}")
+ member.orElse(newStub)
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
index 94e88589f5..2d33b35241 100644
--- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
+++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
@@ -15,13 +15,12 @@ import scala.collection.mutable.LinkedHashMap
* Currently Uncurry is responsible for that transformation.
*
* From a lambda, Delambdafy will create
- * 1) a static forwarder at the top level of the class that contained the lambda
- * 2) a new top level class that
+ * 1) a new top level class that
a) has fields and a constructor taking the captured environment (including possibly the "this"
* reference)
- * b) an apply method that calls the static forwarder
+ * b) an apply method that calls the target method
* c) if needed a bridge method for the apply method
- * 3) an instantiation of the newly created class which replaces the lambda
+ * 2) an instantiation of the newly created class which replaces the lambda
*
* TODO the main work left to be done is to plug into specialization. Primarily that means choosing a
* specialized FunctionN trait instead of the generic FunctionN trait as a parent and creating the
@@ -76,36 +75,25 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
referrers
}
- val accessorMethods = mutable.ArrayBuffer[Tree]()
-
- // the result of the transformFunction method. A class definition for the lambda, an expression
- // insantiating the lambda class, and an accessor method for the lambda class to be able to
- // call the implementation
- case class TransformedFunction(lambdaClassDef: ClassDef, newExpr: Tree, accessorMethod: Tree)
+ // the result of the transformFunction method.
+ sealed abstract class TransformedFunction
+  // A class definition for the lambda and an expression instantiating the lambda class
+ case class DelambdafyAnonClass(lambdaClassDef: ClassDef, newExpr: Tree) extends TransformedFunction
// here's the main entry point of the transform
override def transform(tree: Tree): Tree = tree match {
// the main thing we care about is lambdas
case fun @ Function(_, _) =>
- // a lambda beccomes a new class, an instantiation expression, and an
- // accessor method
- val TransformedFunction(lambdaClassDef, newExpr, accessorMethod) = transformFunction(fun)
- // we'll add accessor methods to the current template later
- accessorMethods += accessorMethod
- val pkg = lambdaClassDef.symbol.owner
-
- // we'll add the lambda class to the package later
- lambdaClassDefs(pkg) = lambdaClassDef :: lambdaClassDefs(pkg)
-
- super.transform(newExpr)
- // when we encounter a template (basically the thing that holds body of a class/trait)
- // we need to updated it to include newly created accessor methods after transforming it
- case Template(_, _, _) =>
- try {
- // during this call accessorMethods will be populated from the Function case
- val Template(parents, self, body) = super.transform(tree)
- Template(parents, self, body ++ accessorMethods)
- } finally accessorMethods.clear()
+ transformFunction(fun) match {
+ case DelambdafyAnonClass(lambdaClassDef, newExpr) =>
+          // a lambda becomes a new class and an instantiation expression
+ val pkg = lambdaClassDef.symbol.owner
+
+ // we'll add the lambda class to the package later
+ lambdaClassDefs(pkg) = lambdaClassDef :: lambdaClassDefs(pkg)
+
+ super.transform(newExpr)
+ }
case _ => super.transform(tree)
}
@@ -120,8 +108,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
private def optionSymbol(sym: Symbol): Option[Symbol] = if (sym.exists) Some(sym) else None
- // turns a lambda into a new class def, a New expression instantiating that class, and an
- // accessor method fo the body of the lambda
+  // turns a lambda into a new class def and a New expression instantiating that class
private def transformFunction(originalFunction: Function): TransformedFunction = {
val functionTpe = originalFunction.tpe
val targs = functionTpe.typeArgs
@@ -132,46 +119,16 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
// passed into the constructor of the anonymous function class
val captures = FreeVarTraverser.freeVarsOf(originalFunction)
- /**
- * Creates the apply method for the anonymous subclass of FunctionN
- */
- def createAccessorMethod(thisProxy: Symbol, fun: Function): DefDef = {
- val target = targetMethod(fun)
- if (!thisProxy.exists) {
- target setFlag STATIC
- }
- val params = ((optionSymbol(thisProxy) map {proxy:Symbol => ValDef(proxy)}) ++ (target.paramss.flatten map ValDef.apply)).toList
-
- val methSym = oldClass.newMethod(unit.freshTermName(nme.accessor.toString() + "$"), target.pos, FINAL | BRIDGE | SYNTHETIC | PROTECTED | STATIC)
+ val target = targetMethod(originalFunction)
+ target.makeNotPrivate(target.owner)
+ if (!thisReferringMethods.contains(target))
+ target setFlag STATIC
- val paramSyms = params map {param => methSym.newSyntheticValueParam(param.symbol.tpe, param.name) }
-
- params zip paramSyms foreach { case (valdef, sym) => valdef.symbol = sym }
- params foreach (_.symbol.owner = methSym)
-
- val methodType = MethodType(paramSyms, restpe)
- methSym setInfo methodType
-
- oldClass.info.decls enter methSym
-
- val body = localTyper.typed {
- val newTarget = Select(if (thisProxy.exists) gen.mkAttributedRef(paramSyms(0)) else gen.mkAttributedThis(oldClass), target)
- val newParams = paramSyms drop (if (thisProxy.exists) 1 else 0) map Ident
- Apply(newTarget, newParams)
- } setPos fun.pos
- val methDef = DefDef(methSym, List(params), body)
-
- // Have to repack the type to avoid mismatches when existentials
- // appear in the result - see SI-4869.
- // TODO probably don't need packedType
- methDef.tpt setType localTyper.packedType(body, methSym)
- methDef
- }
/**
* Creates the apply method for the anonymous subclass of FunctionN
*/
- def createApplyMethod(newClass: Symbol, fun: Function, accessor: DefDef, thisProxy: Symbol): DefDef = {
+ def createApplyMethod(newClass: Symbol, fun: Function, thisProxy: Symbol): DefDef = {
val methSym = newClass.newMethod(nme.apply, fun.pos, FINAL | SYNTHETIC)
val params = fun.vparams map (_.duplicate)
@@ -187,8 +144,12 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
newClass.info.decls enter methSym
val Apply(_, oldParams) = fun.body
+ val qual = if (thisProxy.exists)
+ Select(gen.mkAttributedThis(newClass), thisProxy)
+ else
+ gen.mkAttributedThis(oldClass) // sort of a lie, EmptyTree.<static method> would be more honest, but the backend chokes on that.
- val body = localTyper typed Apply(Select(gen.mkAttributedThis(oldClass), accessor.symbol), (optionSymbol(thisProxy) map {tp => Select(gen.mkAttributedThis(newClass), tp)}).toList ++ oldParams)
+ val body = localTyper typed Apply(Select(qual, target), oldParams)
body.substituteSymbols(fun.vparams map (_.symbol), params map (_.symbol))
body changeOwner (fun.symbol -> methSym)
@@ -271,18 +232,16 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
// the Optional proxy that will hold a reference to the 'this'
// object used by the lambda, if any. NoSymbol if there is no this proxy
val thisProxy = {
- val target = targetMethod(originalFunction)
- if (thisReferringMethods contains target) {
+ if (target.hasFlag(STATIC))
+ NoSymbol
+ else {
val sym = lambdaClass.newVariable(nme.FAKE_LOCAL_THIS, originalFunction.pos, SYNTHETIC)
- sym.info = oldClass.tpe
- sym
- } else NoSymbol
+ sym.setInfo(oldClass.tpe)
+ }
}
val decapturify = new DeCapturifyTransformer(captureProxies2, unit, oldClass, lambdaClass, originalFunction.symbol.pos, thisProxy)
- val accessorMethod = createAccessorMethod(thisProxy, originalFunction)
-
val decapturedFunction = decapturify.transform(originalFunction).asInstanceOf[Function]
val members = (optionSymbol(thisProxy).toList ++ (captureProxies2 map (_._2))) map {member =>
@@ -294,7 +253,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
val constr = createConstructor(lambdaClass, members)
// apply method with same arguments and return type as original lambda.
- val applyMethodDef = createApplyMethod(lambdaClass, decapturedFunction, accessorMethod, thisProxy)
+ val applyMethodDef = createApplyMethod(lambdaClass, decapturedFunction, thisProxy)
val bridgeMethod = createBridgeMethod(lambdaClass, originalFunction, applyMethodDef)
@@ -312,10 +271,10 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
val body = members ++ List(constr, applyMethodDef) ++ bridgeMethod
// TODO if member fields are private this complains that they're not accessible
- (localTyper.typedPos(decapturedFunction.pos)(ClassDef(lambdaClass, body)).asInstanceOf[ClassDef], thisProxy, accessorMethod)
+ (localTyper.typedPos(decapturedFunction.pos)(ClassDef(lambdaClass, body)).asInstanceOf[ClassDef], thisProxy)
}
- val (anonymousClassDef, thisProxy, accessorMethod) = makeAnonymousClass
+ val (anonymousClassDef, thisProxy) = makeAnonymousClass
pkg.info.decls enter anonymousClassDef.symbol
@@ -327,7 +286,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
val typedNewStat = localTyper.typedPos(originalFunction.pos)(newStat)
- TransformedFunction(anonymousClassDef, typedNewStat, accessorMethod)
+ DelambdafyAnonClass(anonymousClassDef, typedNewStat)
}
/**
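To make the new shape concrete, here is a hedged, hand-written analogue of what the transform described above produces; the class names, the `AbstractFunction1` parent and the lifted method name are illustrative assumptions, not the compiler's exact output:

```scala
// The lambda body has already been lifted into a "target method" on the
// enclosing class by uncurry; delambdafy makes it non-private (and STATIC
// when it does not reference `this`).
class C(val base: Int) {
  def anonfunBody(x: Int): Int = x + base              // lifted lambda body (captures `this` via `base`)
  def go(xs: List[Int]): List[Int] = xs.map(new CAnonFun(this))
}

// The generated top-level class: a field for the captured `this`, plus an
// apply that calls the target method directly -- no static accessor/forwarder
// is emitted any more.
class CAnonFun(outer: C) extends scala.runtime.AbstractFunction1[Int, Int] {
  def apply(x: Int): Int = outer.anonfunBody(x)
}
```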
diff --git a/src/reflect/scala/reflect/api/Quasiquotes.scala b/src/reflect/scala/reflect/api/Quasiquotes.scala
index eaae05bed5..554b43afaf 100644
--- a/src/reflect/scala/reflect/api/Quasiquotes.scala
+++ b/src/reflect/scala/reflect/api/Quasiquotes.scala
@@ -3,7 +3,7 @@ package api
trait Quasiquotes { self: Universe =>
- /** Implicit class that introduces `q`, `tq`, `cq,` `p` and `fq` string interpolators
+ /** Implicit class that introduces `q`, `tq`, `cq,` `pq` and `fq` string interpolators
* that are also known as quasiquotes. With their help you can easily manipulate
* Scala reflection ASTs.
*
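The fix above just corrects the interpolator name from `p` to `pq`. For reference, a small hedged sketch of the five interpolators the comment enumerates (runtime-universe flavour; tree construction only, no toolbox needed):

```scala
import scala.reflect.runtime.universe._

val fn         = q"(x: Int) => x + 1"        // q  -- term trees
val tpe        = tq"Map[String, Int]"        // tq -- type trees
val pat        = pq"Some(value)"             // pq -- pattern trees (the one this doc fix names)
val clause     = cq"Some(value) => value"    // cq -- case clauses
val enumerator = fq"x <- 1 to 10"            // fq -- for-comprehension enumerators
```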
diff --git a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
index c80b94bf89..1ccade2172 100644
--- a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
+++ b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
@@ -8,6 +8,7 @@ package tools.nsc
package interpreter
import java.lang.{ ClassLoader => JavaClassLoader, Iterable => JIterable }
+import scala.tools.asm.Opcodes
import scala.tools.nsc.util.ScalaClassLoader
import java.io.{ ByteArrayInputStream, CharArrayWriter, FileNotFoundException, PrintWriter, StringWriter, Writer }
import java.util.{ Locale }
@@ -758,32 +759,19 @@ object JavapClass {
import scala.tools.asm.ClassReader
import scala.tools.asm.Opcodes.INVOKESTATIC
import scala.tools.asm.tree.{ ClassNode, MethodInsnNode }
- // the accessor methods invoked statically by the apply of the given closure class
- def accesses(s: String): Seq[(String, String)] = {
- val accessor = """accessor\$\d+""".r
+ def callees(s: String): List[(String, String)] = {
loader classReader s withMethods { ms =>
- ms filter (_.name == "apply") flatMap (_.instructions.toArray.collect {
- case i: MethodInsnNode if i.getOpcode == INVOKESTATIC && when(i.name) { case accessor(_*) => true } => (i.owner, i.name)
- })
+ val nonBridgeApplyMethods = ms filter (_.name == "apply") filter (n => (n.access & Opcodes.ACC_BRIDGE) == 0)
+ val instructions = nonBridgeApplyMethods flatMap (_.instructions.toArray)
+ instructions.collect {
+ case i: MethodInsnNode => (i.owner, i.name)
+ }.toList
}
}
- // get the k.$anonfun for the accessor k.m
- def anonOf(k: String, m: String): String = {
- val res =
- loader classReader k withMethods { ms =>
- ms filter (_.name == m) flatMap (_.instructions.toArray.collect {
- case i: MethodInsnNode if i.getOpcode == INVOKESTATIC && i.name.startsWith("$anonfun") => i.name
- })
- }
- assert(res.size == 1)
- res.head
- }
- // the lambdas invoke accessors that call the anonfuns of interest. Filter k on the k#$anonfuns.
- val ack = accesses(lambda)
- assert(ack.size == 1) // There can be only one.
- ack.head match {
- case (k, _) if target.isModule && !(k endsWith "$") => None
- case (k, m) => Some(s"${k}#${anonOf(k, m)}")
+ callees(lambda) match {
+ case (k, _) :: Nil if target.isModule && !(k endsWith "$") => None
+ case (k, m) :: _ => Some(s"${k}#${m}")
+ case _ => None
}
}
/** Translate the supplied targets to patterns for anonfuns.
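Conceptually, the new `callees` helper just walks non-bridge `apply` methods with the ASM tree API. A hedged standalone sketch, assuming a `ClassNode` already populated by a `ClassReader` (which is what the `withMethods` helper hides):

```scala
import scala.collection.JavaConverters._
import scala.tools.asm.Opcodes
import scala.tools.asm.tree.{ ClassNode, MethodInsnNode }

// For each non-bridge `apply` method, collect (owner, name) of every method it invokes.
def calleesOf(node: ClassNode): List[(String, String)] =
  node.methods.asScala.toList
    .filter(m => m.name == "apply" && (m.access & Opcodes.ACC_BRIDGE) == 0)
    .flatMap(_.instructions.toArray.collect { case i: MethodInsnNode => (i.owner, i.name) })
```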
diff --git a/src/scaladoc/scala/tools/nsc/doc/Index.scala b/src/scaladoc/scala/tools/nsc/doc/Index.scala
index 84545e9201..a11ca38a86 100644
--- a/src/scaladoc/scala/tools/nsc/doc/Index.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Index.scala
@@ -11,4 +11,6 @@ trait Index {
type SymbolMap = SortedMap[String, SortedSet[model.MemberEntity]]
def firstLetterIndex: Map[Char, SymbolMap]
+
+ def hasDeprecatedMembers: Boolean
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
index 61ab18d42d..8313d842e5 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -123,6 +123,8 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
new page.Index(universe, index) writeFor this
new page.IndexScript(universe, index) writeFor this
+ if (index.hasDeprecatedMembers)
+ new page.DeprecatedIndex(universe, index) writeFor this
try {
writeTemplates(_ writeFor this)
for (letter <- index.firstLetterIndex) {
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/DeprecatedIndex.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/DeprecatedIndex.scala
new file mode 100755
index 0000000000..f257153bd7
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/DeprecatedIndex.scala
@@ -0,0 +1,58 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ */
+
+package scala
+package tools
+package nsc
+package doc
+package html
+package page
+
+import doc.model._
+
+class DeprecatedIndex(universe: Universe, index: doc.Index) extends HtmlPage {
+
+ def path = List("deprecated-list.html")
+
+ def title = {
+ val s = universe.settings
+ ( if (!s.doctitle.isDefault) s.doctitle.value else "" ) +
+ ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" )
+ }
+
+ def headers =
+ <xml:group>
+ <link href={ relativeLinkTo(List("ref-index.css", "lib")) } media="screen" type="text/css" rel="stylesheet"/>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
+ </xml:group>
+
+
+ private def entry(name: String, methods: Iterable[MemberEntity]) = {
+ val occurrences = methods.filter(_.deprecation.isDefined).map(method =>
+ templateToHtml(method.inDefinitionTemplates.head)
+ ).toList.distinct
+
+ <div class="entry">
+ <div class="name">{ name }</div>
+ <div class="occurrences">{
+ for (owner <- occurrences) yield owner ++ scala.xml.Text(" ")
+ }</div>
+ </div>
+ }
+
+ def deprecatedEntries = {
+ val available = ('_' +: ('a' to 'z')).flatMap(index.firstLetterIndex.get)
+
+ for (group <- available;
+ value <- group if value._2.find(_.deprecation.isDefined).isDefined)
+ yield value
+ }
+
+ def body =
+ <body>{
+ for(value <- deprecatedEntries) yield
+ entry(value._1, value._2.view)
+ }</body>
+
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
index ce3a5eb1fc..6bfe480e33 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
@@ -61,12 +61,17 @@ class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
}
}
+ def deprecated: NodeSeq = if (index.hasDeprecatedMembers)
+ <a target="template" href="deprecated-list.html">deprecated</a>
+ else
+ <span>deprecated</span>
+
def browser =
<div id="browser" class="ui-layout-west">
<div class="ui-west-center">
<div id="filter">
<div id="textfilter"></div>
- <div id="letters">{ letters }</div>
+ <div id="letters">{ letters } &#8211; { deprecated }</div>
</div>
<div class="pack" id="tpl">{
def packageElem(pack: model.Package): NodeSeq = {
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
index 643a089aae..ad53dc6bfa 100755
--- a/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
@@ -14,10 +14,12 @@ object IndexModelFactory {
def makeIndex(universe: Universe): Index = new Index {
- lazy val firstLetterIndex: Map[Char, SymbolMap] = {
+ lazy val (firstLetterIndex, hasDeprecatedMembers): (Map[Char, SymbolMap], Boolean) = {
object result extends mutable.HashMap[Char,SymbolMap] {
+ var deprecated = false
+
/* symbol name ordering */
implicit def orderingMap = math.Ordering.String
@@ -32,6 +34,8 @@ object IndexModelFactory {
val members = letter.get(d.name).getOrElse {
SortedSet.empty[MemberEntity](Ordering.by { _.toString })
} + d
+ if (!deprecated && members.find(_.deprecation.isDefined).isDefined)
+ deprecated = true
this(firstLetter) = letter + (d.name -> members)
}
}
@@ -50,7 +54,7 @@ object IndexModelFactory {
gather(universe.rootPackage)
- result.toMap
+ (result.toMap, result.deprecated)
}
}
}
diff --git a/test/files/run/repl-javap-lambdas.scala b/test/files/run/repl-javap-lambdas.scala
index c503c99d66..76a6ec8450 100644
--- a/test/files/run/repl-javap-lambdas.scala
+++ b/test/files/run/repl-javap-lambdas.scala
@@ -16,7 +16,7 @@ object Test extends JavapTest {
// three anonfuns of Betty#g
override def yah(res: Seq[String]) = {
import PartialFunction.{ cond => when }
- val r = """\s*private static final .* \$anonfun\$\d+\(.*""".r
+ val r = """.*final .* .*\$anonfun\$\d+\(.*""".r
def filtered = res filter (when(_) { case r(_*) => true })
3 == filtered.size
}
diff --git a/test/files/run/t3368-b.check b/test/files/run/t3368-b.check
new file mode 100644
index 0000000000..4cbe98c577
--- /dev/null
+++ b/test/files/run/t3368-b.check
@@ -0,0 +1,89 @@
+[[syntax trees at end of parser]] // newSource1.scala
+package <empty> {
+ abstract trait X extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def x = {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hi & bye"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("red & black"));
+ $buf
+ }
+ };
+ abstract trait Y extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def y = {
+ {
+ new _root_.scala.xml.Elem(null, "a", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "b", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("start"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hi & bye"));
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "c", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("world"));
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "d", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("stuff"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("red & black"));
+ $buf
+ }: _*))
+ }
+ }
+ };
+ abstract trait Z extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def d = new _root_.scala.xml.PCData("hello, world");
+ def e = {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf
+ };
+ def f = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("x"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf
+ }: _*))
+ }
+ };
+ def g = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf
+ }: _*))
+ }
+ };
+ def h = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf
+ }: _*))
+ }
+ }
+ }
+}
+
diff --git a/test/files/run/t3368-b.scala b/test/files/run/t3368-b.scala
new file mode 100644
index 0000000000..108cb9a5ee
--- /dev/null
+++ b/test/files/run/t3368-b.scala
@@ -0,0 +1,26 @@
+
+import scala.tools.partest.ParserTest
+
+
+object Test extends ParserTest {
+
+ override def code = """
+ trait X {
+ // error: in XML literal: name expected, but char '!' cannot start a name
+ def x = <![CDATA[hi & bye]]> <![CDATA[red & black]]>
+ }
+ trait Y {
+ def y = <a><b/>start<![CDATA[hi & bye]]><c/>world<d/>stuff<![CDATA[red & black]]></a>
+ }
+ trait Z {
+ def d = <![CDATA[hello, world]]>
+ def e = <![CDATA[hello, world]]><![CDATA[hello, world]]> // top level not coalesced
+ def f = <foo>x<![CDATA[hello, world]]></foo> // adjoining text
+ def g = <foo><![CDATA[hello, world]]></foo> // text node when coalescing
+ def h = <foo><![CDATA[hello, world]]><![CDATA[hello, world]]></foo>
+ }
+ """
+
+ // not coalescing
+ override def extraSettings = s"${super.extraSettings} -Xxml:-coalescing"
+}
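
The -b variant (and -d below, via the 2.12 default) runs the parser with coalescing off, so each CDATA section stays a PCData node; the updated t3368.check further below shows the coalescing behaviour, where the same sections are folded into plain Text and merged with adjacent text. A small sketch of the difference between the two node types, assuming the scala-xml library is on the classpath:

import scala.xml.{PCData, Text}

object CoalescingDemo extends App {
  val cdata = new PCData("hi & bye")   // what the non-coalescing parser emits for <![CDATA[...]]>
  val text  = new Text("hi & bye")     // what the coalescing parser folds the section into
  println(cdata)   // <![CDATA[hi & bye]]>
  println(text)    // hi &amp; bye
}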
diff --git a/test/files/run/t3368-c.check b/test/files/run/t3368-c.check
new file mode 100644
index 0000000000..e0c10cc0dd
--- /dev/null
+++ b/test/files/run/t3368-c.check
@@ -0,0 +1,85 @@
+[[syntax trees at end of parser]] // newSource1.scala
+package <empty> {
+ abstract trait X extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def x = {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("hi & bye"));
+ $buf.$amp$plus(new _root_.scala.xml.Text("red & black"));
+ $buf
+ }
+ };
+ abstract trait Y extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def y = {
+ {
+ new _root_.scala.xml.Elem(null, "a", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "b", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("starthi & bye"));
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "c", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("world"));
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "d", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("stuffred & black"));
+ $buf
+ }: _*))
+ }
+ }
+ };
+ abstract trait Z extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def d = new _root_.scala.xml.Text("hello, world");
+ def e = {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("hello, world"));
+ $buf.$amp$plus(new _root_.scala.xml.Text("hello, world"));
+ $buf
+ };
+ def f = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("xhello, world"));
+ $buf
+ }: _*))
+ }
+ };
+ def g = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("hello, world"));
+ $buf
+ }: _*))
+ }
+ };
+ def h = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("hello, worldhello, world"));
+ $buf
+ }: _*))
+ }
+ }
+ }
+}
+
diff --git a/test/files/run/t3368-c.scala b/test/files/run/t3368-c.scala
new file mode 100644
index 0000000000..5121794463
--- /dev/null
+++ b/test/files/run/t3368-c.scala
@@ -0,0 +1,26 @@
+
+import scala.tools.partest.ParserTest
+
+
+object Test extends ParserTest {
+
+ override def code = """
+ trait X {
+ // error: in XML literal: name expected, but char '!' cannot start a name
+ def x = <![CDATA[hi & bye]]> <![CDATA[red & black]]>
+ }
+ trait Y {
+ def y = <a><b/>start<![CDATA[hi & bye]]><c/>world<d/>stuff<![CDATA[red & black]]></a>
+ }
+ trait Z {
+ def d = <![CDATA[hello, world]]>
+ def e = <![CDATA[hello, world]]><![CDATA[hello, world]]> // top level not coalesced
+ def f = <foo>x<![CDATA[hello, world]]></foo> // adjoining text
+ def g = <foo><![CDATA[hello, world]]></foo> // text node when coalescing
+ def h = <foo><![CDATA[hello, world]]><![CDATA[hello, world]]></foo>
+ }
+ """
+
+ // default coalescing behavior, whatever that is today.
+ //override def extraSettings = s"${super.extraSettings} -Xxml:coalescing"
+}
diff --git a/test/files/run/t3368-d.check b/test/files/run/t3368-d.check
new file mode 100644
index 0000000000..4cbe98c577
--- /dev/null
+++ b/test/files/run/t3368-d.check
@@ -0,0 +1,89 @@
+[[syntax trees at end of parser]] // newSource1.scala
+package <empty> {
+ abstract trait X extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def x = {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hi & bye"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("red & black"));
+ $buf
+ }
+ };
+ abstract trait Y extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def y = {
+ {
+ new _root_.scala.xml.Elem(null, "a", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "b", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("start"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hi & bye"));
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "c", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("world"));
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "d", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("stuff"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("red & black"));
+ $buf
+ }: _*))
+ }
+ }
+ };
+ abstract trait Z extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def d = new _root_.scala.xml.PCData("hello, world");
+ def e = {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf
+ };
+ def f = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("x"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf
+ }: _*))
+ }
+ };
+ def g = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf
+ }: _*))
+ }
+ };
+ def h = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf
+ }: _*))
+ }
+ }
+ }
+}
+
diff --git a/test/files/run/t3368-d.scala b/test/files/run/t3368-d.scala
new file mode 100644
index 0000000000..5777c1a81e
--- /dev/null
+++ b/test/files/run/t3368-d.scala
@@ -0,0 +1,26 @@
+
+import scala.tools.partest.ParserTest
+
+
+object Test extends ParserTest {
+
+ override def code = """
+ trait X {
+ // error: in XML literal: name expected, but char '!' cannot start a name
+ def x = <![CDATA[hi & bye]]> <![CDATA[red & black]]>
+ }
+ trait Y {
+ def y = <a><b/>start<![CDATA[hi & bye]]><c/>world<d/>stuff<![CDATA[red & black]]></a>
+ }
+ trait Z {
+ def d = <![CDATA[hello, world]]>
+ def e = <![CDATA[hello, world]]><![CDATA[hello, world]]> // top level not coalesced
+ def f = <foo>x<![CDATA[hello, world]]></foo> // adjoining text
+ def g = <foo><![CDATA[hello, world]]></foo> // text node when coalescing
+ def h = <foo><![CDATA[hello, world]]><![CDATA[hello, world]]></foo>
+ }
+ """
+
+ // default under 2.12 is not coalescing
+ override def extraSettings = s"${super.extraSettings} -Xsource:212"
+}
diff --git a/test/files/run/t3368.check b/test/files/run/t3368.check
index 1d9dd677f6..e0c10cc0dd 100644
--- a/test/files/run/t3368.check
+++ b/test/files/run/t3368.check
@@ -6,8 +6,8 @@ package <empty> {
};
def x = {
val $buf = new _root_.scala.xml.NodeBuffer();
- $buf.$amp$plus(new _root_.scala.xml.PCData("hi & bye"));
- $buf.$amp$plus(new _root_.scala.xml.PCData("red & black"));
+ $buf.$amp$plus(new _root_.scala.xml.Text("hi & bye"));
+ $buf.$amp$plus(new _root_.scala.xml.Text("red & black"));
$buf
}
};
@@ -41,6 +41,45 @@ package <empty> {
}: _*))
}
}
+ };
+ abstract trait Z extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def d = new _root_.scala.xml.Text("hello, world");
+ def e = {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("hello, world"));
+ $buf.$amp$plus(new _root_.scala.xml.Text("hello, world"));
+ $buf
+ };
+ def f = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("xhello, world"));
+ $buf
+ }: _*))
+ }
+ };
+ def g = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("hello, world"));
+ $buf
+ }: _*))
+ }
+ };
+ def h = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("hello, worldhello, world"));
+ $buf
+ }: _*))
+ }
+ }
}
}
diff --git a/test/files/run/t3368.scala b/test/files/run/t3368.scala
index 15acba5099..284fed0784 100644
--- a/test/files/run/t3368.scala
+++ b/test/files/run/t3368.scala
@@ -12,7 +12,15 @@ object Test extends ParserTest {
trait Y {
def y = <a><b/>start<![CDATA[hi & bye]]><c/>world<d/>stuff<![CDATA[red & black]]></a>
}
+ trait Z {
+ def d = <![CDATA[hello, world]]>
+ def e = <![CDATA[hello, world]]><![CDATA[hello, world]]> // top level not coalesced
+ def f = <foo>x<![CDATA[hello, world]]></foo> // adjoining text
+ def g = <foo><![CDATA[hello, world]]></foo> // text node when coalescing
+ def h = <foo><![CDATA[hello, world]]><![CDATA[hello, world]]></foo>
+ }
"""
+ // coalescing
override def extraSettings = s"${super.extraSettings} -Xxml:coalescing"
}
diff --git a/test/files/run/t9268.check b/test/files/run/t9268.check
new file mode 100644
index 0000000000..90ef940eb3
--- /dev/null
+++ b/test/files/run/t9268.check
@@ -0,0 +1,5 @@
+Compiling Client1
+pos: NoPosition Class Waiter not found - continuing with a stub. WARNING
+Compiling Client2
+pos: NoPosition Class Waiter not found - continuing with a stub. WARNING
+pos: NoPosition Unable to locate class corresponding to inner class entry for Predicate in owner Waiter ERROR
diff --git a/test/files/run/t9268/Java.java b/test/files/run/t9268/Java.java
new file mode 100644
index 0000000000..c9a0bec3ff
--- /dev/null
+++ b/test/files/run/t9268/Java.java
@@ -0,0 +1,12 @@
+public class Java {
+}
+
+class Partial {
+ public <E extends java.lang.Exception> long waitFor(long l, Waiter.Predicate<E> pred) throws E {
+ return 0L;
+ }
+}
+
+class Waiter {
+ interface Predicate<E> {}
+}
diff --git a/test/files/run/t9268/Test.scala b/test/files/run/t9268/Test.scala
new file mode 100644
index 0000000000..813cbe7b60
--- /dev/null
+++ b/test/files/run/t9268/Test.scala
@@ -0,0 +1,40 @@
+import scala.tools.partest._
+import java.io.File
+
+object Test extends StoreReporterDirectTest {
+ def code = ???
+
+ def compileCode(code: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+ }
+
+ def client1 = """
+ class Client1 { def p(p: Partial) = p.toString }
+ """
+
+ def client2 = """
+ class Client2 { def p(p: Partial) = p.waitFor() }
+ """
+
+ def deleteClass(s: String) = {
+ val f = new File(testOutput.path, s + ".class")
+ assert(f.exists)
+ f.delete()
+ }
+
+ def show(): Unit = {
+ deleteClass("Waiter")
+ deleteClass("Waiter$Predicate")
+
+ // Used to crash in Java Generic Signature parsing
+ println("Compiling Client1")
+ compileCode(client1)
+ println(storeReporter.infos.mkString("\n"))
+ storeReporter.reset()
+ println("Compiling Client2")
+ compileCode(client2)
+ println(storeReporter.infos.mkString("\n"))
+ }
+}
+
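
The t9268 test deletes Waiter's class files and recompiles clients against Partial, so the compiler must fall back to stub symbols while parsing the Java generic signature of waitFor. A rough, hypothetical sketch of driving the compiler directly and collecting those diagnostics with a StoreReporter (Client1.scala and the pre-deleted class files are assumptions, not part of this patch):

import scala.tools.nsc.{Global, Settings}
import scala.tools.nsc.reporters.StoreReporter

object StubWarningDemo {
  def main(args: Array[String]): Unit = {
    val settings = new Settings
    settings.usejavacp.value = true            // compile against the running JVM's classpath
    val reporter = new StoreReporter           // collects diagnostics instead of printing them
    val global   = new Global(settings, reporter)
    // hypothetical source referencing Partial, compiled after Waiter*.class were removed
    new global.Run().compile(List("Client1.scala"))
    // the infos should then include a "Class Waiter not found - continuing with a stub"
    // warning, in the spirit of t9268.check above
    reporter.infos.foreach(info => println(s"${info.severity}: ${info.msg}"))
  }
}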
diff --git a/test/junit/scala/collection/mutable/VectorTest.scala b/test/junit/scala/collection/mutable/VectorTest.scala
index e9c4d44a72..b3219d1b02 100644
--- a/test/junit/scala/collection/mutable/VectorTest.scala
+++ b/test/junit/scala/collection/mutable/VectorTest.scala
@@ -38,7 +38,6 @@ class VectorTest {
def iteratorCat() {
def its = vecs.map(_.toList.toIterator)
val cats = vecs.map(a => its.map(a ++ _))
- println(cats)
assert( cats == ans )
}
diff --git a/test/scaladoc/resources/SI-4476.scala b/test/scaladoc/resources/SI-4476.scala
new file mode 100644
index 0000000000..eb35ef45e7
--- /dev/null
+++ b/test/scaladoc/resources/SI-4476.scala
@@ -0,0 +1,9 @@
+package foo
+
+@deprecated("","")
+class A
+
+class B {
+ @deprecated("","")
+ def bar = 1
+}
diff --git a/test/scaladoc/scalacheck/DeprecatedIndexTest.scala b/test/scaladoc/scalacheck/DeprecatedIndexTest.scala
new file mode 100644
index 0000000000..4a5a2001d4
--- /dev/null
+++ b/test/scaladoc/scalacheck/DeprecatedIndexTest.scala
@@ -0,0 +1,50 @@
+import org.scalacheck._
+import org.scalacheck.Prop._
+
+import scala.tools.nsc.doc
+import scala.tools.nsc.doc.html.page.DeprecatedIndex
+import java.net.{URLClassLoader, URLDecoder}
+
+object Test extends Properties("IndexScript") {
+
+ def getClasspath = {
+ // these things can be tricky
+ // this test previously relied on the assumption that the current thread's classloader is a URL classloader and contains all the classpath entries
+ // does partest actually guarantee this? to quote Leonard Nimoy: The answer, of course, is no.
+ // this test _will_ fail again some time in the future.
+ // Footnote: java.lang.ClassCastException: org.apache.tools.ant.loader.AntClassLoader5 cannot be cast to java.net.URLClassLoader
+ val loader = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader]
+ val paths = loader.getURLs.map(u => URLDecoder.decode(u.getPath))
+ paths mkString java.io.File.pathSeparator
+ }
+
+ val docFactory = {
+ val settings = new doc.Settings({Console.err.println(_)})
+ settings.scaladocQuietRun = true
+ settings.nowarn.value = true
+ settings.classpath.value = getClasspath
+ val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
+ new doc.DocFactory(reporter, settings)
+ }
+
+ val indexModelFactory = doc.model.IndexModelFactory
+
+ def createDeprecatedScript(path: String) =
+ docFactory.makeUniverse(Left(List(path))) match {
+ case Some(universe) => {
+ val index = new DeprecatedIndex(universe, indexModelFactory.makeIndex(universe))
+ Some(index)
+ }
+ case _ =>
+ None
+ }
+
+ property("deprecated-list page lists deprecated members") = {
+ createDeprecatedScript("test/scaladoc/resources/SI-4476.scala") match {
+ case Some(p) =>
+ p.deprecatedEntries.find(_._1 == "A").isDefined &&
+ p.deprecatedEntries.find(_._1 == "bar").isDefined
+ case None => false
+ }
+ }
+}
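
The getClasspath comment above flags the known weakness of casting the context classloader to URLClassLoader (it breaks under Ant's loader). A defensive sketch of that lookup with a fallback to java.class.path; this is an illustration of the concern, not the code the test uses:

import java.net.{URLClassLoader, URLDecoder}

object ClasspathLookup {
  def classpath: String = Thread.currentThread.getContextClassLoader match {
    case loader: URLClassLoader =>
      loader.getURLs.map(url => URLDecoder.decode(url.getPath, "UTF-8"))
        .mkString(java.io.File.pathSeparator)
    case _ =>
      sys.props("java.class.path")   // fallback when the loader cannot be inspected (e.g. Ant's)
  }
}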
diff --git a/test/scaladoc/scalacheck/IndexTest.scala b/test/scaladoc/scalacheck/IndexTest.scala
index abc0e5da01..7dbd2103a6 100644
--- a/test/scaladoc/scalacheck/IndexTest.scala
+++ b/test/scaladoc/scalacheck/IndexTest.scala
@@ -71,7 +71,7 @@ object Test extends Properties("Index") {
case None => false
}
}
- property("browser contants a script element") = {
+ property("browser contains a script element") = {
createIndex("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
case Some(index) =>
(index.browser \ "script").size == 1
@@ -86,4 +86,10 @@ object Test extends Properties("Index") {
case None => false
}
}
+ property("index should report if there are deprecated members") = {
+ createIndex("test/scaladoc/resources/SI-4476.scala") match {
+ case Some(indexPage) => indexPage.index.hasDeprecatedMembers
+ case None => false
+ }
+ }
}