-rw-r--r--  README.md | 24
-rw-r--r--  bincompat-backward.whitelist.conf | 20
-rw-r--r--  bincompat-forward.whitelist.conf | 20
-rw-r--r--  build.sbt | 4
-rw-r--r--  project/BuildSettings.scala | 2
-rw-r--r--  project/GenerateAnyVals.scala | 2
-rw-r--r--  project/JarJar.scala | 2
-rw-r--r--  project/MiMa.scala | 2
-rw-r--r--  project/Osgi.scala | 2
-rw-r--r--  project/ParserUtil.scala | 2
-rw-r--r--  project/PartestUtil.scala | 6
-rw-r--r--  project/Quiet.scala | 2
-rw-r--r--  project/ScalaOptionParser.scala | 2
-rw-r--r--  project/ScalaTool.scala | 2
-rw-r--r--  project/ScriptCommands.scala | 2
-rw-r--r--  project/VersionUtil.scala | 71
-rw-r--r--  project/plugins.sbt | 9
-rw-r--r--  scripts/common | 9
-rwxr-xr-x  scripts/jobs/integrate/bootstrap | 199
-rw-r--r--  spec/04-basic-declarations-and-definitions.md | 9
-rw-r--r--  spec/05-classes-and-objects.md | 10
-rw-r--r--  spec/06-expressions.md | 4
-rw-r--r--  spec/README.md | 6
-rw-r--r--  spec/_config.yml | 2
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala | 86
-rw-r--r--  src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaVersion.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 49
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala | 66
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 25
-rw-r--r--  src/compiler/scala/tools/util/PathResolver.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/FlatHashTable.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/HashTable.scala | 15
-rw-r--r--  src/library/scala/collection/mutable/OpenHashMap.scala | 4
-rw-r--r--  src/library/scala/util/Properties.scala | 6
-rw-r--r--  src/reflect/scala/reflect/internal/BaseTypeSeqs.scala | 5
-rw-r--r--  src/reflect/scala/reflect/internal/Names.scala | 6
-rw-r--r--  src/reflect/scala/reflect/internal/Scopes.scala | 33
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/TreeInfo.scala | 1
-rw-r--r--  src/reflect/scala/reflect/internal/TypeDebugging.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 27
-rw-r--r--  src/reflect/scala/reflect/internal/pickling/UnPickler.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/transform/Erasure.scala | 5
-rw-r--r--  src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala | 2
-rw-r--r--  src/reflect/scala/reflect/runtime/SynchronizedOps.scala | 11
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ExprTyper.scala | 14
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala | 33
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ReplProps.scala | 2
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Scripted.scala | 2
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala | 10
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/Page.scala | 27
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala | 10
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala | 2
-rw-r--r--  test/files/neg/no-predef.check | 8
-rwxr-xr-x  test/files/neg/t10207.check | 4
-rw-r--r--  test/files/neg/t10207.scala | 16
-rw-r--r--  test/files/neg/t2102.check | 4
-rw-r--r--  test/files/neg/type-diagnostics.check | 4
-rw-r--r--  test/files/pos/overloaded_ho_fun.scala | 15
-rw-r--r--  test/files/pos/sam_erasure_boundedwild.scala | 11
-rw-r--r--  test/files/pos/t4237.scala | 15
-rw-r--r--  test/files/run/elidable-opt.check | 2
-rw-r--r--  test/files/run/elidable.check | 2
-rw-r--r--  test/files/run/elidable.scala | 42
-rw-r--r--  test/files/run/literals.scala | 10
-rw-r--r--  test/files/run/t10026.check | 1
-rw-r--r--  test/files/run/t10026.scala | 11
-rw-r--r--  test/files/run/t10171/Test.scala | 59
-rw-r--r--  test/files/run/t9880-9881.check | 36
-rw-r--r--  test/files/run/t9880-9881.scala | 29
-rw-r--r--  test/junit/scala/reflect/internal/NamesTest.scala | 25
-rw-r--r--  test/junit/scala/sys/process/PipedProcessTest.scala | 30
-rw-r--r--  test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala | 41
-rw-r--r--  test/scalacheck/duration.scala | 5
-rw-r--r--  test/scaladoc/run/SI-9704.check | 4
-rw-r--r--  test/scaladoc/run/SI-9704.scala | 22
-rw-r--r--  test/scaladoc/run/inlineToStr-strips-unwanted-text.check | 1
-rw-r--r--  test/scaladoc/run/inlineToStr-strips-unwanted-text.scala | 58
-rw-r--r--  test/scaladoc/run/shortDescription-annotation.scala | 19
-rwxr-xr-x  tools/get-scala-commit-date | 22
-rw-r--r--  tools/get-scala-commit-date.bat | 9
-rwxr-xr-x  tools/get-scala-commit-sha | 22
-rw-r--r--  tools/get-scala-commit-sha.bat | 9
-rw-r--r--  versions.properties | 2
92 files changed, 1001 insertions, 471 deletions
diff --git a/README.md b/README.md
index 5d565a4418..6a9360947f 100644
--- a/README.md
+++ b/README.md
@@ -104,10 +104,13 @@ Core commands:
- `partest` runs partest tests (accepts options, try `partest --help`)
- `publishLocal` publishes a distribution locally (can be used as `scalaVersion` in
other sbt projects)
- - Optionally `set baseVersionSuffix := "abcd123-SNAPSHOT"`
+ - Optionally `set baseVersionSuffix := "-bin-abcd123-SNAPSHOT"`
where `abcd123` is the git hash of the revision being published. You can also
- use something custom like `"mypatch"`. This changes the version number from
- `2.12.0-SNAPSHOT` to something more stable (`2.12.0-abcd123-SNAPSHOT`).
+ use something custom like `"-bin-mypatch"`. This changes the version number from
+ `2.12.2-SNAPSHOT` to something more stable (`2.12.2-bin-abcd123-SNAPSHOT`).
+ - Note that the `-bin` string marks the version as binary compatible. Using it in
+ sbt will cause the `scalaBinaryVersion` to be `2.12`. If the version is not
+ binary compatible, we recommend using `-pre`, e.g., `2.13.0-pre-abcd123-SNAPSHOT`.
- Optionally `set publishArtifact in (Compile, packageDoc) in ThisBuild := false`
to skip generating / publishing API docs (speeds up the process).
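A hypothetical way to see the effect from an sbt console (assuming the snapshot has been published locally): with the first setting below, `show scalaBinaryVersion` reports `2.12`, with the second it reports the full version, so binary-incompatible builds never resolve `_2.13` artifacts by accident.

```
$ sbt
> set scalaVersion := "2.12.2-bin-abcd123-SNAPSHOT"
> show scalaBinaryVersion
> set scalaVersion := "2.13.0-pre-abcd123-SNAPSHOT"
> show scalaBinaryVersion
```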
@@ -199,8 +202,9 @@ CI performs a full bootstrap. The first task, `validate-publish-core`, publishes
a build of your commit to the temporary repository
https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots.
Note that this build is not yet bootstrapped, its bytecode is built using the
-current `starr`. The version number is `2.12.0-abcd123-SNAPSHOT` where `abcd123`
-is the commit hash.
+current `starr`. The version number is `2.12.2-bin-abcd123-SNAPSHOT` where `abcd123`
+is the commit hash. For binary incompatible builds, the version number is
+`2.13.0-pre-abcd123-SNAPSHOT`.
You can use Scala builds in the validation repository locally by adding a resolver
and specifying the corresponding `scalaVersion`:
@@ -208,7 +212,7 @@ and specifying the corresponding `scalaVersion`:
```
$ sbt
> set resolvers += "pr" at "https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots/"
-> set scalaVersion := "2.12.0-abcd123-SNAPSHOT"
+> set scalaVersion := "2.12.2-bin-abcd123-SNAPSHOT"
> console
```
@@ -220,7 +224,7 @@ tested version during CI validation.
The Scala CI builds nightly download releases (including all modules) and publishes
them to the following locations:
- [2.12.x](http://www.scala-lang.org/files/archive/nightly/2.12.x/?C=M;O=D)
- - [2.11.x](http://www.scala-lang.org/files/archive/nightly/2.11.x/?C=M;O=A)
+ - [2.11.x](http://www.scala-lang.org/files/archive/nightly/2.11.x/?C=M;O=D)
The CI also publishes nightly API docs:
- [2.12.x](http://www.scala-lang.org/files/archive/nightly/2.12.x/api/?C=M;O=D)
@@ -228,10 +232,8 @@ The CI also publishes nightly API docs:
- [2.11.x](http://www.scala-lang.org/files/archive/nightly/2.11.x/api/?C=M;O=D)
- [symlink to the latest](http://www.scala-lang.org/files/archive/nightly/2.11.x/api/2.11.x/)
-Note that we currently don't publish nightly (or SNAPSHOT) builds in maven or ivy
-format to any repository. You can track progress on this front at
-[scala-jenkins-infra#133](https://github.com/scala/scala-jenkins-infra/issues/133)
-and [scala-dev#68](https://github.com/scala/scala-dev/issues/68).
+Using a nightly build in sbt is explained in
+[this answer on Stack Overflow](http://stackoverflow.com/questions/40622878)
## Scala CI Internals
diff --git a/bincompat-backward.whitelist.conf b/bincompat-backward.whitelist.conf
index 57dc564e8a..3d4e40a00d 100644
--- a/bincompat-backward.whitelist.conf
+++ b/bincompat-backward.whitelist.conf
@@ -22,6 +22,18 @@ filter {
problemName=DirectMissingMethodProblem
},
{
+ matchName="scala.collection.mutable.OpenHashMap.nextPositivePowerOfTwo"
+ problemName=DirectMissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.HashTable.nextPositivePowerOfTwo"
+ problemName=DirectMissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.HashTable.powerOfTwo"
+ problemName=DirectMissingMethodProblem
+ },
+ {
matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.unpickleClass"
problemName=IncompatibleMethTypeProblem
},
@@ -34,6 +46,14 @@ filter {
problemName=DirectMissingMethodProblem
},
{
+ matchName="scala.reflect.runtime.SynchronizedOps.scala$reflect$runtime$SynchronizedOps$$super$newMappedBaseTypeSeq"
+ problemName=ReversedMissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.runtime.SynchronizedOps#SynchronizedBaseTypeSeq.lateMap"
+ problemName=DirectMissingMethodProblem
+ },
+ {
matchName="scala.collection.immutable.HashMap.contains0"
problemName=DirectMissingMethodProblem
},
diff --git a/bincompat-forward.whitelist.conf b/bincompat-forward.whitelist.conf
index ad778f447a..94b2a57b25 100644
--- a/bincompat-forward.whitelist.conf
+++ b/bincompat-forward.whitelist.conf
@@ -22,6 +22,10 @@ filter {
problemName=DirectMissingMethodProblem
},
{
+ matchName="scala.collection.mutable.HashTable.nextPositivePowerOfTwo"
+ problemName=DirectMissingMethodProblem
+ }
+ {
matchName="scala.reflect.runtime.Settings.Yvirtpatmat"
problemName=DirectMissingMethodProblem
},
@@ -44,6 +48,14 @@ filter {
problemName=DirectMissingMethodProblem
},
{
+ matchName="scala.reflect.runtime.SynchronizedOps.newMappedBaseTypeSeq"
+ problemName=DirectMissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.runtime.JavaUniverse.newMappedBaseTypeSeq"
+ problemName=DirectMissingMethodProblem
+ },
+ {
matchName="scala.collection.immutable.HashMap.contains0"
problemName=DirectMissingMethodProblem
},
@@ -415,6 +427,14 @@ filter {
{
matchName="scala.annotation.showAsInfix"
problemName=MissingClassProblem
+ },
+ {
+ matchName="scala.util.PropertiesTrait.coloredOutputEnabled"
+ problemName=DirectMissingMethodProblem
+ },
+ {
+ matchName="scala.util.Properties.coloredOutputEnabled"
+ problemName=DirectMissingMethodProblem
}
]
}
diff --git a/build.sbt b/build.sbt
index 6bb9c10166..da823d7df7 100644
--- a/build.sbt
+++ b/build.sbt
@@ -32,6 +32,7 @@
* - to modularize the Scala compiler or library further
*/
+import scala.build._
import VersionUtil._
// Scala dependencies:
@@ -1017,6 +1018,9 @@ commands += Command("partest")(_ => PartestUtil.partestParser((baseDirectory in
("test/it:testOnly -- " + parsed) :: state
}
+// Watch the test files also so ~partest triggers on test case changes
+watchSources ++= PartestUtil.testFilePaths((baseDirectory in ThisBuild).value, (baseDirectory in ThisBuild).value / "test")
+
// Add tab completion to scalac et al.
commands ++= {
val commands =
diff --git a/project/BuildSettings.scala b/project/BuildSettings.scala
index 76cd888a2d..8456f91f86 100644
--- a/project/BuildSettings.scala
+++ b/project/BuildSettings.scala
@@ -1,3 +1,5 @@
+package scala.build
+
import sbt._
/** This object defines keys that should be visible with an unqualified name in all .sbt files and the command line */
diff --git a/project/GenerateAnyVals.scala b/project/GenerateAnyVals.scala
index 84454cb0ed..f349bfd16b 100644
--- a/project/GenerateAnyVals.scala
+++ b/project/GenerateAnyVals.scala
@@ -1,3 +1,5 @@
+package scala.build
+
/** Code generation of the AnyVal types and their companions. */
trait GenerateAnyValReps {
self: GenerateAnyVals =>
diff --git a/project/JarJar.scala b/project/JarJar.scala
index 918060c9ee..3cb9e4cfff 100644
--- a/project/JarJar.scala
+++ b/project/JarJar.scala
@@ -1,3 +1,5 @@
+package scala.build
+
import org.pantsbuild.jarjar
import org.pantsbuild.jarjar._
import org.pantsbuild.jarjar.util._
diff --git a/project/MiMa.scala b/project/MiMa.scala
index ceda8f5594..fb9bb175ab 100644
--- a/project/MiMa.scala
+++ b/project/MiMa.scala
@@ -1,3 +1,5 @@
+package scala.build
+
// It would be nice to use sbt-mima-plugin here, but the plugin is missing
// at least two features we need:
// * ability to run MiMa twice, swapping `curr` and `prev`, to detect
diff --git a/project/Osgi.scala b/project/Osgi.scala
index 082fd91ed1..b05751958a 100644
--- a/project/Osgi.scala
+++ b/project/Osgi.scala
@@ -1,3 +1,5 @@
+package scala.build
+
import aQute.bnd.osgi.Builder
import aQute.bnd.osgi.Constants._
import java.util.Properties
diff --git a/project/ParserUtil.scala b/project/ParserUtil.scala
index cdaf8831a5..bbd9129dbe 100644
--- a/project/ParserUtil.scala
+++ b/project/ParserUtil.scala
@@ -1,3 +1,5 @@
+package scala.build
+
import sbt._
import sbt.complete.Parser._
import sbt.complete.Parsers._
diff --git a/project/PartestUtil.scala b/project/PartestUtil.scala
index 23570a88ec..6d2c9a4c45 100644
--- a/project/PartestUtil.scala
+++ b/project/PartestUtil.scala
@@ -1,3 +1,5 @@
+package scala.build
+
import sbt._
import sbt.complete._, Parser._, Parsers._
@@ -24,6 +26,10 @@ object PartestUtil {
isParentOf(testBase / srcPath, f, 2) || isParentOf(f, testBase / srcPath, Int.MaxValue)
}
}
+
+ def testFilePaths(globalBase: File, testBase: File): Seq[java.io.File] =
+ (new TestFiles("files", globalBase, testBase)).allTestCases.map(_._1)
+
/** A parser for the custom `partest` command */
def partestParser(globalBase: File, testBase: File): Parser[String] = {
val knownUnaryOptions = List(
diff --git a/project/Quiet.scala b/project/Quiet.scala
index 0a186d8f28..8ae08ad5a6 100644
--- a/project/Quiet.scala
+++ b/project/Quiet.scala
@@ -1,3 +1,5 @@
+package scala.build
+
import sbt._
import Keys._
diff --git a/project/ScalaOptionParser.scala b/project/ScalaOptionParser.scala
index 27ed1f0e6f..0208921959 100644
--- a/project/ScalaOptionParser.scala
+++ b/project/ScalaOptionParser.scala
@@ -1,3 +1,5 @@
+package scala.build
+
import ParserUtil._
import sbt._
import sbt.complete.Parser._
diff --git a/project/ScalaTool.scala b/project/ScalaTool.scala
index 98e18235c4..ace547c640 100644
--- a/project/ScalaTool.scala
+++ b/project/ScalaTool.scala
@@ -1,3 +1,5 @@
+package scala.build
+
import sbt._
import org.apache.commons.lang3.SystemUtils
import org.apache.commons.lang3.StringUtils.replaceEach
diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala
index 8d5d09943a..f6b700f007 100644
--- a/project/ScriptCommands.scala
+++ b/project/ScriptCommands.scala
@@ -1,3 +1,5 @@
+package scala.build
+
import sbt._
import Keys._
import BuildSettings.autoImport._
diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala
index 7c4909697f..aacbc78329 100644
--- a/project/VersionUtil.scala
+++ b/project/VersionUtil.scala
@@ -1,7 +1,11 @@
+package scala.build
+
import sbt._
import Keys._
-import java.util.Properties
+import java.util.{Date, Locale, Properties, TimeZone}
import java.io.{File, FileInputStream}
+import java.text.SimpleDateFormat
+
import scala.collection.JavaConverters._
import BuildSettings.autoImport._
@@ -49,19 +53,21 @@ object VersionUtil {
/** Compute the canonical, Maven and OSGi version number from `baseVersion` and `baseVersionSuffix`.
* Examples of the generated versions:
*
- * ("2.11.8", "SNAPSHOT" ) -> ("2.11.8-20151215-133023-7559aed", "2.11.8-SNAPSHOT", "2.11.8.v20151215-133023-7559aed")
- * ("2.11.8", "SHA-SNAPSHOT") -> ("2.11.8-20151215-133023-7559aed", "2.11.8-7559aed-SNAPSHOT", "2.11.8.v20151215-133023-7559aed")
- * ("2.11.8", "SHA-NIGHTLY" ) -> ("2.11.8-7559aed-nightly", "2.11.8-7559aed-nightly", "2.11.8.v20151215-133023-NIGHTLY-7559aed")
- * ("2.11.8", "" ) -> ("2.11.8", "2.11.8", "2.11.8.v20151215-133023-VFINAL-7559aed")
- * ("2.11.8", "M3" ) -> ("2.11.8-M3", "2.11.8-M3", "2.11.8.v20151215-133023-M3-7559aed")
- * ("2.11.8", "RC4" ) -> ("2.11.8-RC4", "2.11.8-RC4", "2.11.8.v20151215-133023-RC4-7559aed")
- * ("2.11.8-RC4", "SPLIT" ) -> ("2.11.8-RC4", "2.11.8-RC4", "2.11.8.v20151215-133023-RC4-7559aed")
+ * ("2.11.8", "SNAPSHOT" ) -> ("2.11.8-20151215-133023-7559aed", "2.11.8-bin-SNAPSHOT", "2.11.8.v20151215-133023-7559aed")
+ * ("2.11.8", "SHA-SNAPSHOT") -> ("2.11.8-20151215-133023-7559aed", "2.11.8-bin-7559aed-SNAPSHOT", "2.11.8.v20151215-133023-7559aed")
+ * ("2.11.8", "SHA" ) -> ("2.11.8-7559aed", "2.11.8-bin-7559aed", "2.11.8.v20151215-133023-7559aed")
+ * ("2.11.0", "SHA" ) -> ("2.11.0-7559aed", "2.11.0-pre-7559aed", "2.11.0.v20151215-133023-7559aed")
+ * ("2.11.8", "" ) -> ("2.11.8", "2.11.8", "2.11.8.v20151215-133023-VFINAL-7559aed")
+ * ("2.11.8", "M3" ) -> ("2.11.8-M3", "2.11.8-M3", "2.11.8.v20151215-133023-M3-7559aed")
+ * ("2.11.8", "RC4" ) -> ("2.11.8-RC4", "2.11.8-RC4", "2.11.8.v20151215-133023-RC4-7559aed")
+ * ("2.11.8-RC4", "SPLIT" ) -> ("2.11.8-RC4", "2.11.8-RC4", "2.11.8.v20151215-133023-RC4-7559aed")
*
* A `baseVersionSuffix` of "SNAPSHOT" is the default, which is used for local snapshot builds. The PR validation
- * job uses "SHA-SNAPSHOT". A proper version number for a nightly build can be computed with "SHA-nightly". An empty
+ * job uses "SHA-SNAPSHOT". A proper version number for an integration build can be computed with "SHA". An empty
* suffix is used for releases. All other suffix values are treated as RC / milestone builds. The special suffix
* value "SPLIT" is used to split the real suffix off from `baseVersion` instead and then apply the usual logic. */
private lazy val versionPropertiesImpl: Def.Initialize[Versions] = Def.setting {
+ val log = sLog.value
val (base, suffix) = {
val (b, s) = (baseVersion.value, baseVersionSuffix.value)
@@ -72,23 +78,44 @@ object VersionUtil {
} else (b, s)
}
- def executeTool(tool: String) = {
- val cmd =
- if (System.getProperty("os.name").toLowerCase.contains("windows"))
- s"cmd.exe /c tools\\$tool.bat -p"
- else s"tools/$tool"
- Process(cmd).lines.head
+ val (dateObj, sha) = {
+ try {
+ // Use JGit to get the commit date and SHA
+ import org.eclipse.jgit.storage.file.FileRepositoryBuilder
+ import org.eclipse.jgit.revwalk.RevWalk
+ val db = new FileRepositoryBuilder().findGitDir.build
+ val head = db.resolve("HEAD")
+ if(head eq null) {
+ log.info("No git HEAD commit found -- Using current date and 'unknown' SHA")
+ (new Date, "unknown")
+ } else {
+ val commit = new RevWalk(db).parseCommit(head)
+ (new Date(commit.getCommitTime.toLong * 1000L), commit.getName.substring(0, 7))
+ }
+ } catch { case ex: Exception =>
+ log.error("Could not determine commit date + SHA: "+ex)
+ log.trace(ex)
+ (new Date, "unknown")
+ }
+ }
+ val date = {
+ val df = new SimpleDateFormat("yyyyMMdd-HHmmss", Locale.ENGLISH)
+ df.setTimeZone(TimeZone.getTimeZone("UTC"))
+ df.format(dateObj)
}
- val date = executeTool("get-scala-commit-date")
- val sha = executeTool("get-scala-commit-sha").substring(0, 7)
+ val Patch = """\d+\.\d+\.(\d+)""".r
+ def cross = base match {
+ case Patch(p) if p.toInt > 0 => "bin"
+ case _ => "pre"
+ }
val (canonicalV, mavenSuffix, osgiV, release) = suffix match {
- case "SNAPSHOT" => (s"$base-$date-$sha", s"-SNAPSHOT", s"$base.v$date-$sha", false)
- case "SHA-SNAPSHOT" => (s"$base-$date-$sha", s"-$sha-SNAPSHOT", s"$base.v$date-$sha", false)
- case "SHA-NIGHTLY" => (s"$base-$sha-nightly", s"-$sha-nightly", s"$base.v$date-NIGHTLY-$sha", true)
- case "" => (s"$base", "", s"$base.v$date-VFINAL-$sha", true)
- case suffix => (s"$base-$suffix", s"-$suffix", s"$base.v$date-$suffix-$sha", true)
+ case "SNAPSHOT" => (s"$base-$date-$sha", s"-$cross-SNAPSHOT", s"$base.v$date-$sha", false)
+ case "SHA-SNAPSHOT" => (s"$base-$date-$sha", s"-$cross-$sha-SNAPSHOT", s"$base.v$date-$sha", false)
+ case "SHA" => (s"$base-$sha", s"-$cross-$sha", s"$base.v$date-$sha", false)
+ case "" => (s"$base", "", s"$base.v$date-VFINAL-$sha", true)
+ case suffix => (s"$base-$suffix", s"-$suffix", s"$base.v$date-$suffix-$sha", true)
}
Versions(canonicalV, base, mavenSuffix, osgiV, sha, date, release)
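For readers skimming the diff, a tiny standalone restatement of the `bin`/`pre` decision implemented above (illustrative only; runnable in a plain Scala REPL):

```scala
// Mirrors the `cross` helper above: patch version > 0 means the release line already
// exists, so integration builds are binary compatible ("bin"); otherwise "pre".
val Patch = """\d+\.\d+\.(\d+)""".r
def cross(base: String): String = base match {
  case Patch(p) if p.toInt > 0 => "bin"   // e.g. "2.12.2" -> "-bin-" suffix
  case _                       => "pre"   // e.g. "2.13.0" -> "-pre-" suffix
}
```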
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 80aef2c591..58e2d6cdad 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -20,3 +20,12 @@ buildInfoKeys := Seq[BuildInfoKey](buildClasspath)
buildInfoPackage := "scalabuild"
libraryDependencies += "com.typesafe" %% "mima-reporter" % "0.1.13"
+
+libraryDependencies ++= Seq(
+ "org.eclipse.jgit" % "org.eclipse.jgit" % "4.6.0.201612231935-r",
+ "org.slf4j" % "slf4j-nop" % "1.7.23"
+)
+
+concurrentRestrictions in Global := Seq(
+ Tags.limitAll(1) // workaround for https://github.com/sbt/sbt/issues/2970
+)
diff --git a/scripts/common b/scripts/common
index cd9d874cf7..c68a80fd74 100644
--- a/scripts/common
+++ b/scripts/common
@@ -156,15 +156,16 @@ EOF
}
# Generate a repositories file with all allowed repositories in our build environment.
-# Takes one optional argument, the private repository URL.
+# Takes a variable number of additional repositories as argument.
# See http://www.scala-sbt.org/0.13/docs/Proxy-Repositories.html
function generateRepositoriesConfig() {
jcenterCacheUrl=${jcenterCacheUrl-"https://scala-ci.typesafe.com/artifactory/jcenter/"}
sbtRepositoryConfig="$scriptsDir/sbt-repositories-config"
echo > "$sbtRepositoryConfig" '[repositories]'
- if [ -n "$1" ]
- then
- echo >> "$sbtRepositoryConfig" " private-repo: $1"
+ if [[ $# -gt 0 ]]; then
+ for i in $(seq 1 $#); do
+ echo >> "$sbtRepositoryConfig" " script-repo-$i: ${!i}"
+ done
fi
cat >> "$sbtRepositoryConfig" << EOF
jcenter-cache: $jcenterCacheUrl
diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap
index 71936abf71..a071f3c45f 100755
--- a/scripts/jobs/integrate/bootstrap
+++ b/scripts/jobs/integrate/bootstrap
@@ -3,16 +3,13 @@
# Script Overview
# - determine scala version
# - determine module versions
-# - build minimal core (aka locker) of Scala, use the determined version number, publish to private-repo
-# - build those modules where a binary compatible version doesn't exist, publish to private-repo
-# - build Scala using the previously built core and bootstrap modules, publish to private-repo (overwrites the minimal core version on private-repo)
-# - for releases (not nightlies)
+# - build minimal core (aka locker) of Scala, use the determined version number, publish to scala-release-temp
+# - build those modules where a binary compatible version doesn't exist, publish to scala-integration
+# - build Scala using the previously built core and bootstrap modules, publish to scala-integration
+# - for releases
# - stage Scala on sonatype
# - rebuild modules that needed a rebuild with this Scala build, and stage them on sonatype
-# - for nightlies
-# - force rebuild all modules and publish them locally (for testing purposes)
# - the Scala version is serialized to jenkins.properties, which is passed downstream to scala-release jobs
-# - this removes the need to tag scala/scala-dist (it's still encouraged for releases, but not a hard requirement)
# Specifying the Scala version:
@@ -21,21 +18,16 @@
# - Or have the current HEAD tagged as v$base$suffix
# - To prevent staging on sonatype (for testing), set publishToSonatype to anything but "yes"
# - Note: After building a release, the jenkins job provides an updated versions.properties file as artifact.
-# Put this file in the Scala repo and create a pull request, and also update the file build.number.
+# Put this file in the Scala repo and create a pull request, also update `baseVersion in Global` in build.sbt.
#
-# - Otherwise, a nightly release is built:
-# - version number is read from the build.number file, extended with -$sha-nightly
+# - Otherwise, an integration build is performed:
+# - version number is read from the build.sbt, extended with -[bin|pre]-$sha
-# Specifying module versions: there are two modes
-# - If moduleVersioning="versions.properties" (default): in this mode we use release versions for the modules.
-# - Module versions are read from the versions.properties file.
-# - Set <MODULE>_VER to override the default, e.g. XML_VER="1.0.4".
-# - The git revision is set to <MODULE>_REF="v$<MODULE>_VER". Make sure the tag exists (you can't override <MODULE>_REF).
-#
-# - Otherwise (moduleVersioning has some other value): in this mode we use nightly version numbers for modules.
-# - By default the script sets all <MODULE>_REF to "HEAD", override to build a specific revision.
-# - The <MODULE>_VER is set to a nightly version, for example "1.0.3-7-g14888a2-nightly" (you can't override <MODULE>_VER)
+# Specifying module versions. We use release versions for modules.
+# - Module versions are read from the versions.properties file.
+# - Set <MODULE>_VER to override the default, e.g. XML_VER="1.0.4".
+# - The git revision is set to <MODULE>_REF="v$<MODULE>_VER". Make sure the tag exists (you can't override <MODULE>_REF).
# Modules are automatically built if necessary.
@@ -56,7 +48,7 @@
# to be re-built using the 2.11.1 release, we could not use 2.11.0. We could also not release the modules
# after 2.11.1 was out, because that way the scala-library-all pom of 2.11.1 would depend on the old modules.
#
-# (*) https://github.com/sbt/sbt/blob/0.13.8/util/cross/src/main/input_sources/CrossVersionUtil.scala#L39
+# (*) https://github.com/sbt/sbt/blob/v0.13.13/util/cross/src/main/input_sources/CrossVersionUtil.scala#L41
# Binary incompatible changes in Modules: example with Scala 2.11 / 2.12 and scala-parser-combinators
@@ -82,8 +74,6 @@
# Note: private-repo used to be private-repo.typesafe.com. now we're running artifactory on scala-ci.typesafe.com/artifactory
-moduleVersioning=${moduleVersioning-"versions.properties"}
-
publishPrivateTask=${publishPrivateTask-"publish"}
publishSonatypeTaskCore=${publishSonatypeTaskCore-"publishSigned"}
publishSonatypeTaskModules=${publishSonatypeTaskModules-"publishSigned"}
@@ -108,10 +98,21 @@ mkdir -p $baseDir/ivy2
rm -rf $baseDir/resolutionScratch_
mkdir -p $baseDir/resolutionScratch_
-# repo used to publish "locker" scala to (to start the bootstrap)
-releaseTempRepoCred="private-repo"
+# repo for the starr and locker builds
releaseTempRepoUrl=${releaseTempRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-release-temp/"}
-generateRepositoriesConfig $releaseTempRepoUrl
+# repo for the modules and the quick build
+integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"}
+
+# the `releaseTempRepoUrl` needs to be in the repositories file to get starr when building quick and the modules.
+# `integrationRepoUrl` is there to find modules when building quick and other modules (e.g., partest requires xml).
+# the file is re-generated for running the stability test, this time with only `integrationRepoUrl`.
+generateRepositoriesConfig $releaseTempRepoUrl $integrationRepoUrl
+
+# ARGH trying to get this to work on multiple versions of sbt-extras...
+# the old version (on jenkins, and I don't want to upgrade for risk of breaking other builds) honors -sbt-dir
+# the new version of sbt-extras ignores sbt-dir, so we pass it in as -Dsbt.global.base
+# need to set sbt-dir to one that has the gpg.sbt plugin config
+sbtArgs="-ivy $baseDir/ivy2 -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig -Dsbt.global.base=$HOME/.sbt/0.13 -sbt-dir $HOME/.sbt/0.13"
##### git
gfxd() {
@@ -155,12 +156,7 @@ function st_stagingRepoClose() {
echo "{\"data\":{\"description\":\"$message\",\"stagedRepositoryIds\":[\"$repo\"]}}" | st_curl -X POST -d @- "$stApi/staging/bulk/close"
}
-
-# ARGH trying to get this to work on multiple versions of sbt-extras...
-# the old version (on jenkins, and I don't want to upgrade for risk of breaking other builds) honors -sbt-dir
-# the new version of sbt-extras ignores sbt-dir, so we pass it in as -Dsbt.global.base
-# need to set sbt-dir to one that has the gpg.sbt plugin config
-sbtArgs="-ivy $baseDir/ivy2 -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig -Dsbt.global.base=$HOME/.sbt/0.13 -sbt-dir $HOME/.sbt/0.13"
+#### sbt tools
sbtBuild() {
echo "### sbtBuild: "$SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@"
@@ -184,15 +180,15 @@ sbtResolve() {
# then set the version to the right one and publish (which won't re-gen the docs).
# Also tried publish-local without docs using 'set publishArtifact in (Compile, packageDoc) := false' and republishing, no dice.
-# Each buildModule() function is invoked twice: first to build against locker and publish to private-repo, then
+# Each buildModule() function is invoked twice: first to build against locker and publish to artifactory, then
# to build against the release and publish to sonatype (or publish-local if publishToSonatype is not "yes").
-# In the second round, sbtResolve is always true: the module will be found in the private-repo!
+# In the second round, sbtResolve is always true: the module will be found in the artifactory!
# Therefore, if MODULE_BUILT is "yes" (in the second round), we know that we need to build (and publish) the
# module again.
#
-# Note: we tried an alternative solution in which sbtResolve would not look at private-repo, but that fails. For example,
+# Note: we tried an alternative solution in which sbtResolve would not look at artifactory, but that fails. For example,
# scala-xml depends on scala-library, so sbt tries to find the scala-library of the version that we are currently building,
-# which exists only in private-repo.
+# which exists only in artifactory.
docTask() {
if [[ "$STARR_REF" != "" && "$1" != "yes" ]]; then
@@ -226,7 +222,7 @@ buildPartest() {
fi
}
-# should only be called with publishTasks publishing to private-repo
+# should only be called with publishTasks publishing to artifactory
buildScalaCheck(){
if [ "$SCALACHECK_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scalacheck" "scalacheck" $SCALACHECK_VER )
then echo "Found scalacheck $SCALACHECK_VER; not building."
@@ -238,9 +234,9 @@ buildScalaCheck(){
fi
}
-# build modules, using ${buildTasks[@]} (except for ScalaCheck, which is hard-coded to publish to private-repo)
+# build modules, using ${buildTasks[@]} (except for ScalaCheck, which is hard-coded to publish to artifactory)
buildModules() {
- publishTasks=('set credentials += Credentials(Path.userHome / ".credentials-private-repo")' "set every publishTo := Some(\"private-repo\" at \"$releaseTempRepoUrl\")")
+ publishTasks=('set credentials += Credentials(Path.userHome / ".credentials-private-repo")' "set every publishTo := Some(\"publish-repo\" at \"$integrationRepoUrl\")")
buildTasks=($publishPrivateTask)
buildXML
# buildScalaCheck
@@ -267,20 +263,19 @@ scalaVerToBinary() {
local patch="$(echo $2 | sed -e "s#$RE#\3#")"
# The binary version is majMin (e.g. "2.12") if
- # - there's no suffix : 2.12.0, 2.12.1
- # - the suffix starts with "-bin": 2.12.0-bin-M1
- # - the patch version is > 0 : 2.12.1-M1, 1.12.3-RC2, 2.12.1-sha-nightly, 2.12.2-SNAPSHOT
+ # - there's no suffix : 2.12.0, 2.12.1
+ # - the suffix starts with "-bin" : 2.12.1-bin-sha, 2.12.1-bin-sha-custom, 2.12.1-bin-SNAPSHOT
+ # - the suffix is \w+ and patch version is > 0: 2.12.1-M1, 2.12.1-RC2 (also 2.12.1-sha, 2.12.1-SNAPSHOT, which we don't use)
#
- # Otherwise, the binary version is the full version: 2.12.0-M1, 2.12.0-RC2, 2.12.0-sha-nightly, 2.12.0-SNAPSHOT
+ # Otherwise, the binary version is the full version: 2.12.0-M1, 2.12.0-RC2, 2.12.0-pre-sha, 2.12.0-pre-SNAPSHOT
+ # (also 2.12.0-sha, 2.12.0-SNAPSHOT, which we don't use)
#
- # Adapted from sbt: https://github.com/sbt/sbt/blob/0.13.8/util/cross/src/main/input_sources/CrossVersionUtil.scala#L39
+ # Adapted from sbt: https://github.com/sbt/sbt/blob/v0.13.13/util/cross/src/main/input_sources/CrossVersionUtil.scala#L42
#
- # Note: during the pre-release cycle of a major release (e.g. before 2.12.0), the SCALA_BINARY_VER of nightly / SNAPSHOT
- # versions is the full version, e.g. 2.12.0-sha-nightly, so modules are always re-built. This is in line with what sbt
- # does: for example, with scalaVersion := "2.12.0-SNAPSHOT", sbt will resolve scala-xml as scala-xml_2.12.0-SNAPSHOT.
- # Once the 2.12.0 release is out, the binary version is 2.12 for all versions (e.g. for 2.12.1-sha-nightly).
+ # During the pre-release cycle of a major release (e.g. before 2.12.0), the SCALA_BINARY_VER of integration / SNAPSHOT
+ # versions is the full version, e.g. 2.12.0-pre-sha, so modules are always re-built.
- if [[ "$3" == "" || "${3:0:4}" == "-bin" || "$patch" != "0" ]]; then
+ if [[ "$3" == "" || "${3:0:4}" == "-bin" || ("$patch" != "0" && "$3" =~ ^-[a-zA-Z0-9_]+$) ]]; then
echo "$majMin"
else
echo "$1"
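The same rule, restated as a small Scala sketch (not part of the build scripts; the function name and parameters are hypothetical):

```scala
// Given "2.12", the patch number, the suffix ("", "-bin-sha", "-M1", ...) and the full
// version string, return the binary version modules should be resolved against.
def binaryVersion(majMin: String, patch: Int, suffix: String, fullVersion: String): String =
  if (suffix.isEmpty || suffix.startsWith("-bin") ||
      (patch > 0 && suffix.matches("""-\w+""")))
    majMin          // e.g. 2.12.1-bin-sha, 2.12.1-M1 -> "2.12"
  else
    fullVersion     // e.g. 2.12.0-M1, 2.12.0-pre-sha -> full version
```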
@@ -291,7 +286,7 @@ determineScalaVersion() {
cd $WORKSPACE
parseScalaProperties "versions.properties"
- # each of the branches below defines the following vars: SCALA_VER_BASE, SCALA_VER_SUFFIX, SCALADOC_SOURCE_LINKS_VER, publishToSonatype
+ # each of the branches below defines the following vars: SCALA_VER_BASE, SCALA_VER_SUFFIX, publishToSonatype
if [ -z "$SCALA_VER_BASE" ]; then
echo "No SCALA_VER_BASE specified."
@@ -299,12 +294,11 @@ determineScalaVersion() {
if [ -z "$scalaTag" ]
then
- echo "No tag found, building nightly snapshot."
- $SBT_CMD $sbtArgs 'set baseVersionSuffix in Global := "SHA-NIGHTLY"' generateBuildCharacterPropertiesFile
+ echo "No tag found, running an integration build."
+ $SBT_CMD $sbtArgs 'set baseVersionSuffix in Global := "SHA"' generateBuildCharacterPropertiesFile
parseScalaProperties "buildcharacter.properties"
SCALA_VER_BASE="$maven_version_base"
SCALA_VER_SUFFIX="$maven_version_suffix"
- SCALADOC_SOURCE_LINKS_VER=$(git rev-parse HEAD)
# TODO: publish nightly snapshot using this script - currently it's a separate jenkins job still running at EPFL.
publishToSonatype="no"
@@ -314,7 +308,6 @@ determineScalaVersion() {
local RE='v*\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)\([0-9A-Za-z-]*\)' # don't change this to make it more accurate, it's not worth it
SCALA_VER_BASE="$(echo $scalaTag | sed -e "s#$RE#\1.\2.\3#")"
SCALA_VER_SUFFIX="$(echo $scalaTag | sed -e "s#$RE#\4#")"
- SCALADOC_SOURCE_LINKS_VER=$scalaTag
if [ "$SCALA_VER_BASE" == "$scalaTag" ]; then
echo "Could not parse version $scalaTag"
@@ -324,8 +317,6 @@ determineScalaVersion() {
fi
else
publishToSonatype=${publishToSonatype-"yes"} # unless forced previously, publish
- # if version base/suffix are provided, we assume a corresponding tag exists for the scaladoc source links
- SCALADOC_SOURCE_LINKS_VER="v$SCALA_VER_BASE$SCALA_VER_SUFFIX"
fi
SCALA_VER="$SCALA_VER_BASE$SCALA_VER_SUFFIX"
@@ -339,39 +330,16 @@ determineScalaVersion() {
echo "Building Scala $SCALA_VER."
}
-deriveVersion() {
- update $1 $2 $3 &> /dev/null
- echo "$(git describe --tag --match=v* | cut -dv -f2)-nightly"
-}
-
-deriveVersionAnyTag() {
- update $1 $2 $3 &> /dev/null
- echo "$(git describe --tag | cut -dv -f2)-nightly"
-}
-
-# determineScalaVersion must have been called
+# determineScalaVersion must have been called (versions.properties is parsed to env vars)
deriveModuleVersions() {
- if [ "$moduleVersioning" == "versions.properties" ]; then
- # use versions.properties as defaults when no version specified on the command line
- XML_VER=${XML_VER-$scala_xml_version_number}
- PARTEST_VER=${PARTEST_VER-$partest_version_number}
- SCALACHECK_VER=${SCALACHECK_VER-$scalacheck_version_number}
-
- XML_REF="v$XML_VER"
- PARTEST_REF="v$PARTEST_VER"
- SCALACHECK_REF="$SCALACHECK_VER" # no `v` in their tags
- else
- # use HEAD as default when no revision is specified on the command line
- XML_REF=${XML_REF-"HEAD"}
- PARTEST_REF=${PARTEST_REF-"HEAD"}
- SCALACHECK_REF=${SCALACHECK_REF-"HEAD"}
-
- XML_VER=$(deriveVersion scala scala-xml "$XML_REF")
- PARTEST_VER=$(deriveVersion scala scala-partest "$PARTEST_REF")
- SCALACHECK_VER=$(deriveVersionAnyTag rickynils scalacheck "$SCALACHECK_REF")
- fi
+ XML_VER=${XML_VER-$scala_xml_version_number}
+ PARTEST_VER=${PARTEST_VER-$partest_version_number}
+ SCALACHECK_VER=${SCALACHECK_VER-$scalacheck_version_number}
+
+ XML_REF="v$XML_VER"
+ PARTEST_REF="v$PARTEST_VER"
+ SCALACHECK_REF="$SCALACHECK_VER" # no `v` in their tags
- echo "Module versions (versioning strategy: $moduleVersioning):"
echo "PARTEST = $PARTEST_VER at $PARTEST_REF"
# echo "SCALACHECK = $SCALACHECK_VER at $SCALACHECK_REF"
echo "XML = $XML_VER at $XML_REF"
@@ -385,30 +353,39 @@ createNetrcFile() {
grep 'password=' $1 | sed 's/password=\(.*\)/password \1/' >> $netrcFile
}
+# deletes existing artifacts (core and modules) matching the $SCALA_VER from the repository passed as argument
removeExistingBuilds() {
- createNetrcFile "$HOME/.credentials-private-repo"
- local netrcFile="$HOME/.credentials-private-repo-netrc"
-
- local storageApiUrl=`echo $releaseTempRepoUrl | sed 's/\(scala-release-temp\)/api\/storage\/\1/'`
- local scalaLangModules=`curl -s $storageApiUrl/org/scala-lang | jq -r '.children | .[] | "org/scala-lang" + .uri' | grep -v actors-migration`
-
- for module in $scalaLangModules; do
- local artifacts=`curl -s $storageApiUrl/$module | jq -r ".children | .[] | select(.uri | endswith(\"$SCALA_VER\")) | .uri"`
- for artifact in $artifacts; do
- echo "Deleting $releaseTempRepoUrl$module$artifact"
- curl -s --netrc-file $netrcFile -X DELETE $releaseTempRepoUrl$module$artifact
+ local repoUrl=$1
+ local repoPrefix="https://scala-ci.typesafe.com/artifactory/"
+ if [[ $repoUrl == "$repoPrefix"* ]]; then
+ local repoId=${1#$repoPrefix}
+ local storageApiUrl="${repoPrefix}api/storage/$repoId"
+
+ createNetrcFile "$HOME/.credentials-private-repo"
+ local netrcFile="$HOME/.credentials-private-repo-netrc"
+
+ # "module" is not a scala module (like scala-xml), but an artifact of a boostrap build. the variable
+ # contains: "org/scala-lang/modules", "org/scala-lang/scala-compiler", "org/scala-lang/scala-library", ...
+ local scalaLangModules=`curl -s $storageApiUrl/org/scala-lang | jq -r '.children | .[] | "org/scala-lang" + .uri' | grep -v actors-migration`
+
+ for module in $scalaLangModules; do
+ local artifacts=`curl -s $storageApiUrl/$module | jq -r ".children | .[] | select(.uri | endswith(\"$SCALA_VER\")) | .uri"`
+ for artifact in $artifacts; do
+ echo "Deleting $repoUrl$module$artifact"
+ curl -s --netrc-file $netrcFile -X DELETE $repoUrl$module$artifact
+ done
done
- done
+ else
+ echo "Unknown repo, not deleting anything: $repoUrl"
+ fi
}
constructUpdatedModuleVersions() {
updatedModuleVersions=()
- # force the new module versions for building the core. these may be different from the values in versions.properties,
- # either because the variables (XML_VER) were provided, or because we're building the modules from HEAD.
- # in the common case, the values are the same as in versions.properties.
+ # force the new module versions for building the core. these may be different from the values in versions.properties
+ # if the variables (XML_VER) were provided. in the common case, the values are the same as in versions.properties.
updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-xml.version.number=$XML_VER")
-
updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dpartest.version.number=$PARTEST_VER")
# updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscalacheck.version.number=$SCALACHECK_VER")
@@ -418,7 +395,7 @@ constructUpdatedModuleVersions() {
if [ ! -z "$SCALA_BINARY_VER" ]; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala.binary.version=$SCALA_BINARY_VER"); fi
}
-# build locker (scala + modules) and quick, publishing everything to private-repo
+# build locker (scala + modules) and quick, publishing everything to artifactory
bootstrap() {
echo "### Bootstrapping"
@@ -429,7 +406,7 @@ bootstrap() {
echo "### Building STARR"
STARR_DIR=./scala-starr
- STARR_VER_SUFFIX="-$(git rev-parse --short $STARR_REF)-nightly"
+ STARR_VER_SUFFIX="-$(git rev-parse --short $STARR_REF)-starr"
STARR_VER=$SCALA_VER_BASE$STARR_VER_SUFFIX
rm -rf "$STARR_DIR"
(
@@ -472,8 +449,6 @@ bootstrap() {
# Rebuild Scala with these modules so that all binary versions are consistent.
# Update versions.properties to new modules.
# Sanity check: make sure the Scala test suite passes / docs can be generated with these modules.
- # don't skip locker (-Dlocker.skip=1), or stability will fail
- # overwrite "locker" version of scala at private-repo with bootstrapped version
cd $baseDir
rm -rf build/
@@ -481,7 +456,7 @@ bootstrap() {
--warn \
-Dstarr.version=$SCALA_VER \
${updatedModuleVersions[@]} \
- "setupBootstrapQuick $releaseTempRepoUrl $SCALA_VER" \
+ "setupBootstrapQuick $integrationRepoUrl $SCALA_VER" \
$clean \
$sbtBuildTask \
dist/mkQuick \
@@ -506,7 +481,7 @@ testStability() {
--warn \
-Dstarr.version=$SCALA_VER \
${updatedModuleVersions[@]} \
- "setupBootstrapQuick $releaseTempRepoUrl $SCALA_VER" \
+ "setupBootstrapQuick $integrationRepoUrl $SCALA_VER" \
$clean \
dist/mkQuick
mv build/quick build/strap
@@ -526,7 +501,7 @@ publishSonatype() {
--warn \
-Dstarr.version=$SCALA_VER \
${updatedModuleVersions[@]} \
- "setupBootstrapPublish $releaseTempRepoUrl $SCALA_VER" \
+ "setupBootstrapPublish $integrationRepoUrl $SCALA_VER" \
$publishSonatypeTaskCore
echo "### Publishing modules to sonatype"
@@ -554,10 +529,14 @@ determineScalaVersion
deriveModuleVersions
-removeExistingBuilds
+removeExistingBuilds $integrationRepoUrl
+removeExistingBuilds $releaseTempRepoUrl
bootstrap
+# for stability testing and sonatype publishing, use artifacts in `integrationRepoUrl`
+generateRepositoriesConfig $integrationRepoUrl
+
if [ "$testStability" == "yes" ]
then testStability
fi
diff --git a/spec/04-basic-declarations-and-definitions.md b/spec/04-basic-declarations-and-definitions.md
index 53b34dedc5..c4d3425fff 100644
--- a/spec/04-basic-declarations-and-definitions.md
+++ b/spec/04-basic-declarations-and-definitions.md
@@ -669,6 +669,15 @@ def f(a: Int = 0)(b: Int = a + 1) = b // OK
f(10)() // returns 11 (not 1)
```
+If an [implicit argument](07-implicits.html#implicit-parameters)
+is not found by implicit search, it may be supplied using a default argument.
+
+```scala
+implicit val i: Int = 2
+def f(implicit x: Int, s: String = "hi") = s * x
+f // "hihi"
+```
+
### By-Name Parameters
```ebnf
diff --git a/spec/05-classes-and-objects.md b/spec/05-classes-and-objects.md
index 6738c7a5b7..5bd520589d 100644
--- a/spec/05-classes-and-objects.md
+++ b/spec/05-classes-and-objects.md
@@ -597,10 +597,12 @@ overridden in subclasses. A `final` class may not be inherited by
a template. `final` is redundant for object definitions. Members
of final classes or objects are implicitly also final, so the
`final` modifier is generally redundant for them, too. Note, however, that
-[constant value definitions](04-basic-declarations-and-definitions.html#value-declarations-and-definitions) do require
-an explicit `final` modifier, even if they are defined in a final class or
-object. `final` may not be applied to incomplete members, and it may not be
-combined in one modifier list with `sealed`.
+[constant value definitions](04-basic-declarations-and-definitions.html#value-declarations-and-definitions)
+do require an explicit `final` modifier,
+even if they are defined in a final class or object.
+`final` is permitted for abstract classes
+but it may not be applied to traits or incomplete members,
+and it may not be combined in one modifier list with `sealed`.
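An illustrative snippet of the rules just stated (examples assume Scala 2.12 behaviour):

```scala
final abstract class Nat          // allowed: `final` is permitted for abstract classes
// final trait T                  // not allowed: `final` may not be applied to traits
// sealed final class C           // not allowed: `sealed` and `final` may not be combined
```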
### `sealed`
The `sealed` modifier applies to class definitions. A
diff --git a/spec/06-expressions.md b/spec/06-expressions.md
index 48cff1725a..581170c5f9 100644
--- a/spec/06-expressions.md
+++ b/spec/06-expressions.md
@@ -320,7 +320,7 @@ would not typecheck.
### Named and Default Arguments
-If an application might uses named arguments $p = e$ or default
+If an application is to use named arguments $p = e$ or default
arguments, the following conditions must hold.
- For every named argument $p_i = e_i$ which appears left of a positional argument
@@ -330,7 +330,7 @@ arguments, the following conditions must hold.
argument defines a parameter which is already specified by a
positional argument.
- Every formal parameter $p_j:T_j$ which is not specified by either a positional
- or a named argument has a default argument.
+ or named argument has a default argument.
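A small example satisfying the conditions above (illustrative only):

```scala
def resize(width: Int, height: Int = 100, scale: Double = 1.0): String =
  s"$width x $height @ $scale"

resize(640, scale = 2.0)   // `height` is specified by neither a positional nor a named
                           // argument, so its default argument is used
```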
If the application uses named or default
arguments the following transformation is applied to convert it into
diff --git a/spec/README.md b/spec/README.md
index b19ce6441f..ad524dfdf3 100644
--- a/spec/README.md
+++ b/spec/README.md
@@ -8,7 +8,11 @@ Third, we'd like to support different output formats. An html page per chapter w
## Editing
-We use Jekyll 2 and [Redcarpet](https://github.com/vmg/redcarpet) to generate the html. Essentially, this is what github pages use.
+At the time of writing we are using Jekyll 3.3.0 and [Redcarpet 3.3.2](https://github.com/vmg/redcarpet) to generate the html.
+
+Check `Gemfile` for the current versions.
+
+We aim to track the configuration GitHub Pages uses, but at times differences will arise as GitHub Pages evolves.
## Building
diff --git a/spec/_config.yml b/spec/_config.yml
index 60e80ee05c..1a67f7de63 100644
--- a/spec/_config.yml
+++ b/spec/_config.yml
@@ -1,7 +1,7 @@
baseurl: /files/archive/spec/2.12
safe: true
lsi: false
-highlighter: null
+highlighter: false
markdown: redcarpet
encoding: utf-8
redcarpet:
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index c1b0733895..819887f959 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -953,10 +953,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
definitions.isDefinitionsInitialized
&& rootMirror.isMirrorInitialized
)
- override def isPastTyper = (
+ override def isPastTyper = isPast(currentRun.typerPhase)
+ def isPast(phase: Phase) = (
(curRun ne null)
&& isGlobalInitialized // defense against init order issues
- && (globalPhase.id > currentRun.typerPhase.id)
+ && (globalPhase.id > phase.id)
)
// TODO - trim these to the absolute minimum.
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 226c49ec07..3ed1570c1c 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -983,6 +983,8 @@ trait Scanners extends ScannersCommon {
def intVal: Long = intVal(negated = false)
+ private val zeroFloat = raw"[0.]+(?:[eE][+-]?[0-9]+)?[fFdD]?".r
+
/** Convert current strVal, base to float value.
*/
def floatVal(negated: Boolean): Float = {
@@ -990,8 +992,7 @@ trait Scanners extends ScannersCommon {
val value: Float = java.lang.Float.parseFloat(strVal)
if (value > Float.MaxValue)
syntaxError("floating point number too large")
- val zeroly = "0.fF"
- if (value == 0.0f && strVal.exists(c => !zeroly.contains(c)))
+ if (value == 0.0f && !zeroFloat.pattern.matcher(strVal).matches)
syntaxError("floating point number too small")
if (negated) -value else value
} catch {
@@ -1010,8 +1011,7 @@ trait Scanners extends ScannersCommon {
val value: Double = java.lang.Double.parseDouble(strVal)
if (value > Double.MaxValue)
syntaxError("double precision floating point number too large")
- val zeroly = "0.dD"
- if (value == 0.0d && strVal.exists(c => !zeroly.contains(c)))
+ if (value == 0.0d && !zeroFloat.pattern.matcher(strVal).matches)
syntaxError("double precision floating point number too small")
if (negated) -value else value
} catch {
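A quick, hypothetical REPL check of what the `zeroFloat` pattern above accepts (the literals are examples, not taken from the test suite):

```scala
val zeroFloat = raw"[0.]+(?:[eE][+-]?[0-9]+)?[fFdD]?".r
def writtenAsZero(s: String) = zeroFloat.pattern.matcher(s).matches

writtenAsZero("0.0f")     // true: a genuine zero literal, no error
writtenAsZero("0e10")     // true: still textually zero
writtenAsZero("1e-300f")  // false: becomes 0.0f only by underflow, so the scanner reports "too small"
```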
diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
index dfd5b07a3b..c18f220d95 100644
--- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
@@ -446,9 +446,10 @@ abstract class ScalaPrimitives {
inform(s"Unknown primitive method $cls.$method")
else alts foreach (s =>
addPrimitive(s,
- s.info.paramTypes match {
- case tp :: _ if code == ADD && tp =:= StringTpe => CONCAT
- case _ => code
+ if (code != ADD) code
+ else exitingTyper(s.info).paramTypes match {
+ case tp :: _ if tp =:= StringTpe => CONCAT
+ case _ => code
}
)
)
diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala
index 1ea152b29c..fbd59eb04a 100644
--- a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala
+++ b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala
@@ -10,9 +10,10 @@ import java.util.function.IntFunction
import java.util
import java.util.Comparator
-import scala.reflect.io.{AbstractFile, PlainFile}
+import scala.reflect.io.{AbstractFile, PlainFile, PlainNioFile}
import scala.tools.nsc.util.{ClassPath, ClassRepresentation}
import FileUtils._
+import scala.collection.JavaConverters._
/**
* A trait allowing to look for classpath entries in directories. It provides common logic for
@@ -121,51 +122,78 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo
def asClassPathStrings: Seq[String] = Seq(dir.getPath)
}
-object JImageDirectoryLookup {
- import java.nio.file._, java.net.URI, scala.collection.JavaConverters._
- def apply(): List[ClassPath] = {
+object JrtClassPath {
+ import java.nio.file._, java.net.URI
+ def apply(): Option[ClassPath] = {
try {
val fs = FileSystems.getFileSystem(URI.create("jrt:/"))
- val dir: Path = fs.getPath("/modules")
- val modules = Files.list(dir).iterator().asScala.toList
- modules.map(m => new JImageDirectoryLookup(fs, m.getFileName.toString))
+ Some(new JrtClassPath(fs))
} catch {
case _: ProviderNotFoundException | _: FileSystemNotFoundException =>
- Nil
+ None
}
}
}
-class JImageDirectoryLookup(fs: java.nio.file.FileSystem, module: String) extends DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths {
+
+/**
+ * Implementation `ClassPath` based on the JDK 9 encapsulated runtime modules (JEP-220)
+ *
+ * https://bugs.openjdk.java.net/browse/JDK-8066492 is the most up to date reference
+ * for the structure of the jrt:// filesystem.
+ *
+ * The implementation assumes that no classes exist in the empty package.
+ */
+final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with NoSourcePaths {
import java.nio.file.Path, java.nio.file._
type F = Path
- val dir: Path = fs.getPath("/modules/" + module)
+ private val dir: Path = fs.getPath("/packages")
- protected def emptyFiles: Array[Path] = Array.empty
- protected def getSubDir(packageDirName: String): Option[Path] = {
- val packageDir = dir.resolve(packageDirName)
- if (Files.exists(packageDir) && Files.isDirectory(packageDir)) Some(packageDir)
- else None
+ // e.g. "java.lang" -> Seq("/modules/java.base")
+ private val packageToModuleBases: Map[String, Seq[Path]] = {
+ val ps = Files.newDirectoryStream(dir).iterator().asScala
+ def lookup(pack: Path): Seq[Path] = {
+ Files.list(pack).iterator().asScala.map(l => if (Files.isSymbolicLink(l)) Files.readSymbolicLink(l) else l).toList
+ }
+ ps.map(p => (p.toString.stripPrefix("/packages/"), lookup(p))).toMap
}
- protected def listChildren(dir: Path, filter: Option[Path => Boolean]): Array[Path] = {
- import scala.collection.JavaConverters._
- val f = filter.getOrElse((p: Path) => true)
- Files.list(dir).iterator().asScala.filter(f).toArray[Path]
+
+ override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = {
+ def matches(packageDottedName: String) =
+ if (packageDottedName.contains("."))
+ packageOf(packageDottedName) == inPackage
+ else inPackage == ""
+ packageToModuleBases.keysIterator.filter(matches).map(PackageEntryImpl(_)).toVector
+ }
+ private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = {
+ if (inPackage == "") Nil
+ else {
+ packageToModuleBases.getOrElse(inPackage, Nil).flatMap(x =>
+ Files.list(x.resolve(inPackage.replace('.', '/'))).iterator().asScala.filter(_.getFileName.toString.endsWith(".class"))).map(x =>
+ ClassFileEntryImpl(new PlainNioFile(x))).toVector
+ }
}
- protected def getName(f: Path): String = f.getFileName.toString
- protected def toAbstractFile(f: Path): AbstractFile = new scala.reflect.io.PlainNioFile(f)
- protected def isPackage(f: Path): Boolean = Files.isDirectory(f) && mayBeValidPackage(f.getFileName.toString)
+
+ override private[nsc] def list(inPackage: String): ClassPathEntries =
+ if (inPackage == "") ClassPathEntries(packages(inPackage), Nil)
+ else ClassPathEntries(packages(inPackage), classes(inPackage))
def asURLs: Seq[URL] = Seq(dir.toUri.toURL)
- def asClassPathStrings: Seq[String] = asURLs.map(_.toString)
+  // We don't yet have a scheme to represent the JDK modules in our `-classpath`.
+  // Java models them as entries in the new "module path"; we'll probably need to follow this.
+ def asClassPathStrings: Seq[String] = Nil
def findClassFile(className: String): Option[AbstractFile] = {
- val relativePath = FileUtils.dirPath(className) + ".class"
- val classFile = dir.resolve(relativePath)
- if (Files.exists(classFile)) Some(new scala.reflect.io.PlainNioFile(classFile)) else None
+ if (!className.contains(".")) None
+ else {
+ val inPackage = packageOf(className)
+ packageToModuleBases.getOrElse(inPackage, Nil).iterator.flatMap{x =>
+ val file = x.resolve(className.replace('.', '/') + ".class")
+ if (Files.exists(file)) new scala.reflect.io.PlainNioFile(file) :: Nil else Nil
+ }.take(1).toList.headOption
+ }
}
- override protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file)
- override protected def isMatchingFile(f: Path): Boolean = Files.isRegularFile(f) && f.getFileName.toString.endsWith(".class")
- override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = files(inPackage)
+ private def packageOf(dottedClassName: String): String =
+ dottedClassName.substring(0, dottedClassName.lastIndexOf("."))
}
case class DirectoryClassPath(dir: File) extends JFileDirectoryLookup[ClassFileEntryImpl] with NoSourcePaths {
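A hypothetical exploration of the jrt:// layout this class relies on (requires running on JDK 9+; not part of the compiler sources):

```scala
import java.net.URI
import java.nio.file.{FileSystems, Files}
import scala.collection.JavaConverters._

val fs = FileSystems.getFileSystem(URI.create("jrt:/"))
// Each /packages/<dotted.package> entry links to the module(s) providing that package,
// e.g. /packages/java.lang -> java.base. JrtClassPath builds its package -> module map from these links.
Files.list(fs.getPath("/packages/java.lang")).iterator().asScala.foreach(println)
```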
diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
index 99263bf834..f1f5f37c36 100644
--- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
@@ -14,8 +14,10 @@ import StringOps.{countElementsAsString => countAs, trimAllTrailingSpace => trim
/** This class implements a Reporter that displays messages on a text console.
*/
-class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: PrintWriter) extends AbstractReporter {
- def this(settings: Settings) = this(settings, Console.in, new PrintWriter(Console.err, true))
+class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: PrintWriter, echoWriter: PrintWriter) extends AbstractReporter {
+ def this(settings: Settings) = this(settings, Console.in, new PrintWriter(Console.err, true), new PrintWriter(Console.out, true))
+ def this(settings: Settings, reader: BufferedReader, writer: PrintWriter) =
+ this(settings, reader, writer, writer)
/** Whether a short file name should be displayed before errors */
var shortname: Boolean = false
@@ -41,6 +43,12 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
writer.flush()
}
+ /** Prints the message to the echoWriter, which is usually stdout. */
+ override def echo(msg: String): Unit = {
+ echoWriter.println(trimTrailing(msg))
+ echoWriter.flush()
+ }
+
/** Prints the message with the given position indication. */
def printMessage(posIn: Position, msg: String): Unit = printMessage(formatMessage(posIn, msg, shortname))
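A sketch of wiring the new four-argument constructor (assumes an existing `settings: Settings` instance; the writer choices are illustrative):

```scala
import java.io.PrintWriter

val reporter = new ConsoleReporter(
  settings,                             // scala.tools.nsc.Settings, assumed to exist
  Console.in,
  new PrintWriter(Console.err, true),   // writer: errors and warnings
  new PrintWriter(Console.out, true))   // echoWriter: plain `echo` output
```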
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
index 0b051ef89d..c38de753c8 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
@@ -127,7 +127,7 @@ abstract class ScalaBuild extends Ordered[ScalaBuild] {
def unparse: String
}
/**
- * A development, test, nightly, snapshot or other "unofficial" build
+ * A development, test, integration, snapshot or other "unofficial" build
*/
case class Development(id: String) extends ScalaBuild {
def unparse = s"-${id}"
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index d948d151a6..dd44366692 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -122,11 +122,16 @@ abstract class SymbolLoaders {
* and give them `completer` as type.
*/
def enterClassAndModule(root: Symbol, name: String, getCompleter: (ClassSymbol, ModuleSymbol) => SymbolLoader) {
- val clazz = newClass(root, name)
- val module = newModule(root, name)
- val completer = getCompleter(clazz, module)
- enterClass(root, clazz, completer)
- enterModule(root, module, completer)
+ val clazz0 = newClass(root, name)
+ val module0 = newModule(root, name)
+ val completer = getCompleter(clazz0, module0)
+ // enterClass/Module may return an existing symbol instead of the ones we created above
+ // this may happen when there are both sources and binaries on the classpath, but the class
+ // name is different from the file name, so the classpath can't match the binary and source
+ // representation. `companionModule/Class` prefers the source version, so we should be careful
+ // to reuse the symbols returned below.
+ val clazz = enterClass(root, clazz0, completer)
+ val module = enterModule(root, module0, completer)
if (!clazz.isAnonymousClass) {
// Diagnostic for SI-7147
def msg: String = {
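
A generic illustration of the pattern adopted above, with hypothetical names (not the compiler's Scope API): when a registry may already hold an entry, keep working with the symbol it hands back rather than the freshly created candidate.

import scala.collection.mutable

class Registry[A] {
  private val entries = mutable.Map.empty[String, A]
  // returns the entry that is actually stored, which need not be `candidate`
  def enter(name: String, candidate: A): A = entries.getOrElseUpdate(name, candidate)
}

// callers keep using the returned value, as enterClassAndModule now does:
//   val clazz = registry.enter("C", newCandidate)
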
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index dcffd7a6ab..f35dd6556f 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -342,12 +342,16 @@ abstract class UnCurry extends InfoTransform
* the whole tree with it.
*/
private def replaceElidableTree(tree: Tree): Tree = {
+ def elisionOf(t: Type): Tree = t.typeSymbol match {
+ case StringClass => Literal(Constant("")) setType t
+ case _ => gen.mkZero(t)
+ }
tree match {
case DefDef(_,_,_,_,_,rhs) =>
- val rhs1 = if (rhs == EmptyTree) rhs else Block(Nil, gen.mkZero(rhs.tpe)) setType rhs.tpe
+ val rhs1 = if (rhs == EmptyTree) rhs else Block(Nil, elisionOf(rhs.tpe)) setType rhs.tpe
deriveDefDef(tree)(_ => rhs1) setSymbol tree.symbol setType tree.tpe
case _ =>
- gen.mkZero(tree.tpe) setType tree.tpe
+ elisionOf(tree.tpe)
}
}
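
The user-visible effect of elisionOf, as a small sketch (compile with -Xelide-below WARNING): an elided String-returning method now yields the empty string instead of null.

import scala.annotation.elidable
import scala.annotation.elidable.FINEST

object ElisionDemo {
  @elidable(FINEST) def banner(): String = "hello"

  def main(args: Array[String]): Unit =
    // with the body elided this prints an empty line; previously it printed "null"
    println(banner())
}
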
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index d349597b14..7a3b8d2ab6 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -62,11 +62,10 @@ trait Contexts { self: Analyzer =>
def warnUnusedImports(unit: CompilationUnit) = if (!unit.isJava) {
for (imps <- allImportInfos.remove(unit)) {
- for (imp <- imps.reverse.distinct) {
+ for (imp <- imps.distinct.reverse) {
val used = allUsedSelectors(imp)
- def isMask(s: ImportSelector) = s.name != nme.WILDCARD && s.rename == nme.WILDCARD
- imp.tree.selectors filterNot (s => isMask(s) || used(s)) foreach { sel =>
+ imp.tree.selectors filterNot (s => isMaskImport(s) || used(s)) foreach { sel =>
reporter.warning(imp posOf sel, "Unused import")
}
}
@@ -74,6 +73,10 @@ trait Contexts { self: Analyzer =>
}
}
+ def isMaskImport(s: ImportSelector): Boolean = s.name != nme.WILDCARD && s.rename == nme.WILDCARD
+ def isIndividualImport(s: ImportSelector): Boolean = s.name != nme.WILDCARD && s.rename != nme.WILDCARD
+ def isWildcardImport(s: ImportSelector): Boolean = s.name == nme.WILDCARD
+
var lastAccessCheckDetails: String = ""
/** List of symbols to import from in a root context. Typically that
@@ -1193,27 +1196,33 @@ trait Contexts { self: Analyzer =>
res
}
- final def lookupCompanionOf(original: Symbol): Symbol = {
- if (original.isModuleClass) original.sourceModule
- else lookupScopeEntry(original) match {
- case null => NoSymbol
- case entry => entry.owner.lookupCompanion(original)
+ final def lookupCompanionInIncompleteOwner(original: Symbol): Symbol = {
+ /* Search scopes in current and enclosing contexts for the definition of `symbol` */
+ def lookupScopeEntry(symbol: Symbol): ScopeEntry = {
+ var res: ScopeEntry = null
+ var ctx = this
+ while (res == null && ctx.outer != ctx) {
+ val s = ctx.scope lookupSymbolEntry symbol
+ if (s != null)
+ res = s
+ else
+ ctx = ctx.outer
+ }
+ res
}
- }
- /** Search scopes in current and enclosing contexts for the definition of `symbol` */
- private def lookupScopeEntry(symbol: Symbol): ScopeEntry = {
- var res: ScopeEntry = null
- var ctx = this
- while (res == null && ctx.outer != ctx) {
- val s = ctx.scope lookupSymbolEntry symbol
- if (s != null)
- res = s
- else
- ctx = ctx.outer
+ // 1) Must be owned by the same Scope, to ensure that in
+ // `{ class C; { ...; object C } }`, the class is not seen as a companion of the object.
+ // 2) Must be a class and module symbol, so that `{ class C; def C }` or `{ type T; object T }` are not companions.
+ lookupScopeEntry(original) match {
+ case null => NoSymbol
+ case entry =>
+ def isCompanion(sym: Symbol): Boolean =
+ (original.isModule && sym.isClass || sym.isModule && original.isClass) && sym.isCoDefinedWith(original)
+ entry.owner.lookupNameInSameScopeAs(original, original.name.companionName).filter(isCompanion)
}
- res
}
+
} //class Context
/** A `Context` focussed on an `Import` tree */
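
A standalone analogue of the three selector predicates added above, using a hypothetical Selector type in place of the compiler's ImportSelector:

final case class Selector(name: String, rename: String)

object SelectorKind {
  private val Wildcard = "_"
  def isMaskImport(s: Selector): Boolean       = s.name != Wildcard && s.rename == Wildcard
  def isIndividualImport(s: Selector): Boolean = s.name != Wildcard && s.rename != Wildcard
  def isWildcardImport(s: Selector): Boolean   = s.name == Wildcard
}

// import foo.{bar => _}   -> mask
// import foo.{bar => baz} -> individual
// import foo._            -> wildcard
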
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 395bda234b..28169c9da1 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -1955,9 +1955,12 @@ trait Namers extends MethodSynthesis {
// Doing this generally would trigger cycles; that's why we also
// use the lower-level scan through the current Context as a fallback.
if (!currentRun.compiles(owner)) owner.initialize
- original.companionSymbol orElse {
- ctx.lookupCompanionOf(original)
- }
+
+ if (original.isModuleClass) original.sourceModule
+ else if (!owner.isTerm && owner.hasCompleteInfo)
+ original.companionSymbol
+ else
+ ctx.lookupCompanionInIncompleteOwner(original)
}
/** A version of `Symbol#linkedClassOfClass` that works with local companions, ala `companionSymbolOf`. */
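
The shape of code the refined lookup has to handle (illustration only): companions defined in a term owner, where companionSymbol alone is not reliable and the scope-based search is used instead.

object LocalCompanions {
  def demo(): Int = {
    class C(val n: Int)
    object C { def default = new C(42) }   // local companion of the local class
    C.default.n
  }
}
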
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index 990edcd86d..50743a922a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -112,7 +112,7 @@ abstract class TreeCheckers extends Analyzer {
else if (prevTrees exists (t => (t eq tree) || (t.symbol == sym)))
()
else {
- val s1 = (prevTrees map wholetreestr).sorted.distinct
+ val s1 = (prevTrees map wholetreestr).distinct.sorted
val s2 = wholetreestr(tree)
if (s1 contains s2) ()
else movedMsgs += ("\n** %s moved:\n** Previously:\n%s\n** Currently:\n%s".format(ownerstr(sym), s1 mkString ", ", s2))
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index b66dbf21c0..36b9a65334 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -11,6 +11,7 @@ import scala.collection.mutable.ListBuffer
import scala.util.control.Exception.ultimately
import symtab.Flags._
import PartialFunction._
+import scala.annotation.tailrec
/** An interface to enable higher configurability of diagnostic messages
* regarding type errors. This is barely a beginning as error messages are
@@ -274,19 +275,54 @@ trait TypeDiagnostics {
if (AnyRefTpe <:< req) notAnyRefMessage(found) else ""
}
+ def finalOwners(tpe: Type): Boolean = (tpe.prefix == NoPrefix) || recursivelyFinal(tpe)
+
+ @tailrec
+ final def recursivelyFinal(tpe: Type): Boolean = {
+ val prefix = tpe.prefix
+ if (prefix != NoPrefix) {
+ if (prefix.typeSymbol.isFinal) {
+ recursivelyFinal(prefix)
+ } else {
+ false
+ }
+ } else {
+ true
+ }
+ }
+
// TODO - figure out how to avoid doing any work at all
// when the message will never be seen. I thought context.reportErrors
// being false would do that, but if I return "<suppressed>" under
// that condition, I see it.
def foundReqMsg(found: Type, req: Type): String = {
- def baseMessage = (
- ";\n found : " + found.toLongString + existentialContext(found) + explainAlias(found) +
- "\n required: " + req + existentialContext(req) + explainAlias(req)
- )
- ( withDisambiguation(Nil, found, req)(baseMessage)
- + explainVariance(found, req)
- + explainAnyVsAnyRef(found, req)
- )
+ val foundWiden = found.widen
+ val reqWiden = req.widen
+ val sameNamesDifferentPrefixes =
+ foundWiden.typeSymbol.name == reqWiden.typeSymbol.name &&
+ foundWiden.prefix.typeSymbol != reqWiden.prefix.typeSymbol
+ val easilyMistakable =
+ sameNamesDifferentPrefixes &&
+ !req.typeSymbol.isConstant &&
+ finalOwners(foundWiden) && finalOwners(reqWiden) &&
+ !found.typeSymbol.isTypeParameterOrSkolem && !req.typeSymbol.isTypeParameterOrSkolem
+
+ if (easilyMistakable) {
+ val longestNameLength = foundWiden.nameAndArgsString.length max reqWiden.nameAndArgsString.length
+ val paddedFoundName = foundWiden.nameAndArgsString.padTo(longestNameLength, ' ')
+ val paddedReqName = reqWiden.nameAndArgsString.padTo(longestNameLength, ' ')
+ ";\n found : " + (paddedFoundName + s" (in ${found.prefix.typeSymbol.fullNameString}) ") + explainAlias(found) +
+ "\n required: " + (paddedReqName + s" (in ${req.prefix.typeSymbol.fullNameString}) ") + explainAlias(req)
+ } else {
+ def baseMessage = {
+ ";\n found : " + found.toLongString + existentialContext(found) + explainAlias(found) +
+ "\n required: " + req + existentialContext(req) + explainAlias(req)
+ }
+ (withDisambiguation(Nil, found, req)(baseMessage)
+ + explainVariance(found, req)
+ + explainAnyVsAnyRef(found, req)
+ )
+ }
}
def typePatternAdvice(sym: Symbol, ptSym: Symbol) = {
@@ -315,14 +351,6 @@ trait TypeDiagnostics {
def restoreName() = sym.name = savedName
def modifyName(f: String => String) = sym setName newTypeName(f(sym.name.toString))
- /** Prepend java.lang, scala., or Predef. if this type originated
- * in one of those.
- */
- def qualifyDefaultNamespaces() = {
- val intersect = Set(trueOwner, aliasOwner) intersect UnqualifiedOwners
- if (intersect.nonEmpty && tp.typeSymbolDirect.name == tp.typeSymbol.name) preQualify()
- }
-
// functions to manipulate the name
def preQualify() = modifyName(trueOwner.fullName + "." + _)
def postQualify() = if (!(postQualifiedWith contains trueOwner)) { postQualifiedWith ::= trueOwner; modifyName(_ + "(in " + trueOwner + ")") }
@@ -414,12 +442,6 @@ trait TypeDiagnostics {
if (td1 string_== td2)
tds foreach (_.nameQualify())
- // If they have the same simple name, and either of them is in the
- // scala package or predef, qualify with scala so it is not confusing why
- // e.g. java.util.Iterator and Iterator are different types.
- if (td1 name_== td2)
- tds foreach (_.qualifyDefaultNamespaces())
-
// If they still print identically:
// a) If they are type parameters with different owners, append (in <owner>)
// b) Failing that, the best we can do is append "(some other)" to the latter.
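
A minimal string-level sketch of the new "same name, different prefix" rendering, using plain strings instead of compiler Types:

object FoundReqSketch {
  def render(foundName: String, foundPrefix: String,
             reqName: String, reqPrefix: String): String = {
    val width = foundName.length max reqName.length
    ";\n found   : " + foundName.padTo(width, ' ') + s" (in $foundPrefix) " +
    "\n required: " + reqName.padTo(width, ' ') + s" (in $reqPrefix) "
  }
}

// render("Long", "scala", "Long", "java.lang") reproduces the shape of the
// updated no-predef.check expectations further down.
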
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 837ccf7e06..8333d5d295 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -3333,15 +3333,15 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def handleOverloaded = {
val undetparams = context.undetparams
- def funArgTypes(tps: List[Type]) = tps.map { tp =>
- val relTp = tp.asSeenFrom(pre, fun.symbol.owner)
+ def funArgTypes(tpAlts: List[(Type, Symbol)]) = tpAlts.map { case (tp, alt) =>
+ val relTp = tp.asSeenFrom(pre, alt.owner)
val argTps = functionOrSamArgTypes(relTp)
//println(s"funArgTypes $argTps from $relTp")
argTps.map(approximateAbstracts)
}
- def functionProto(argTps: List[Type]): Type =
- try functionType(funArgTypes(argTps).transpose.map(lub), WildcardType)
+ def functionProto(argTpWithAlt: List[(Type, Symbol)]): Type =
+ try functionType(funArgTypes(argTpWithAlt).transpose.map(lub), WildcardType)
catch { case _: IllegalArgumentException => WildcardType }
// To propagate as much information as possible to typedFunction, which uses the expected type to
@@ -3355,21 +3355,21 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// do not receive special treatment: they are typed under WildcardType.)
val altArgPts =
if (settings.isScala212 && args.exists(treeInfo.isFunctionMissingParamType))
- try alts.map(alt => formalTypes(alt.info.paramTypes, argslen)).transpose // do least amount of work up front
+ try alts.map(alt => formalTypes(alt.info.paramTypes, argslen).map(ft => (ft, alt))).transpose // do least amount of work up front
catch { case _: IllegalArgumentException => args.map(_ => Nil) } // fail safe in case formalTypes fails to align to argslen
else args.map(_ => Nil) // will type under argPt == WildcardType
val (args1, argTpes) = context.savingUndeterminedTypeParams() {
val amode = forArgMode(fun, mode)
- map2(args, altArgPts) { (arg, argPts) =>
+ map2(args, altArgPts) { (arg, argPtAlts) =>
def typedArg0(tree: Tree) = {
// if we have an overloaded HOF such as `(f: Int => Int)Int <and> (f: Char => Char)Char`,
// and we're typing a function like `x => x` for the argument, try to collapse
// the overloaded type into a single function type from which `typedFunction`
// can derive the argument type for `x` in the function literal above
val argPt =
- if (argPts.nonEmpty && treeInfo.isFunctionMissingParamType(tree)) functionProto(argPts)
+ if (argPtAlts.nonEmpty && treeInfo.isFunctionMissingParamType(tree)) functionProto(argPtAlts)
else WildcardType
val argTyped = typedArg(tree, amode, BYVALmode, argPt)
@@ -4666,19 +4666,20 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val qual1 = typedQualifier(qual)
if (treeInfo.isVariableOrGetter(qual1)) {
if (Statistics.canEnable) Statistics.stopTimer(failedOpEqNanos, opeqStart)
- val erred = qual1.isErroneous || args.exists(_.isErroneous)
+ val erred = qual1.exists(_.isErroneous) || args.exists(_.isErroneous)
if (erred) reportError(error) else {
val convo = convertToAssignment(fun, qual1, name, args)
silent(op = _.typed1(convo, mode, pt)) match {
case SilentResultValue(t) => t
- case err: SilentTypeError => reportError(SilentTypeError(advice1(convo, error.errors, err), error.warnings))
+ case err: SilentTypeError => reportError(
+ SilentTypeError(advice1(convo, error.errors, err), error.warnings)
+ )
}
}
- }
- else {
+ } else {
if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
val Apply(Select(qual2, _), args2) = tree
- val erred = qual2.isErroneous || args2.exists(_.isErroneous)
+ val erred = qual2.exists(_.isErroneous) || args2.exists(_.isErroneous)
reportError {
if (erred) error else SilentTypeError(advice2(error.errors), error.warnings)
}
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
index 188cabbc8d..f845656980 100644
--- a/src/compiler/scala/tools/util/PathResolver.scala
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -234,7 +234,7 @@ final class PathResolver(settings: Settings) {
// Assemble the elements!
def basis = List[Traversable[ClassPath]](
- JImageDirectoryLookup.apply(), // 0. The Java 9 classpath (backed by the jrt:/ virtual system)
+ JrtClassPath.apply(), // 0. The Java 9 classpath (backed by the jrt:/ virtual system, if available)
classesInPath(javaBootClassPath), // 1. The Java bootstrap class path.
contentsOfDirsInPath(javaExtDirs), // 2. The Java extension class path.
classesInExpandedPath(javaUserClassPath), // 3. The Java application class path.
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index 8c4115b1dd..0d8799282f 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -47,9 +47,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
@transient protected var seedvalue: Int = tableSizeSeed
- import HashTable.powerOfTwo
-
- protected def capacity(expectedSize: Int) = if (expectedSize == 0) 1 else powerOfTwo(expectedSize)
+ protected def capacity(expectedSize: Int) = HashTable.nextPositivePowerOfTwo(expectedSize)
/** The initial size of the hash table.
*/
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 445217ebef..01ec1defad 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -12,7 +12,7 @@ package scala
package collection
package mutable
-import java.lang.Integer.rotateRight
+import java.lang.Integer.{numberOfLeadingZeros, rotateRight}
import scala.util.hashing.byteswap32
/** This class can be used to construct data structures that are based
@@ -405,7 +405,7 @@ private[collection] object HashTable {
private[collection] final def sizeForThreshold(_loadFactor: Int, thr: Int) = ((thr.toLong * loadFactorDenum) / _loadFactor).toInt
- private[collection] final def capacity(expectedSize: Int) = if (expectedSize == 0) 1 else powerOfTwo(expectedSize)
+ private[collection] final def capacity(expectedSize: Int) = nextPositivePowerOfTwo(expectedSize)
trait HashUtils[KeyType] {
protected final def sizeMapBucketBitSize = 5
@@ -433,16 +433,7 @@ private[collection] object HashTable {
/**
* Returns a power of two >= `target`.
*/
- private[collection] def powerOfTwo(target: Int): Int = {
- /* See http://bits.stephan-brumme.com/roundUpToNextPowerOfTwo.html */
- var c = target - 1
- c |= c >>> 1
- c |= c >>> 2
- c |= c >>> 4
- c |= c >>> 8
- c |= c >>> 16
- c + 1
- }
+ private[collection] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1)
class Contents[A, Entry >: Null <: HashEntry[A, Entry]](
val loadFactor: Int,
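
A quick check of the replacement one-liner: the shift amount is masked to its low five bits, so 1 << -numberOfLeadingZeros(n) is equivalent to 1 << (32 - numberOfLeadingZeros(n)), and the target-1 handles exact powers of two.

import java.lang.Integer.numberOfLeadingZeros

object PowerOfTwoSketch {
  // same definition as above, repeated here for standalone experimentation
  def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1)

  def main(args: Array[String]): Unit = {
    assert(nextPositivePowerOfTwo(1) == 1)    // numberOfLeadingZeros(0) == 32, masked shift of 0
    assert(nextPositivePowerOfTwo(17) == 32)
    assert(nextPositivePowerOfTwo(64) == 64)  // exact powers are preserved
  }
}
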
diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala
index ca08f475ce..b2e9ee27b9 100644
--- a/src/library/scala/collection/mutable/OpenHashMap.scala
+++ b/src/library/scala/collection/mutable/OpenHashMap.scala
@@ -31,8 +31,6 @@ object OpenHashMap {
final private class OpenEntry[Key, Value](var key: Key,
var hash: Int,
var value: Option[Value])
-
- private[mutable] def nextPositivePowerOfTwo(i : Int) = 1 << (32 - Integer.numberOfLeadingZeros(i - 1))
}
/** A mutable hash map based on an open hashing scheme. The precise scheme is
@@ -67,7 +65,7 @@ extends AbstractMap[Key, Value]
override def empty: OpenHashMap[Key, Value] = OpenHashMap.empty[Key, Value]
- private[this] val actualInitialSize = OpenHashMap.nextPositivePowerOfTwo(initialSize)
+ private[this] val actualInitialSize = HashTable.nextPositivePowerOfTwo(initialSize)
private var mask = actualInitialSize - 1
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index 7b21351cf6..29a635fcbe 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -154,6 +154,12 @@ private[scala] trait PropertiesTrait {
/* Some runtime values. */
private[scala] def isAvian = javaVmName contains "Avian"
+ private[scala] def coloredOutputEnabled: Boolean = propOrElse("scala.color", "auto") match {
+ case "auto" => System.console() != null && !isWin
+ case a if a.toLowerCase() == "true" => true
+ case _ => false
+ }
+
// This is looking for javac, tools.jar, etc.
// Tries JDK_HOME first, then the more common but likely jre JAVA_HOME,
// and finally the system property based javaHome.
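
The decision table for the new scala.color handling, written as a standalone sketch (the real code consults System.console() and Properties.isWin):

object ColorFlagSketch {
  def coloredOutputEnabled(prop: String, hasConsole: Boolean, isWin: Boolean): Boolean =
    prop match {
      case "auto"                         => hasConsole && !isWin
      case a if a.toLowerCase() == "true" => true
      case _                              => false
    }
}

// -Dscala.color=true forces colour, anything else besides "auto" disables it, and
// the default "auto" enables it only for an attached console outside Windows.
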
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index 0ef52213e5..1cdefff2e9 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -33,6 +33,9 @@ trait BaseTypeSeqs {
protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
new BaseTypeSeq(parents, elems)
+ protected def newMappedBaseTypeSeq(orig: BaseTypeSeq, f: Type => Type) =
+ new MappedBaseTypeSeq(orig, f)
+
/** Note: constructor is protected to force everyone to use the factory method newBaseTypeSeq instead.
* This is necessary because when run from reflection every base type sequence needs to have a
* SynchronizedBaseTypeSeq as mixin.
@@ -125,7 +128,7 @@ trait BaseTypeSeqs {
newBaseTypeSeq(parents, arr)
}
- def lateMap(f: Type => Type): BaseTypeSeq = new MappedBaseTypeSeq(this, f)
+ def lateMap(f: Type => Type): BaseTypeSeq = newMappedBaseTypeSeq(this, f)
def exists(p: Type => Boolean): Boolean = elems exists p
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala
index 9d39ef8b42..055f7c9d5b 100644
--- a/src/reflect/scala/reflect/internal/Names.scala
+++ b/src/reflect/scala/reflect/internal/Names.scala
@@ -296,11 +296,13 @@ trait Names extends api.Names {
*/
final def pos(s: String, start: Int): Int = {
var i = pos(s.charAt(0), start)
- while (i + s.length() <= len) {
+ val sLen = s.length()
+ if (sLen == 1) return i
+ while (i + sLen <= len) {
var j = 1
while (s.charAt(j) == chrs(index + i + j)) {
j += 1
- if (j == s.length()) return i
+ if (j == sLen) return i
}
i = pos(s.charAt(0), i + 1)
}
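
A self-contained model of the fixed search, with a plain char array standing in for the name table: without the early return, a single-character pattern reached s.charAt(1) and threw StringIndexOutOfBoundsException (see the NamesTest addition further down).

object NamePosSketch {
  // returns the start of `sub` within name(0 until len), or len if absent
  def pos(name: Array[Char], len: Int, sub: String, start: Int): Int = {
    def posChar(c: Char, from: Int): Int = {
      var i = from
      while (i < len && name(i) != c) i += 1
      i
    }
    var i = posChar(sub.charAt(0), start)
    val subLen = sub.length
    if (subLen == 1) return i               // the fix: never read sub.charAt(1)
    while (i + subLen <= len) {
      var j = 1
      while (sub.charAt(j) == name(i + j)) {
        j += 1
        if (j == subLen) return i
      }
      i = posChar(sub.charAt(0), i + 1)
    }
    len
  }
}
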
diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala
index 51fb31d36d..0435a2c1cf 100644
--- a/src/reflect/scala/reflect/internal/Scopes.scala
+++ b/src/reflect/scala/reflect/internal/Scopes.scala
@@ -291,25 +291,6 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
null
}
- final def lookupCompanion(original: Symbol): Symbol = {
- lookupSymbolEntry(original) match {
- case null =>
- case entry =>
- var e = lookupEntry(original.name.companionName)
- while (e != null) {
- // 1) Must be owned by the same Scope, to ensure that in
- // `{ class C; { ...; object C } }`, the class is not seen as a companion of the object.
- // 2) Must be a class and module symbol, so that `{ class C; def C }` or `{ type T; object T }` are not companions.
- def isClassAndModule(sym1: Symbol, sym2: Symbol) = sym1.isClass && sym2.isModule
- if ((e.owner eq entry.owner) && (isClassAndModule(original, e.sym) || isClassAndModule(e.sym, original))) {
- return if (e.sym.isCoDefinedWith(original)) e.sym else NoSymbol
- }
- e = lookupNextEntry(e)
- }
- }
- NoSymbol
- }
-
/** lookup a symbol entry matching given name.
* @note from Martin: I believe this is a hotspot or will be one
* in future versions of the type system. I have reverted the previous
@@ -345,6 +326,20 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
e
}
+ final def lookupNameInSameScopeAs(original: Symbol, companionName: Name): Symbol = {
+ lookupSymbolEntry(original) match {
+ case null =>
+ case entry =>
+ var e = lookupEntry(companionName)
+ while (e != null) {
+ if (e.owner eq entry.owner) return e.sym
+ e = lookupNextEntry(e)
+ }
+ }
+ NoSymbol
+ }
+
+
/** TODO - we can test this more efficiently than checking isSubScope
* in both directions. However the size test might be enough to quickly
* rule out most failures.
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 890a5796e9..9d71136fc5 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -809,7 +809,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isDerivedValueClass =
isClass && !hasFlag(PACKAGE | TRAIT) &&
- info.firstParent.typeSymbol == AnyValClass && !isPrimitiveValueClass
+ !phase.erasedTypes && info.firstParent.typeSymbol == AnyValClass && !isPrimitiveValueClass
final def isMethodWithExtension =
isMethod && owner.isDerivedValueClass && !isParamAccessor && !isConstructor && !hasFlag(SUPERACCESSOR) && !isMacro && !isSpecialized
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 1aef30819a..933afbea2b 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -274,6 +274,7 @@ abstract class TreeInfo {
def mayBeVarGetter(sym: Symbol): Boolean = sym.info match {
case NullaryMethodType(_) => sym.owner.isClass && !sym.isStable
case PolyType(_, NullaryMethodType(_)) => sym.owner.isClass && !sym.isStable
+ case PolyType(_, mt @ MethodType(_, _)) => mt.isImplicit && sym.owner.isClass && !sym.isStable
case mt @ MethodType(_, _) => mt.isImplicit && sym.owner.isClass && !sym.isStable
case _ => false
}
diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala
index e9050b4e33..58359e66d9 100644
--- a/src/reflect/scala/reflect/internal/TypeDebugging.scala
+++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala
@@ -59,7 +59,7 @@ trait TypeDebugging {
object typeDebug {
import scala.Console._
- private val colorsOk = sys.props contains "scala.color"
+ private val colorsOk = scala.util.Properties.coloredOutputEnabled
private def inColor(s: String, color: String) = if (colorsOk && s != "") color + s + RESET else s
private def inBold(s: String, color: String) = if (colorsOk && s != "") color + BOLD + s + RESET else s
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index b46f071717..dc12ef9352 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -967,6 +967,8 @@ trait Types
*/
def directObjectString = safeToString
+ def nameAndArgsString = typeSymbol.name.toString
+
/** A test whether a type contains any unification type variables.
* Overridden with custom logic except where trivially true.
*/
@@ -2321,6 +2323,8 @@ trait Types
private def preString = if (needsPreString) pre.prefixString else ""
private def argsString = if (args.isEmpty) "" else args.mkString("[", ",", "]")
+ override def nameAndArgsString = typeSymbol.name.toString + argsString
+
private def refinementDecls = fullyInitializeScope(decls) filter (sym => sym.isPossibleInRefinement && sym.isPublic)
private def refinementString = (
if (sym.isStructuralRefinement)
@@ -2728,6 +2732,19 @@ trait Types
arg.toString
}
+ override def nameAndArgsString: String = underlying match {
+ case TypeRef(_, sym, args) if !settings.debug && isRepresentableWithWildcards =>
+ sym.name + wildcardArgsString(quantified.toSet, args).mkString("[", ",", "]")
+ case TypeRef(_, sym, args) =>
+ sym.name + args.mkString("[", ",", "]") + existentialClauses
+ case _ => underlying.typeSymbol.name + existentialClauses
+ }
+
+ private def existentialClauses = {
+ val str = quantified map (_.existentialToString) mkString (" forSome { ", "; ", " }")
+ if (settings.explaintypes) "(" + str + ")" else str
+ }
+
/** An existential can only be printed with wildcards if:
* - the underlying type is a typeref
* - every quantified variable appears at most once as a type argument and
@@ -2746,7 +2763,7 @@ trait Types
tpe.typeSymbol.isRefinementClass && (tpe.parents exists isQuantified)
}
val (wildcardArgs, otherArgs) = args partition (arg => qset contains arg.typeSymbol)
- wildcardArgs.distinct == wildcardArgs &&
+ wildcardArgs.toSet.size == wildcardArgs.size &&
!(otherArgs exists (arg => isQuantified(arg))) &&
!(wildcardArgs exists (arg => isQuantified(arg.typeSymbol.info.bounds))) &&
!(qset contains sym) &&
@@ -2756,17 +2773,13 @@ trait Types
}
override def safeToString: String = {
- def clauses = {
- val str = quantified map (_.existentialToString) mkString (" forSome { ", "; ", " }")
- if (settings.explaintypes) "(" + str + ")" else str
- }
underlying match {
case TypeRef(pre, sym, args) if !settings.debug && isRepresentableWithWildcards =>
"" + TypeRef(pre, sym, Nil) + wildcardArgsString(quantified.toSet, args).mkString("[", ", ", "]")
case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) =>
- "(" + underlying + ")" + clauses
+ "(" + underlying + ")" + existentialClauses
case _ =>
- "" + underlying + clauses
+ "" + underlying + existentialClauses
}
}
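
A string-level mock of nameAndArgsString as introduced above: the simple type name plus its argument list, with no prefix (the prefix is rendered separately by foundReqMsg as "(in <owner>)").

object NameAndArgsSketch {
  def nameAndArgsString(name: String, args: List[String]): String =
    if (args.isEmpty) name else args.mkString(name + "[", ",", "]")
}

// nameAndArgsString("Set", List("String")) == "Set[String]"
// nameAndArgsString("Long", Nil)           == "Long"
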
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index 08ccac8069..b4152c9b8c 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -377,7 +377,7 @@ abstract class UnPickler {
def readThisType(): Type = {
val sym = readSymbolRef() match {
- case stub: StubSymbol => stub.setFlag(PACKAGE)
+ case stub: StubSymbol => stub.setFlag(PACKAGE | MODULE)
case sym => sym
}
ThisType(sym)
diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala
index 24f8aa88e6..07ae71538c 100644
--- a/src/reflect/scala/reflect/internal/transform/Erasure.scala
+++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala
@@ -161,6 +161,11 @@ trait Erasure {
}
if (newParents eq parents) tp
else ClassInfoType(newParents, decls, clazz)
+
+ // can happen while this map is being used before erasure (e.g. when reasoning about sam types)
+ // the regular mapOver will cause a class cast exception because TypeBounds don't erase to TypeBounds
+ case _: BoundedWildcardType => tp // skip
+
case _ =>
mapOver(tp)
}
diff --git a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
index 3cede1b3c5..49ab0cb30e 100644
--- a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
+++ b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
@@ -92,7 +92,7 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
}
}
- private val packages = mutable.Map[String, Package]()
+ private[this] val packages = mutable.Map[String, Package]()
override def definePackage(name: String, specTitle: String, specVersion: String, specVendor: String, implTitle: String, implVersion: String, implVendor: String, sealBase: URL): Package = {
throw new UnsupportedOperationException()
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
index f0d96e0fd6..eadafc8abb 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
@@ -18,6 +18,12 @@ private[reflect] trait SynchronizedOps extends internal.SymbolTable
if (elems.exists(_.isInstanceOf[RefinedType])) new BaseTypeSeq(parents, elems) with SynchronizedBaseTypeSeq
else new BaseTypeSeq(parents, elems)
+ override protected def newMappedBaseTypeSeq(orig: BaseTypeSeq, f: Type => Type) =
+ // MappedBaseTypeSeqs are used rarely enough that we unconditionally mix in the synchronized
+ // wrapper rather than doing so conditionally. A previous attempt to do that broke the "late"
+ // part of the "lateMap" contract by inspecting the mapped elements.
+ new MappedBaseTypeSeq(orig, f) with SynchronizedBaseTypeSeq
+
trait SynchronizedBaseTypeSeq extends BaseTypeSeq {
override def apply(i: Int): Type = gilSynchronized { super.apply(i) }
override def rawElem(i: Int) = gilSynchronized { super.rawElem(i) }
@@ -28,11 +34,6 @@ private[reflect] trait SynchronizedOps extends internal.SymbolTable
override def exists(p: Type => Boolean): Boolean = gilSynchronized { super.exists(p) }
override lazy val maxDepth = gilSynchronized { maxDepthOfElems }
override def toString = gilSynchronized { super.toString }
-
- override def lateMap(f: Type => Type): BaseTypeSeq =
- // only need to synchronize BaseTypeSeqs if they contain refined types
- if (map(f).toList.exists(_.isInstanceOf[RefinedType])) new MappedBaseTypeSeq(this, f) with SynchronizedBaseTypeSeq
- else new MappedBaseTypeSeq(this, f)
}
// Scopes
diff --git a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
index 01e3a90950..f68705211f 100644
--- a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -13,6 +13,12 @@ trait ExprTyper {
import global.{ reporter => _, Import => _, _ }
import naming.freshInternalVarName
+ private def doInterpret(code: String): IR.Result = {
+ // interpret/interpretSynthetic may change the phase, which would have unintended effects on types.
+ val savedPhase = phase
+ try interpretSynthetic(code) finally phase = savedPhase
+ }
+
def symbolOfLine(code: String): Symbol = {
def asExpr(): Symbol = {
val name = freshInternalVarName()
@@ -21,7 +27,7 @@ trait ExprTyper {
// behind a def and strip the NullaryMethodType which wraps the expr.
val line = "def " + name + " = " + code
- interpretSynthetic(line) match {
+ doInterpret(line) match {
case IR.Success =>
val sym0 = symbolOfTerm(name)
// drop NullaryMethodType
@@ -32,7 +38,7 @@ trait ExprTyper {
def asDefn(): Symbol = {
val old = repl.definedSymbolList.toSet
- interpretSynthetic(code) match {
+ doInterpret(code) match {
case IR.Success =>
repl.definedSymbolList filterNot old match {
case Nil => NoSymbol
@@ -43,7 +49,7 @@ trait ExprTyper {
}
}
def asError(): Symbol = {
- interpretSynthetic(code)
+ doInterpret(code)
NoSymbol
}
beSilentDuring(asExpr()) orElse beSilentDuring(asDefn()) orElse asError()
@@ -72,7 +78,7 @@ trait ExprTyper {
def asProperType(): Option[Type] = {
val name = freshInternalVarName()
val line = "def %s: %s = ???" format (name, typeString)
- interpretSynthetic(line) match {
+ doInterpret(line) match {
case IR.Success =>
val sym0 = symbolOfTerm(name)
Some(sym0.asMethod.returnType)
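
doInterpret follows the usual save/restore idiom; a generic version, as a hypothetical helper rather than repl API, looks like this:

object SavedState {
  def withSaved[S, A](read: => S)(write: S => Unit)(body: => A): A = {
    val saved = read
    try body finally write(saved)
  }
}

// e.g. withSaved(phase)(p => phase = p)(interpretSynthetic(code)) mirrors doInterpret above.
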
diff --git a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
index d6c0dafaf2..f455e71476 100644
--- a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -213,29 +213,40 @@ trait MemberHandlers {
class ImportHandler(imp: Import) extends MemberHandler(imp) {
val Import(expr, selectors) = imp
+
def targetType = intp.global.rootMirror.getModuleIfDefined("" + expr) match {
case NoSymbol => intp.typeOfExpression("" + expr)
- case sym => sym.thisType
+ case sym => sym.tpe
}
- private def importableTargetMembers = importableMembers(targetType).toList
- // wildcard imports, e.g. import foo._
- private def selectorWild = selectors filter (_.name == nme.USCOREkw)
- // renamed imports, e.g. import foo.{ bar => baz }
- private def selectorRenames = selectors map (_.rename) filterNot (_ == null)
+
+ private def isFlattenedSymbol(sym: Symbol) =
+ sym.owner.isPackageClass &&
+ sym.name.containsName(nme.NAME_JOIN_STRING) &&
+ sym.owner.info.member(sym.name.take(sym.name.indexOf(nme.NAME_JOIN_STRING))) != NoSymbol
+
+ private def importableTargetMembers =
+ importableMembers(exitingTyper(targetType)).filterNot(isFlattenedSymbol).toList
+
+ // non-wildcard imports
+ private def individualSelectors = selectors filter analyzer.isIndividualImport
/** Whether this import includes a wildcard import */
- val importsWildcard = selectorWild.nonEmpty
+ val importsWildcard = selectors exists analyzer.isWildcardImport
def implicitSymbols = importedSymbols filter (_.isImplicit)
def importedSymbols = individualSymbols ++ wildcardSymbols
- private val selectorNames = selectorRenames filterNot (_ == nme.USCOREkw) flatMap (_.bothNames) toSet
- lazy val individualSymbols: List[Symbol] = exitingTyper(importableTargetMembers filter (m => selectorNames(m.name)))
- lazy val wildcardSymbols: List[Symbol] = exitingTyper(if (importsWildcard) importableTargetMembers else Nil)
+ lazy val importableSymbolsWithRenames = {
+ val selectorRenameMap = individualSelectors.flatMap(x => x.name.bothNames zip x.rename.bothNames).toMap
+ importableTargetMembers flatMap (m => selectorRenameMap.get(m.name) map (m -> _))
+ }
+
+ lazy val individualSymbols: List[Symbol] = importableSymbolsWithRenames map (_._1)
+ lazy val wildcardSymbols: List[Symbol] = if (importsWildcard) importableTargetMembers else Nil
/** Complete list of names imported by a wildcard */
lazy val wildcardNames: List[Name] = wildcardSymbols map (_.name)
- lazy val individualNames: List[Name] = individualSymbols map (_.name)
+ lazy val individualNames: List[Name] = importableSymbolsWithRenames map (_._2)
/** The names imported by this statement */
override lazy val importedNames: List[Name] = wildcardNames ++ individualNames
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
index f3115d9800..a86069f198 100644
--- a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
@@ -17,7 +17,7 @@ class ReplProps {
private def int(name: String) = Prop[Int](name)
// This property is used in TypeDebugging. Let's recycle it.
- val colorOk = bool("scala.color")
+ val colorOk = Properties.coloredOutputEnabled
val info = bool("scala.repl.info")
val debug = bool("scala.repl.debug")
diff --git a/src/repl/scala/tools/nsc/interpreter/Scripted.scala b/src/repl/scala/tools/nsc/interpreter/Scripted.scala
index 6aef486957..8d87d98e53 100644
--- a/src/repl/scala/tools/nsc/interpreter/Scripted.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Scripted.scala
@@ -331,7 +331,7 @@ class WriterOutputStream(writer: Writer) extends OutputStream {
byteBuffer.flip()
val result = decoder.decode(byteBuffer, charBuffer, /*eoi=*/ false)
if (byteBuffer.remaining == 0) byteBuffer.clear()
- if (charBuffer.position > 0) {
+ if (charBuffer.position() > 0) {
charBuffer.flip()
writer write charBuffer.toString
charBuffer.clear()
diff --git a/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala
index 7f4e52e88d..2524fb75fb 100644
--- a/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala
@@ -74,9 +74,8 @@ object EntityLink {
def unapply(el: EntityLink): Option[(Inline, LinkTo)] = Some((el.title, el.link))
}
final case class HtmlTag(data: String) extends Inline {
- private val Pattern = """(?ms)\A<(/?)(.*?)[\s>].*\z""".r
private val (isEnd, tagName) = data match {
- case Pattern(s1, s2) =>
+ case HtmlTag.Pattern(s1, s2) =>
(! s1.isEmpty, Some(s2.toLowerCase))
case _ =>
(false, None)
@@ -86,8 +85,13 @@ final case class HtmlTag(data: String) extends Inline {
isEnd && tagName == open.tagName
}
+ def close = tagName collect {
+ case name if !HtmlTag.TagsNotToClose(name) && !data.endsWith(s"</$name>") => HtmlTag(s"</$name>")
+ }
+}
+object HtmlTag {
+ private val Pattern = """(?ms)\A<(/?)(.*?)[\s>].*\z""".r
private val TagsNotToClose = Set("br", "img")
- def close = tagName collect { case name if !TagsNotToClose(name) => HtmlTag(s"</$name>") }
}
/** The summary of a comment, usually its first sentence. There must be exactly one summary per body. */
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/Page.scala b/src/scaladoc/scala/tools/nsc/doc/html/Page.scala
index c720c4939f..a84f77919d 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/Page.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/Page.scala
@@ -7,7 +7,7 @@ package scala
package tools.nsc.doc.html
import scala.tools.nsc.doc.model._
-import scala.tools.nsc.doc.base.comment
+import scala.tools.nsc.doc.base.comment._
import java.io.{FileOutputStream, File}
import scala.reflect.NameTransformer
import java.nio.channels.Channels
@@ -106,16 +106,21 @@ abstract class Page {
case dtpl: DocTemplateEntity => dtpl.companion.isDefined
case _ => false
}
+}
- protected def inlineToStr(inl: comment.Inline): String = inl match {
- case comment.Chain(items) => items flatMap (inlineToStr(_)) mkString ""
- case comment.Italic(in) => inlineToStr(in)
- case comment.Bold(in) => inlineToStr(in)
- case comment.Underline(in) => inlineToStr(in)
- case comment.Monospace(in) => inlineToStr(in)
- case comment.Text(text) => text
- case comment.Summary(in) => inlineToStr(in)
- case comment.EntityLink(comment.Text(text), _) => text
- case _ => inl.toString
+object Page {
+ def inlineToStr(inl: Inline): String = inl match {
+ case Chain(items) => items flatMap (inlineToStr(_)) mkString ""
+ case Italic(in) => inlineToStr(in)
+ case Bold(in) => inlineToStr(in)
+ case Underline(in) => inlineToStr(in)
+ case Superscript(in) => inlineToStr(in)
+ case Subscript(in) => inlineToStr(in)
+ case Link(raw, title) => inlineToStr(title)
+ case Monospace(in) => inlineToStr(in)
+ case Text(text) => text
+ case Summary(in) => inlineToStr(in)
+ case HtmlTag(tag) => "<[^>]*>".r.replaceAllIn(tag, "")
+ case EntityLink(in, _) => inlineToStr(in)
}
}
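
A quick check of the HtmlTag case added to Page.inlineToStr: the regex drops markup and keeps the text content.

object StripTagsSketch {
  def main(args: Array[String]): Unit = {
    val stripped = "<[^>]*>".r.replaceAllIn("<b>code</b> sample", "")
    assert(stripped == "code sample")
  }
}
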
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala
index 54bf42bbd5..fb2bf5049f 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala
@@ -90,13 +90,13 @@ trait EntityPage extends HtmlPage {
mbr match {
case dtpl: DocTemplateEntity =>
dtpl.companion.fold(<span class="separator"></span>) { c: DocTemplateEntity =>
- <a class="object" href={relativeLinkTo(c)} title={c.comment.fold("")(com => inlineToStr(com.short))}></a>
+ <a class="object" href={relativeLinkTo(c)} title={c.comment.fold("")(com => Page.inlineToStr(com.short))}></a>
}
case _ => <span class="separator"></span>
}
}
- <a class={mbr.kind} href={relativeLinkTo(mbr)} title={mbr.comment.fold("")(com => inlineToStr(com.short))}></a>
- <a href={relativeLinkTo(mbr)} title={mbr.comment.fold("")(com => inlineToStr(com.short))}>
+ <a class={mbr.kind} href={relativeLinkTo(mbr)} title={mbr.comment.fold("")(com => Page.inlineToStr(com.short))}></a>
+ <a href={relativeLinkTo(mbr)} title={mbr.comment.fold("")(com => Page.inlineToStr(com.short))}>
{mbr.name}
</a>
</li>
@@ -897,7 +897,7 @@ trait EntityPage extends HtmlPage {
}
}
if (!nameLink.isEmpty)
- <a title={mbr.comment.fold("")(c => inlineToStr(c.short))} href={nameLink}>
+ <a title={mbr.comment.fold("")(c => Page.inlineToStr(c.short))} href={nameLink}>
{nameHtml}
</a>
else nameHtml
@@ -1065,7 +1065,7 @@ trait EntityPage extends HtmlPage {
body.blocks flatMap (blockToStr(_)) mkString ""
private def blockToStr(block: comment.Block): String = block match {
- case comment.Paragraph(in) => inlineToStr(in)
+ case comment.Paragraph(in) => Page.inlineToStr(in)
case _ => block.toString
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala
index 8f58a7b845..28304e76c7 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala
@@ -87,7 +87,7 @@ class IndexScript(universe: doc.Universe) extends Page {
/** Gets the short description i.e. the first sentence of the docstring */
def shortDesc(mbr: MemberEntity): String = mbr.comment.fold("") { c =>
- inlineToStr(c.short).replaceAll("\n", "")
+ Page.inlineToStr(c.short).replaceAll("\n", "")
}
/** Returns the json representation of the supplied members */
diff --git a/test/files/neg/no-predef.check b/test/files/neg/no-predef.check
index a63d8c5ba5..f5c2e82fe1 100644
--- a/test/files/neg/no-predef.check
+++ b/test/files/neg/no-predef.check
@@ -1,11 +1,11 @@
no-predef.scala:2: error: type mismatch;
- found : scala.Long(5L)
- required: java.lang.Long
+ found : Long (in scala)
+ required: Long (in java.lang)
def f1 = 5L: java.lang.Long
^
no-predef.scala:3: error: type mismatch;
- found : java.lang.Long
- required: scala.Long
+ found : Long (in java.lang)
+ required: Long (in scala)
def f2 = new java.lang.Long(5) : Long
^
no-predef.scala:4: error: value map is not a member of String
diff --git a/test/files/neg/t10207.check b/test/files/neg/t10207.check
new file mode 100755
index 0000000000..3330db44a5
--- /dev/null
+++ b/test/files/neg/t10207.check
@@ -0,0 +1,4 @@
+t10207.scala:14: error: too many arguments (2) for method apply: (key: Int)scala.collection.mutable.ArrayBuffer[String] in trait MapLike
+ m(1, (_ => empty)) ++= AB("eins", "uno")
+ ^
+one error found
diff --git a/test/files/neg/t10207.scala b/test/files/neg/t10207.scala
new file mode 100644
index 0000000000..2dfc5d75c9
--- /dev/null
+++ b/test/files/neg/t10207.scala
@@ -0,0 +1,16 @@
+
+// Was:
+// warning: an unexpected type representation reached the compiler backend
+// Now:
+// error: too many arguments (2) for method apply: (key: Int)scala.collection.mutable.ArrayBuffer[String] in trait MapLike
+
+trait Test {
+ import collection.mutable.{Map=>MMap, ArrayBuffer=>AB}
+
+ val m = MMap((1 -> AB("one")))
+
+ val empty = AB[String]()
+
+ m(1, (_ => empty)) ++= AB("eins", "uno")
+}
+
diff --git a/test/files/neg/t2102.check b/test/files/neg/t2102.check
index b4f91a5319..6f70839d22 100644
--- a/test/files/neg/t2102.check
+++ b/test/files/neg/t2102.check
@@ -1,6 +1,6 @@
t2102.scala:2: error: type mismatch;
- found : java.util.Iterator[Int]
- required: scala.collection.Iterator[_]
+ found : Iterator[Int] (in java.util)
+ required: Iterator[_] (in scala.collection)
val x: Iterator[_] = new java.util.ArrayList[Int]().iterator
^
one error found
diff --git a/test/files/neg/type-diagnostics.check b/test/files/neg/type-diagnostics.check
index c5e6dec3f8..fd327bcb66 100644
--- a/test/files/neg/type-diagnostics.check
+++ b/test/files/neg/type-diagnostics.check
@@ -1,6 +1,6 @@
type-diagnostics.scala:4: error: type mismatch;
- found : scala.collection.Set[String]
- required: scala.collection.immutable.Set[String]
+ found : Set[String] (in scala.collection)
+ required: Set[String] (in scala.collection.immutable)
def f = Calculator("Hello", binding.keySet: collection.Set[String])
^
type-diagnostics.scala:13: error: type mismatch;
diff --git a/test/files/pos/overloaded_ho_fun.scala b/test/files/pos/overloaded_ho_fun.scala
index 2699ad35f8..17176715f0 100644
--- a/test/files/pos/overloaded_ho_fun.scala
+++ b/test/files/pos/overloaded_ho_fun.scala
@@ -49,3 +49,18 @@ object sorting {
// def andThen[C](g: Bijection[B, C]): Bijection[A, C] = ???
// def compose[T](g: Bijection[T, A]) = g andThen this
// }
+
+object SI10194 {
+ trait X[A] {
+ def map[B](f: A => B): Unit
+ }
+
+ trait Y[A] extends X[A] {
+ def map[B](f: A => B)(implicit ordering: Ordering[B]): Unit
+ }
+
+ trait Z[A] extends Y[A]
+
+ (null: Y[Int]).map(x => x.toString) // compiled
+ (null: Z[Int]).map(x => x.toString) // didn't compile
+}
diff --git a/test/files/pos/sam_erasure_boundedwild.scala b/test/files/pos/sam_erasure_boundedwild.scala
new file mode 100644
index 0000000000..1ec27e0ea4
--- /dev/null
+++ b/test/files/pos/sam_erasure_boundedwild.scala
@@ -0,0 +1,11 @@
+class Test {
+ trait Q[T] {
+ def toArray[T](x: Array[T]): Array[T]
+ def toArray(): Array[T]
+ }
+
+ def crashTyper: Array[_] = {
+ val x : Q[_] = ???
+ x.toArray // crashes while doing overload resolution
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t4237.scala b/test/files/pos/t4237.scala
index fcf6eb8bf1..3f605607b2 100644
--- a/test/files/pos/t4237.scala
+++ b/test/files/pos/t4237.scala
@@ -2,5 +2,16 @@ class A {
(new { def field = 0; def field_=(i: Int) = () }).field = 5 // compiles as expected
(new { def field(implicit i: Int) = 0; def field_=(i: Int) = () }).field = 5 // compiles even with implicit params on getter
(new { def field = 0; def field_=[T](i: Int) = () }).field = 5 // compiles with type param on setter
- (new { def field[T] = 0; def field_=(i: Int) = () }).field = 5 // DOESN'T COMPILE
-}
\ No newline at end of file
+ (new { def field[T] = 0; def field_=(i: Int) = () }).field = 5 // DIDN'T COMPILE
+
+ class Imp
+ implicit val imp: Imp = new Imp
+ implicit val implicitList: List[Int] = null
+
+ // compiles even with implicit params on setter
+ (new { def field(implicit i: Int) = 0; def field_=(i: Int)(implicit j: Imp) = () }).field = 5
+ (new { def field(implicit i: Int) = 0; def field_=[T <: Imp](i: Int)(implicit j: T) = () }).field = 5
+ // was reassignment to val
+ (new { def field[T](implicit ts: List[T]) = 0; def field_=[T](i: Int)(implicit ts: List[T]) = () }).field = 5
+ (new { def field[T](implicit ts: List[T]) = 0; def field_=[T](i: T)(implicit ts: List[T]) = () }).field = 5
+}
diff --git a/test/files/run/elidable-opt.check b/test/files/run/elidable-opt.check
index 88cf98e0d1..969b9a420a 100644
--- a/test/files/run/elidable-opt.check
+++ b/test/files/run/elidable-opt.check
@@ -11,4 +11,4 @@ false
0
0.0
0.0
-null
+
diff --git a/test/files/run/elidable.check b/test/files/run/elidable.check
index 88cf98e0d1..969b9a420a 100644
--- a/test/files/run/elidable.check
+++ b/test/files/run/elidable.check
@@ -11,4 +11,4 @@ false
0
0.0
0.0
-null
+
diff --git a/test/files/run/elidable.scala b/test/files/run/elidable.scala
index 02785972bb..fed1c7b392 100644
--- a/test/files/run/elidable.scala
+++ b/test/files/run/elidable.scala
@@ -3,31 +3,36 @@ import elidable._
// runs -Xelide-below WARNING or 900
+object Fail {
+ def fail(msg: String): Unit = throw new IllegalStateException(s"Expected failure: $msg")
+}
+import Fail.fail
+
trait T {
@elidable(FINEST) def f1()
@elidable(SEVERE) def f2()
- @elidable(FINEST) def f3() = assert(false, "Should have been elided.")
+ @elidable(FINEST) def f3() = fail("Should have been elided.")
def f4()
}
class C extends T {
def f1() = println("Good for me, I was not elided. C.f1")
def f2() = println("Good for me, I was not elided. C.f2")
- @elidable(FINEST) def f4() = assert(false, "Should have been elided.")
+ @elidable(FINEST) def f4() = fail("Should have been elided.")
}
object O {
- @elidable(FINEST) def f1() = assert(false, "Should have been elided.")
- @elidable(INFO) def f2() = assert(false, "Should have been elided.")
+ @elidable(FINEST) def f1() = fail("Should have been elided.")
+ @elidable(INFO) def f2() = fail("Should have been elided.")
@elidable(SEVERE) def f3() = println("Good for me, I was not elided. O.f3")
- @elidable(INFO) def f4 = assert(false, "Should have been elided (no parens).")
+ @elidable(INFO) def f4 = fail("Should have been elided (no parens).")
}
object Test {
- @elidable(FINEST) def f1() = assert(false, "Should have been elided.")
- @elidable(INFO) def f2() = assert(false, "Should have been elided.")
+ @elidable(FINEST) def f1() = fail("Should have been elided.")
+ @elidable(INFO) def f2() = fail("Should have been elided.")
@elidable(SEVERE) def f3() = println("Good for me, I was not elided. Test.f3")
- @elidable(INFO) def f4 = assert(false, "Should have been elided (no parens).")
+ @elidable(INFO) def f4 = fail("Should have been elided (no parens).")
@elidable(FINEST) def f5() = {}
@elidable(FINEST) def f6() = true
@@ -38,12 +43,12 @@ object Test {
@elidable(FINEST) def fb() = 1l
@elidable(FINEST) def fc() = 1.0f
@elidable(FINEST) def fd() = 1.0
- @elidable(FINEST) def fe() = "s"
+ @elidable(FINEST) def fe() = { fail("Should have been elided to empty string.") ; "hello, world" }
/* variable elisions? see test/files/neg/t10068.scala
- @elidable(INFO) val goner1: Int = { assert(false, "Should have been elided.") ; 42 }
- @elidable(INFO) lazy val goner2: Int = { assert(false, "Should have been elided.") ; 42 }
- @elidable(INFO) var goner3: Int = { assert(false, "Should have been elided.") ; 42 }
+ @elidable(INFO) val goner1: Int = { fail("Should have been elided.") ; 42 }
+ @elidable(INFO) lazy val goner2: Int = { fail("Should have been elided.") ; 42 }
+ @elidable(INFO) var goner3: Int = { fail("Should have been elided.") ; 42 }
@elidable(INFO) var goner4: Nothing = _
*/
@@ -74,6 +79,19 @@ object Test {
println(fc())
println(fd())
println(fe())
+ if (!fe().isEmpty) fail(s"Not empty: [${fe()}]")
+/*
+()
+false
+0
+0
+0
+0
+0
+0.0
+0.0
+ // was: null
+*/
// this one won't show up in the output because a call to f1 is elidable when accessed through T
(c:T).f1()
diff --git a/test/files/run/literals.scala b/test/files/run/literals.scala
index 25501123b5..a7962e5cd9 100644
--- a/test/files/run/literals.scala
+++ b/test/files/run/literals.scala
@@ -6,7 +6,7 @@
object Test {
- /* I add a couple of Unicode identifier tests here temporarily */
+ /* I add a couple of Unicode identifier tests here "temporarily" */
def \u03b1\u03c1\u03b5\u03c4\u03b7 = "alpha rho epsilon tau eta"
@@ -80,6 +80,9 @@ object Test {
check_success("1e1f == 10.0f", 1e1f, 10.0f)
check_success(".3f == 0.3f", .3f, 0.3f)
check_success("0f == 0.0f", 0f, 0.0f)
+ check_success("0f == -0.000000000000000000e+00f", 0f, -0.000000000000000000e+00f)
+ check_success("0f == -0.000000000000000000e+00F", 0f, -0.000000000000000000e+00F)
+ check_success("0f == -0.0000000000000000e14f", 0f, -0.0000000000000000e14f)
check_success("01.23f == 1.23f", 01.23f, 1.23f)
check_success("3.14f == 3.14f", 3.14f, 3.14f)
check_success("6.022e23f == 6.022e23f", 6.022e23f, 6.022e23f)
@@ -96,6 +99,11 @@ object Test {
check_success(".3 == 0.3", .3, 0.3)
check_success("0.0 == 0.0", 0.0, 0.0)
check_success("0d == 0.0", 0d, 0.0)
+ check_success("0d == 0.000000000000000000e+00d", 0d, 0.000000000000000000e+00d)
+ check_success("0d == -0.000000000000000000e+00d", 0d, -0.000000000000000000e+00d)
+ check_success("0d == -0.000000000000000000e+00D", 0d, -0.000000000000000000e+00D)
+ check_success("0.0 == 0.000000000000000000e+00", 0.0, 0.000000000000000000e+00)
+ check_success("0.0 == -0.000000000000000000e+00", 0.0, -0.000000000000000000e+00)
check_success("01.23 == 1.23", 01.23, 1.23)
check_success("01.23d == 1.23d", 01.23d, 1.23d)
check_success("3.14 == 3.14", 3.14, 3.14)
diff --git a/test/files/run/t10026.check b/test/files/run/t10026.check
new file mode 100644
index 0000000000..15a62794a9
--- /dev/null
+++ b/test/files/run/t10026.check
@@ -0,0 +1 @@
+List(1, 2, 3)
diff --git a/test/files/run/t10026.scala b/test/files/run/t10026.scala
new file mode 100644
index 0000000000..a56840c8c2
--- /dev/null
+++ b/test/files/run/t10026.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe
+import scala.tools.reflect.ToolBox
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val classloader = getClass.getClassLoader
+ val toolbox = universe.runtimeMirror(classloader).mkToolBox()
+ println(toolbox.compile(toolbox.parse("Array(1, 2, 3).toList")).apply())
+ }
+}
+
diff --git a/test/files/run/t10171/Test.scala b/test/files/run/t10171/Test.scala
new file mode 100644
index 0000000000..37a2cfc67f
--- /dev/null
+++ b/test/files/run/t10171/Test.scala
@@ -0,0 +1,59 @@
+import scala.tools.partest._
+import java.io.File
+
+object Test extends StoreReporterDirectTest {
+ def code = ???
+
+ def compileCode(code: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+ }
+
+ def library = """
+package a {
+ package b {
+ class C { class D }
+ }
+}
+package z {
+ class Base {
+ type S = String
+ def foo(s: S): a.b.C#D = null
+ }
+ class Sub extends Base {
+ def sub = "sub"
+ }
+}
+ """
+
+ def client = """
+ class Client { new z.Sub().sub }
+ """
+
+ def deleteClass(s: String) = {
+ val f = new File(testOutput.path, s + ".class")
+ assert(f.exists)
+ f.delete()
+ }
+
+ def deletePackage(s: String) = {
+ val f = new File(testOutput.path, s)
+ assert(f.exists)
+ f.delete()
+ }
+
+ def assertNoErrors(): Unit = {
+ assert(storeReporter.infos.isEmpty, storeReporter.infos.mkString("\n"))
+ storeReporter.reset()
+ }
+ def show(): Unit = {
+ compileCode(library)
+ assertNoErrors()
+ deleteClass("a/b/C$D")
+ deleteClass("a/b/C")
+ deletePackage("a/b")
+ compileCode(client)
+ assertNoErrors()
+ }
+}
+
diff --git a/test/files/run/t9880-9881.check b/test/files/run/t9880-9881.check
new file mode 100644
index 0000000000..36513e249a
--- /dev/null
+++ b/test/files/run/t9880-9881.check
@@ -0,0 +1,36 @@
+
+scala> // import in various ways
+
+scala> import java.util.Date
+import java.util.Date
+
+scala> import scala.util._
+import scala.util._
+
+scala> import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{universe=>ru}
+
+scala> import ru.TypeTag
+import ru.TypeTag
+
+scala>
+
+scala> // show the imports
+
+scala> :imports
+ 1) import java.lang._ (...)
+ 2) import scala._ (...)
+ 3) import scala.Predef._ (...)
+ 4) import java.util.Date (...)
+ 5) import scala.util._ (...)
+ 6) import scala.reflect.runtime.{universe=>ru} (...)
+ 7) import ru.TypeTag (...)
+
+scala>
+
+scala> // should be able to define this class with the imports above
+
+scala> class C[T](date: Date, rand: Random, typeTag: TypeTag[T])
+defined class C
+
+scala> :quit
diff --git a/test/files/run/t9880-9881.scala b/test/files/run/t9880-9881.scala
new file mode 100644
index 0000000000..0268c8c32c
--- /dev/null
+++ b/test/files/run/t9880-9881.scala
@@ -0,0 +1,29 @@
+import scala.tools.partest.ReplTest
+import scala.tools.nsc.Settings
+
+object Test extends ReplTest {
+
+ override def transformSettings(s: Settings): Settings = {
+ s.Yreplclassbased.value = true
+ s
+ }
+
+ lazy val normalizeRegex = """(import\s.*)\(.*\)""".r
+
+ override def normalize(s: String): String = normalizeRegex.replaceFirstIn(s, "$1(...)")
+
+ def code =
+ """
+ |// import in various ways
+ |import java.util.Date
+ |import scala.util._
+ |import scala.reflect.runtime.{universe => ru}
+ |import ru.TypeTag
+ |
+ |// show the imports
+ |:imports
+ |
+ |// should be able to define this class with the imports above
+ |class C[T](date: Date, rand: Random, typeTag: TypeTag[T])
+ """.stripMargin
+}
diff --git a/test/junit/scala/reflect/internal/NamesTest.scala b/test/junit/scala/reflect/internal/NamesTest.scala
index 549c10abed..d6182e7cca 100644
--- a/test/junit/scala/reflect/internal/NamesTest.scala
+++ b/test/junit/scala/reflect/internal/NamesTest.scala
@@ -92,4 +92,29 @@ class NamesTest {
assert(h1 string_== h2)
assert(h1 string_== h1y)
}
+
+ @Test
+ def pos(): Unit = {
+ def check(nameString: String, sub: String) = {
+ val name = TermName(nameString)
+ val javaResult = name.toString.indexOf(sub) match { case -1 => name.length case x => x }
+ val nameResult = name.pos(sub)
+ assertEquals(javaResult, nameResult)
+ if (sub.length == 1) {
+ val nameResultChar = name.pos(sub.head)
+ assertEquals(javaResult, nameResultChar)
+ }
+ }
+
+ check("a", "a") // was "String index out of range: 1
+ check("a", "b")
+ check("a", "ab")
+ check("a", "ba")
+ check("ab", "a")
+ check("ab", "b")
+ check("ab", "ab")
+ check("ab", "ba")
+ check("", "x")
+ check("", "xy")
+ }
}
diff --git a/test/junit/scala/sys/process/PipedProcessTest.scala b/test/junit/scala/sys/process/PipedProcessTest.scala
index 53f053e9aa..3f403dbe75 100644
--- a/test/junit/scala/sys/process/PipedProcessTest.scala
+++ b/test/junit/scala/sys/process/PipedProcessTest.scala
@@ -7,13 +7,19 @@ import java.io.{InputStream, OutputStream, PipedInputStream, PipedOutputStream,
ByteArrayOutputStream, IOException, Closeable}
import java.lang.reflect.InvocationTargetException
import scala.concurrent.{Await, Future}
-import scala.concurrent.duration.{Duration, SECONDS}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.control.Exception.ignoring
-// Each test normally ends in a moment, but for failure cases, waits until one second.
+// Each test normally ends in a moment, but failure cases wait up to two seconds.
// SI-7350, SI-8768
+// one second wasn't always enough --
+// https://github.com/scala/scala-dev/issues/313
+object TestDuration {
+ import scala.concurrent.duration.{Duration, SECONDS}
+ val Standard = Duration(2, SECONDS)
+}
+
@RunWith(classOf[JUnit4])
class PipedProcessTest {
class ProcessMock(error: Boolean) extends Process {
@@ -81,7 +87,7 @@ class PipedProcessTest {
val f = Future {
p.callRunAndExitValue(source, sink)
}
- Await.result(f, Duration(1, SECONDS))
+ Await.result(f, TestDuration.Standard)
assert(source.releaseCount == 0)
assert(sink.releaseCount == 0)
assert(a.destroyCount == 0)
@@ -102,7 +108,7 @@ class PipedProcessTest {
p.callRunAndExitValue(source, sink)
}
}
- Await.result(f, Duration(1, SECONDS))
+ Await.result(f, TestDuration.Standard)
assert(source.releaseCount == 1)
assert(sink.releaseCount == 1)
assert(a.destroyCount == 0)
@@ -123,7 +129,7 @@ class PipedProcessTest {
p.callRunAndExitValue(source, sink)
}
}
- Await.result(f, Duration(1, SECONDS))
+ Await.result(f, TestDuration.Standard)
assert(source.releaseCount == 1)
assert(sink.releaseCount == 1)
assert(a.destroyCount == 0)
@@ -142,7 +148,7 @@ class PipedProcessTest {
val f = Future {
p.callRunAndExitValue(source, sink)
}
- Await.result(f, Duration(1, SECONDS))
+ Await.result(f, TestDuration.Standard)
assert(source.releaseCount == 1)
assert(sink.releaseCount == 1)
assert(a.destroyCount == 1)
@@ -161,7 +167,7 @@ class PipedProcessTest {
val f = Future {
p.callRunAndExitValue(source, sink)
}
- Await.result(f, Duration(1, SECONDS))
+ Await.result(f, TestDuration.Standard)
assert(source.releaseCount == 1)
assert(sink.releaseCount == 1)
assert(a.destroyCount == 1)
@@ -235,7 +241,7 @@ class PipeSourceSinkTest {
source.join()
sink.join()
}
- Await.result(f, Duration(1, SECONDS))
+ Await.result(f, TestDuration.Standard)
assert(in.closed == true)
assert(out.closed == true)
assert(source.isReleased == true)
@@ -253,7 +259,7 @@ class PipeSourceSinkTest {
source.release()
sink.release()
}
- Await.result(f, Duration(1, SECONDS))
+ Await.result(f, TestDuration.Standard)
assert(out.closed == true)
assert(source.isReleased == true)
assert(sink.isReleased == true)
@@ -270,13 +276,13 @@ class PipeSourceSinkTest {
source.release()
sink.release()
}
- Await.result(f, Duration(1, SECONDS))
+ Await.result(f, TestDuration.Standard)
assert(in.closed == true)
assert(source.isReleased == true)
assert(sink.isReleased == true)
}
- // PipeSource and PipeSink must release resources when interrupted during copy streams"
+ // PipeSource and PipeSink must release resources when interrupted during copy streams
@Test
def runloopInterrupted() {
val in = new DebugInfinityInputStream
@@ -290,7 +296,7 @@ class PipeSourceSinkTest {
source.release()
sink.release()
}
- Await.result(f, Duration(1, SECONDS))
+ Await.result(f, TestDuration.Standard)
assert(in.closed == true)
assert(out.closed == true)
assert(source.isReleased == true)
diff --git a/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala b/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala
new file mode 100644
index 0000000000..2c3c5134da
--- /dev/null
+++ b/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.nsc.Settings
+import scala.tools.nsc.backend.jvm.AsmUtils
+import scala.tools.nsc.util.ClassPath
+import scala.tools.util.PathResolver
+
+@RunWith(classOf[JUnit4])
+class JrtClassPathTest {
+
+ @Test def lookupJavaClasses(): Unit = {
+ val specVersion = scala.util.Properties.javaSpecVersion
+ // Run the test using the JDK8 or 9 provider for rt.jar depending on the platform the test is running on.
+ val cp: ClassPath =
+ if (specVersion == "" || specVersion == "1.8") {
+ val settings = new Settings()
+ val resolver = new PathResolver(settings)
+ val elements = new ClassPathFactory(settings).classesInPath(resolver.Calculated.javaBootClassPath)
+ AggregateClassPath(elements)
+ }
+ else JrtClassPath().get
+
+ assertEquals(Nil, cp.classes(""))
+ assertTrue(cp.packages("java").toString, cp.packages("java").exists(_.name == "java.lang"))
+ assertTrue(cp.classes("java.lang").exists(_.name == "Object"))
+ val jl_Object = cp.classes("java.lang").find(_.name == "Object").get
+ assertEquals("java/lang/Object", AsmUtils.classFromBytes(jl_Object.file.toByteArray).name)
+ assertTrue(cp.list("java.lang").packages.exists(_.name == "java.lang.annotation"))
+ assertTrue(cp.list("java.lang").classesAndSources.exists(_.name == "Object"))
+ assertTrue(cp.findClass("java.lang.Object").isDefined)
+ assertTrue(cp.findClassFile("java.lang.Object").isDefined)
+ }
+}
diff --git a/test/scalacheck/duration.scala b/test/scalacheck/duration.scala
index 89cb9ff955..fc861b886a 100644
--- a/test/scalacheck/duration.scala
+++ b/test/scalacheck/duration.scala
@@ -32,7 +32,10 @@ object DurationTest extends Properties("Division of Duration by Long") {
val genClose = for {
a <- weightedLong
if a != 0
- b <- choose(Long.MaxValue / a - 10, Long.MaxValue / a + 10)
+ val center = Long.MaxValue / a
+ b <-
+ if (center - 10 < center + 10) choose(center - 10, center + 10)
+ else choose(center + 10, center - 10) // deal with overflow if abs(a) == 1
} yield (a, b)
val genBorderline =
diff --git a/test/scaladoc/run/SI-9704.check b/test/scaladoc/run/SI-9704.check
new file mode 100644
index 0000000000..5a73befd9b
--- /dev/null
+++ b/test/scaladoc/run/SI-9704.check
@@ -0,0 +1,4 @@
+Chain(List(Chain(List(Text(Demonstrates a scala issue in which the closing link tag is duplicated), Text(
+), HtmlTag(<a href="https://link">title</a>), Text(
+), Text()))))
+Done.
diff --git a/test/scaladoc/run/SI-9704.scala b/test/scaladoc/run/SI-9704.scala
new file mode 100644
index 0000000000..e6f071704e
--- /dev/null
+++ b/test/scaladoc/run/SI-9704.scala
@@ -0,0 +1,22 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ override def code = """
+ object Foo {
+ /**
+ * Demonstrates a scala issue in which the closing link tag is duplicated
+ * <a href="https://link">title</a>
+ */
+ def bar = ???
+ }
+ """
+
+ def scaladocSettings = ""
+
+ def testModel(root: Package) = {
+ import access._
+ val thing = root._object("Foo")._method("bar")
+ println(thing.comment.get.short)
+ }
+}
diff --git a/test/scaladoc/run/inlineToStr-strips-unwanted-text.check b/test/scaladoc/run/inlineToStr-strips-unwanted-text.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/inlineToStr-strips-unwanted-text.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/inlineToStr-strips-unwanted-text.scala b/test/scaladoc/run/inlineToStr-strips-unwanted-text.scala
new file mode 100644
index 0000000000..8faaf1b93d
--- /dev/null
+++ b/test/scaladoc/run/inlineToStr-strips-unwanted-text.scala
@@ -0,0 +1,58 @@
+import scala.tools.nsc.doc.html.Page
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ /** This comment contains ^superscript^ */
+ class Foo {
+ /** This comment contains ,,subscript,, */
+ def bar = ???
+
+ /** This comment contains a link [[https://scala.epfl.ch/]] */
+ def baz = ???
+
+ /** This comment contains an <strong>html tag</strong> */
+ def qux = ???
+
+ /** This comment contains a<br> single html tag */
+ def quux = ???
+
+ /** This comment contains nested <strong>html<br> tags</strong> */
+ def quuz = ???
+
+ /** This comment contains a [[corge ,,link with a subscript title,,]] */
+ def corge = ???
+ }
+ """
+ def scaladocSettings = ""
+
+ def testModel(root: Package) = {
+ import scala.tools.nsc.doc.base.comment._
+ import access._
+
+ val foo = root._class("Foo")
+
+ val fooStr = Page.inlineToStr(foo.comment.get.short)
+ assert(fooStr == "This comment contains superscript", fooStr)
+
+ val barStr = Page.inlineToStr(foo._method("bar").comment.get.short)
+ assert(barStr == "This comment contains subscript", barStr)
+
+ val bazStr = Page.inlineToStr(foo._method("baz").comment.get.short)
+ assert(bazStr == "This comment contains a link https://scala.epfl.ch/", bazStr)
+
+ val quxStr = Page.inlineToStr(foo._method("qux").comment.get.short)
+ assert(quxStr == "This comment contains an html tag", quxStr)
+
+ val quuxStr = Page.inlineToStr(foo._method("quux").comment.get.short)
+ assert(quuxStr == "This comment contains a single html tag", quuxStr)
+
+ val quuzStr = Page.inlineToStr(foo._method("quuz").comment.get.short)
+ assert(quuzStr == "This comment contains nested html tags", quuzStr)
+
+ val corgeStr = Page.inlineToStr(foo._method("corge").comment.get.short)
+ assert(corgeStr == "This comment contains a link with a subscript title", corgeStr)
+ }
+}
diff --git a/test/scaladoc/run/shortDescription-annotation.scala b/test/scaladoc/run/shortDescription-annotation.scala
index 0e2950f4f9..4f9a891133 100644
--- a/test/scaladoc/run/shortDescription-annotation.scala
+++ b/test/scaladoc/run/shortDescription-annotation.scala
@@ -1,3 +1,4 @@
+import scala.tools.nsc.doc.html.Page
import scala.tools.nsc.doc.model._
import scala.tools.partest.ScaladocModelTest
@@ -26,30 +27,18 @@ object Test extends ScaladocModelTest {
import scala.tools.nsc.doc.base.comment._
import access._
- def inlineToStr(inl: Inline): String = inl match {
- case Chain(items) => items flatMap (inlineToStr(_)) mkString ""
- case Italic(in) => inlineToStr(in)
- case Bold(in) => inlineToStr(in)
- case Underline(in) => inlineToStr(in)
- case Monospace(in) => inlineToStr(in)
- case Text(text) => text
- case Summary(in) => inlineToStr(in)
- case EntityLink(Text(text), _) => text
- case _ => inl.toString
- }
-
val foo = rootPackage._package("a")._class("Foo")
// Assert that the class has the correct short description
- val classDesc = inlineToStr(foo.comment.get.short)
+ val classDesc = Page.inlineToStr(foo.comment.get.short)
assert(classDesc == "This one should appear", classDesc)
// Assert that the `foo` method has the correct short description
- val fooDesc = inlineToStr(foo._method("foo").comment.get.short)
+ val fooDesc = Page.inlineToStr(foo._method("foo").comment.get.short)
assert(fooDesc == "This comment should appear", fooDesc)
// Assert that the `goo` method has the correct short description
- val gooDesc = inlineToStr(foo._method("goo").comment.get.short)
+ val gooDesc = Page.inlineToStr(foo._method("goo").comment.get.short)
assert(gooDesc == "This comment should appear", gooDesc)
}
}
diff --git a/tools/get-scala-commit-date b/tools/get-scala-commit-date
deleted file mode 100755
index 6511ed98ca..0000000000
--- a/tools/get-scala-commit-date
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env bash
-#
-# Usage: get-scala-commit-date [dir]
-# Figures out current commit date of a git clone.
-# If no dir is given, current working dir is used.
-#
-# Example build version string:
-# 20120312
-#
-
-[[ $# -eq 0 ]] || cd "$1"
-
-if git rev-parse --is-inside-work-tree > /dev/null 2>&1; then
- lastcommitdate=$(git log --format="%ci" HEAD | head -n 1 | cut -d ' ' -f 1)
- lastcommithours=$(git log --format="%ci" HEAD | head -n 1 | cut -d ' ' -f 2)
-else
- lastcommitdate=$(date +%Y-%m-%d)
- lastcommithours=$(date +%H:%M:%S)
-fi
-
-# 20120324
-echo "${lastcommitdate//-/}-${lastcommithours//:/}"
diff --git a/tools/get-scala-commit-date.bat b/tools/get-scala-commit-date.bat
deleted file mode 100644
index 735a80b927..0000000000
--- a/tools/get-scala-commit-date.bat
+++ /dev/null
@@ -1,9 +0,0 @@
-@echo off
-for %%X in (bash.exe) do (set FOUND=%%~$PATH:X)
-if defined FOUND (
- bash "%~dp0\get-scala-commit-date" 2>NUL
-) else (
- rem echo this script does not work with cmd.exe. please, install bash
- echo unknown
- exit 1
-)
diff --git a/tools/get-scala-commit-sha b/tools/get-scala-commit-sha
deleted file mode 100755
index 18289c7ca8..0000000000
--- a/tools/get-scala-commit-sha
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env bash
-#
-# Usage: get-scala-commit-sha [dir]
-# Figures out current commit sha of a git clone.
-# If no dir is given, current working dir is used.
-#
-# Example build version string:
-# 6f1c486d0ba
-#
-
-[[ $# -eq 0 ]] || cd "$1"
-
-if git rev-parse --is-inside-work-tree > /dev/null 2>&1; then
- # printf %016s is not portable for 0-padding, has to be a digit.
- # so we're stuck disassembling it.
- hash=$(git log -1 --format="%H" HEAD)
- hash=${hash#g}
- hash=${hash:0:10}
-else
- hash="unknown"
-fi
-echo "$hash"
diff --git a/tools/get-scala-commit-sha.bat b/tools/get-scala-commit-sha.bat
deleted file mode 100644
index 6559a19120..0000000000
--- a/tools/get-scala-commit-sha.bat
+++ /dev/null
@@ -1,9 +0,0 @@
-@echo off
-for %%X in (bash.exe) do (set FOUND=%%~$PATH:X)
-if defined FOUND (
- bash "%~dp0\get-scala-commit-sha" 2>NUL
-) else (
- rem echo this script does not work with cmd.exe. please, install bash
- echo unknown
- exit 1
-)
diff --git a/versions.properties b/versions.properties
index 26e8324c44..f767dfab7b 100644
--- a/versions.properties
+++ b/versions.properties
@@ -21,7 +21,7 @@ scala-swing.version.number=2.0.0
scala-swing.version.osgi=2.0.0
jline.version=2.14.3
# this one is shaded and embedded in scala-compiler.jar
-scala-asm.version=5.1.0-scala-1
+scala-asm.version=5.1.0-scala-2
# external modules, used internally (not shipped)
partest.version.number=1.1.0