-rw-r--r--  bincompat-backward.whitelist.conf | 5
-rwxr-xr-x  build.xml | 5
-rw-r--r--  spec/01-lexical-syntax.md | 6
-rw-r--r--  spec/13-syntax-summary.md | 4
-rwxr-xr-x  src/compiler/scala/tools/ant/templates/tool-unix.tmpl | 12
-rw-r--r--  src/compiler/scala/tools/ant/templates/tool-windows.tmpl | 2
-rw-r--r--  src/compiler/scala/tools/nsc/CompileClient.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/CompileServer.scala | 39
-rw-r--r--  src/compiler/scala/tools/nsc/CompileSocket.scala | 30
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 152
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala | 7
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/backend/JavaPlatform.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/settings/FscSettings.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Checkable.scala | 48
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/util/ClassPath.scala | 19
-rw-r--r--  src/compiler/scala/tools/util/SocketServer.scala | 4
-rw-r--r--  src/library/scala/Enumeration.scala | 1
-rwxr-xr-x  src/library/scala/collection/IndexedSeqOptimized.scala | 4
-rw-r--r--  src/library/scala/collection/IterableViewLike.scala | 4
-rw-r--r--  src/library/scala/collection/Iterator.scala | 4
-rwxr-xr-x  src/library/scala/collection/JavaConverters.scala | 4
-rw-r--r--  src/library/scala/collection/LinearSeq.scala | 7
-rw-r--r--  src/library/scala/collection/LinearSeqLike.scala | 30
-rwxr-xr-x  src/library/scala/collection/LinearSeqOptimized.scala | 35
-rw-r--r--  src/library/scala/collection/SeqLike.scala | 10
-rw-r--r--  src/library/scala/collection/SeqViewLike.scala | 1
-rw-r--r--  src/library/scala/collection/concurrent/Map.scala | 2
-rw-r--r--  src/library/scala/collection/generic/GenericTraversableTemplate.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/PagedSeq.scala | 8
-rw-r--r--  src/library/scala/collection/immutable/TreeMap.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/TreeSet.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/ArrayBuffer.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/MapLike.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/MultiMap.scala | 7
-rw-r--r--  src/library/scala/collection/mutable/MutableList.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/PriorityQueue.scala | 7
-rw-r--r--  src/library/scala/collection/mutable/StringBuilder.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/ParIterable.scala | 3
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 3
-rw-r--r--  src/library/scala/collection/parallel/ParMapLike.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/ParSetLike.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParMapLike.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParSet.scala | 3
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParSetLike.scala | 2
-rw-r--r--  src/library/scala/concurrent/ExecutionContext.scala | 18
-rw-r--r--  src/library/scala/math/BigDecimal.scala | 4
-rw-r--r--  src/library/scala/runtime/Tuple2Zipped.scala | 4
-rw-r--r--  src/library/scala/runtime/Tuple3Zipped.scala | 7
-rw-r--r--  src/library/scala/sys/SystemProperties.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/Constants.scala | 4
-rw-r--r--  src/reflect/scala/reflect/api/Exprs.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/FlagSets.scala | 6
-rw-r--r--  src/reflect/scala/reflect/api/Mirror.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/Mirrors.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/Names.scala | 8
-rw-r--r--  src/reflect/scala/reflect/api/Printers.scala | 30
-rw-r--r--  src/reflect/scala/reflect/api/StandardDefinitions.scala | 6
-rw-r--r--  src/reflect/scala/reflect/api/Symbols.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/Trees.scala | 8
-rw-r--r--  src/reflect/scala/reflect/internal/Depth.scala | 16
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 13
-rw-r--r--  src/reflect/scala/reflect/internal/pickling/UnPickler.scala | 21
-rw-r--r--  src/reflect/scala/reflect/io/AbstractFile.scala | 16
-rw-r--r--  src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala | 8
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ILoop.scala | 53
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/IMain.scala | 26
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/DocParser.scala | 3
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala | 2
-rw-r--r--  test/disabled/run/t8946.scala | 29
-rw-r--r--  test/files/neg/t2866.check | 17
-rw-r--r--  test/files/neg/t2866.scala | 59
-rw-r--r--  test/files/neg/t5148.check | 7
-rw-r--r--  test/files/neg/t5639b.check | 4
-rw-r--r--  test/files/neg/t5639b/A_1.scala | 17
-rw-r--r--  test/files/neg/t5639b/A_2.scala | 11
-rw-r--r--  test/files/neg/t8534.check | 4
-rw-r--r--  test/files/neg/t8534.scala | 7
-rw-r--r--  test/files/neg/t8534b.check | 4
-rw-r--r--  test/files/neg/t8534b.scala | 4
-rw-r--r--  test/files/neg/t8597.check | 21
-rw-r--r--  test/files/neg/t8597.flags | 1
-rw-r--r--  test/files/neg/t8597.scala | 27
-rw-r--r--  test/files/neg/t8597b.check | 6
-rw-r--r--  test/files/neg/t8597b.flags | 1
-rw-r--r--  test/files/neg/t8597b.scala | 21
-rw-r--r--  test/files/neg/t963.check | 8
-rw-r--r--  test/files/neg/unchecked-abstract.check | 14
-rw-r--r--  test/files/pos/t5639.flags | 1
-rw-r--r--  test/files/pos/t5639/A_1.scala | 17
-rw-r--r--  test/files/pos/t5639/A_2.scala | 11
-rw-r--r--  test/files/pos/t5639/Bar.scala | 7
-rw-r--r--  test/files/pos/t5639/Foo.scala | 7
-rw-r--r--  test/files/pos/t7596/A_1.scala | 10
-rw-r--r--  test/files/pos/t7596/B_2.scala | 19
-rw-r--r--  test/files/pos/t7596b/A.scala | 10
-rw-r--r--  test/files/pos/t7596b/B.scala | 6
-rw-r--r--  test/files/pos/t7596c/A_1.scala | 11
-rw-r--r--  test/files/pos/t7596c/B_2.scala | 9
-rw-r--r--  test/files/pos/t7683-stop-after-parser/ThePlugin.scala | 31
-rw-r--r--  test/files/pos/t7683-stop-after-parser/sample_2.flags | 1
-rw-r--r--  test/files/pos/t7683-stop-after-parser/sample_2.scala | 6
-rw-r--r--  test/files/pos/t7683-stop-after-parser/scalac-plugin.xml | 5
-rw-r--r--  test/files/pos/t9018.scala | 16
-rw-r--r--  test/files/run/iterator-concat.check | 4
-rw-r--r--  test/files/run/iterator-concat.scala | 15
-rw-r--r--  test/files/run/iterator-iterate-lazy.scala | 5
-rw-r--r--  test/files/run/iterators.check | 14
-rw-r--r--  test/files/run/iterators.scala | 168
-rw-r--r--  test/files/run/priorityQueue.scala | 373
-rw-r--r--  test/files/run/t2866.check | 3
-rw-r--r--  test/files/run/t2866.scala | 44
-rw-r--r--  test/files/run/t3516.check | 3
-rw-r--r--  test/files/run/t3516.scala | 13
-rw-r--r--  test/files/run/t5938.scala | 35
-rw-r--r--  test/files/run/t6440.check | 9
-rw-r--r--  test/files/run/t6440.scala | 2
-rw-r--r--  test/files/run/t6502.check | 8
-rw-r--r--  test/files/run/t6502.scala | 101
-rw-r--r--  test/files/run/t7407.flags | 2
-rw-r--r--  test/files/run/t8253.check | 40
-rw-r--r--  test/files/run/t8253.scala | 14
-rw-r--r--  test/files/run/t8502.scala | 41
-rw-r--r--  test/files/run/t8925.check | 2
-rw-r--r--  test/files/run/t8925.flags | 1
-rw-r--r--  test/files/run/t8925.scala | 31
-rw-r--r--  test/files/run/t9003.flags | 1
-rw-r--r--  test/files/run/t9003.scala | 71
-rw-r--r--  test/files/run/t9027.check | 19
-rw-r--r--  test/files/run/t9027.scala | 15
-rw-r--r--  test/files/t8449/Client.scala | 3
-rw-r--r--  test/files/t8449/Test.java | 10
-rw-r--r--  test/junit/scala/collection/IndexedSeqOptimizedTest.scala | 13
-rw-r--r--  test/junit/scala/collection/IterableViewLikeTest.scala | 20
-rw-r--r--  test/junit/scala/collection/IteratorTest.scala | 133
-rw-r--r--  test/junit/scala/collection/immutable/ListTest.scala | 49
-rw-r--r--  test/junit/scala/collection/immutable/PagedSeqTest.scala | 18
-rw-r--r--  test/junit/scala/collection/immutable/TreeMapTest.scala | 20
-rw-r--r--  test/junit/scala/collection/immutable/TreeSetTest.scala | 20
-rw-r--r--  test/junit/scala/math/BigDecimalTest.scala | 6
-rw-r--r--  test/junit/scala/tools/testing/AssertUtil.scala | 20
-rwxr-xr-x  tools/binary-repo-lib.sh | 5
148 files changed, 1750 insertions(+), 835 deletions(-)
diff --git a/bincompat-backward.whitelist.conf b/bincompat-backward.whitelist.conf
index 076b9bb9aa..56d5b0135c 100644
--- a/bincompat-backward.whitelist.conf
+++ b/bincompat-backward.whitelist.conf
@@ -198,6 +198,11 @@ filter {
{
matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$4"
problemName=MissingMethodProblem
+ },
+ // SI-8946
+ {
+ matchName="scala.reflect.runtime.ThreadLocalStorage#MyThreadLocalStorage.values"
+ problemName=MissingMethodProblem
}
]
}
diff --git a/build.xml b/build.xml
index cb72b05e3e..02b98e66d8 100755
--- a/build.xml
+++ b/build.xml
@@ -848,8 +848,7 @@ TODO:
-->
<path id="pack.reflect.files"> <fileset dir="${build-quick.dir}/classes/reflect"/> </path>
- <path id="pack.scalap.files"> <fileset dir="${build-quick.dir}/classes/scalap"/>
- <fileset file="${src.dir}/scalap/decoder.properties"/> </path>
+ <path id="pack.scalap.files"> <fileset dir="${build-quick.dir}/classes/scalap"/> </path>
<path id="pack.partest-extras.files"> <fileset dir="${build-quick.dir}/classes/partest-extras"/> </path>
<path id="pack.partest-javaagent.files"> <fileset dir="${build-quick.dir}/classes/partest-javaagent"/> </path>
@@ -1386,6 +1385,7 @@ TODO:
<echo message="Test pass 1 of 2 using Apache Felix ${osgi.felix.version}"/>
<junit fork="yes" haltonfailure="yes">
<classpath refid="test.osgi.compiler.build.path.felix"/>
+ <jvmarg value="-Duser.home=${user.home}"/>
<batchtest fork="yes" todir="${build-osgi.dir}">
<fileset dir="${test.osgi.classes}">
<include name="**/*Test.class"/>
@@ -1397,6 +1397,7 @@ TODO:
<echo message="Test pass 2 of 2 using Eclipse Equinox ${osgi.equinox.version}"/>
<junit fork="yes" haltonfailure="yes">
<classpath refid="test.osgi.compiler.build.path.equinox"/>
+ <jvmarg value="-Duser.home=${user.home}"/>
<batchtest fork="yes" todir="${build-osgi.dir}">
<fileset dir="${test.osgi.classes}">
<include name="**/*Test.class"/>
diff --git a/spec/01-lexical-syntax.md b/spec/01-lexical-syntax.md
index d5752bbdf0..945dedf99b 100644
--- a/spec/01-lexical-syntax.md
+++ b/spec/01-lexical-syntax.md
@@ -323,14 +323,12 @@ Literal ::= [‘-’] integerLiteral
### Integer Literals
```ebnf
-integerLiteral ::= (decimalNumeral | hexNumeral | octalNumeral)
+integerLiteral ::= (decimalNumeral | hexNumeral)
[‘L’ | ‘l’]
decimalNumeral ::= ‘0’ | nonZeroDigit {digit}
-hexNumeral ::= ‘0’ ‘x’ hexDigit {hexDigit}
-octalNumeral ::= ‘0’ octalDigit {octalDigit}
+hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit {hexDigit}
digit ::= ‘0’ | nonZeroDigit
nonZeroDigit ::= ‘1’ | … | ‘9’
-octalDigit ::= ‘0’ | … | ‘7’
```
Integer literals are usually of type `Int`, or of type
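
For orientation, a minimal Scala sketch (not part of the patch) of the literal forms covered by this grammar change: hex literals accept either case of the radix marker, and leading-zero octal literals are no longer part of the syntax.

object HexLiteralDemo extends App {
  val a = 0xCAFE           // lowercase radix marker
  val b = 0XCAFE           // uppercase radix marker, the form the new (‘x’ | ‘X’) alternative documents
  val c = 0xFFFFFFFFL      // the 'L'/'l' suffix still selects Long
  println(a == b)          // true
  println(c)               // 4294967295
  // val oct = 0777        // octal syntax was dropped; this no longer compiles
}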
diff --git a/spec/13-syntax-summary.md b/spec/13-syntax-summary.md
index 86efcf70a8..2b9571cc73 100644
--- a/spec/13-syntax-summary.md
+++ b/spec/13-syntax-summary.md
@@ -41,7 +41,7 @@ idrest ::= {letter | digit} [‘_’ op]
integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’]
decimalNumeral ::= ‘0’ | nonZeroDigit {digit}
-hexNumeral ::= ‘0’ ‘x’ hexDigit {hexDigit}
+hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit {hexDigit}
digit ::= ‘0’ | nonZeroDigit
nonZeroDigit ::= ‘1’ | … | ‘9’
@@ -210,7 +210,7 @@ grammar.
ClassParams ::= ClassParam {‘,’ ClassParam}
ClassParam ::= {Annotation} {Modifier} [(`val' | `var')]
id ‘:’ ParamType [‘=’ Expr]
- Bindings ::= ‘(’ Binding {‘,’ Binding ‘)’
+ Bindings ::= ‘(’ Binding {‘,’ Binding} ‘)’
Binding ::= (id | ‘_’) [‘:’ Type]
Modifier ::= LocalModifier
diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
index f58223a39e..7acb3632d2 100755
--- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
@@ -86,10 +86,14 @@ fi
TOOL_CLASSPATH="@classpath@"
if [[ -z "$TOOL_CLASSPATH" ]]; then
for ext in "$SCALA_HOME"/lib/* ; do
- if [[ -z "$TOOL_CLASSPATH" ]]; then
- TOOL_CLASSPATH="$ext"
- else
- TOOL_CLASSPATH="${TOOL_CLASSPATH}${SEP}${ext}"
+ file_extension="${ext##*.}"
+ # SI-8967 Only consider directories and files named '*.jar'
+ if [[ -d "$ext" || $file_extension == "jar" ]]; then
+ if [[ -z "$TOOL_CLASSPATH" ]]; then
+ TOOL_CLASSPATH="$ext"
+ else
+ TOOL_CLASSPATH="${TOOL_CLASSPATH}${SEP}${ext}"
+ fi
fi
done
fi
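
The SI-8967 rule restated as a hedged Scala sketch rather than the launcher's bash (the object and method names here are illustrative only): only directories and `*.jar` files under `lib/` contribute to the tool classpath.

import java.io.File

object ToolClasspath {
  // Keep directories and *.jar files, skip everything else in the lib directory.
  def fromLibDir(libDir: File, sep: String = File.pathSeparator): String =
    Option(libDir.listFiles).getOrElse(Array.empty[File])
      .filter(f => f.isDirectory || f.getName.endsWith(".jar"))
      .map(_.getPath)
      .mkString(sep)
}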
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index cf0e003f10..50e44fb669 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -128,7 +128,7 @@ if defined _JAVA_PARAMS set _JAVA_OPTS=%_JAVA_OPTS% %_JAVA_PARAMS%
set _TOOL_CLASSPATH=@classpath@
if "%_TOOL_CLASSPATH%"=="" (
- for %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
+ for %%f in ("!_SCALA_HOME!\lib\*.jar") do call :add_cpath "%%f"
for /d %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
)
diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala
index 3017d8c9cc..f259504473 100644
--- a/src/compiler/scala/tools/nsc/CompileClient.scala
+++ b/src/compiler/scala/tools/nsc/CompileClient.scala
@@ -43,8 +43,8 @@ class StandardCompileClient extends HasCompileSocket with CompileOutputCommon {
info(vmArgs.mkString("[VM arguments: ", " ", "]"))
val socket =
- if (settings.server.value == "") compileSocket.getOrCreateSocket(vmArgs mkString " ", !shutdown)
- else Some(compileSocket.getSocket(settings.server.value))
+ if (settings.server.value == "") compileSocket.getOrCreateSocket(vmArgs mkString " ", !shutdown, settings.port.value)
+ else compileSocket.getSocket(settings.server.value)
socket match {
case Some(sock) => compileOnServer(sock, fscArgs)
diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala
index 029e1c4629..aa02957a6c 100644
--- a/src/compiler/scala/tools/nsc/CompileServer.scala
+++ b/src/compiler/scala/tools/nsc/CompileServer.scala
@@ -6,6 +6,7 @@
package scala.tools.nsc
import java.io.PrintStream
+import io.Directory
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
import scala.reflect.internal.util.{FakePos, Position}
import scala.tools.util.SocketServer
@@ -19,7 +20,7 @@ import settings.FscSettings
* @author Martin Odersky
* @version 1.0
*/
-class StandardCompileServer extends SocketServer {
+class StandardCompileServer(fixPort: Int = 0) extends SocketServer(fixPort) {
lazy val compileSocket: CompileSocket = CompileSocket
private var compiler: Global = null
@@ -166,12 +167,12 @@ class StandardCompileServer extends SocketServer {
}
-object CompileServer extends StandardCompileServer {
+object CompileServer {
/** A directory holding redirected output */
- private lazy val redirectDir = (compileSocket.tmpDir / "output-redirects").createDirectory()
+ //private lazy val redirectDir = (compileSocket.tmpDir / "output-redirects").createDirectory()
- private def createRedirect(filename: String) =
- new PrintStream((redirectDir / filename).createFile().bufferedOutput())
+ private def createRedirect(dir: Directory, filename: String) =
+ new PrintStream((dir / filename).createFile().bufferedOutput())
def main(args: Array[String]) =
execute(() => (), args)
@@ -187,21 +188,33 @@ object CompileServer extends StandardCompileServer {
*/
def execute(startupCallback : () => Unit, args: Array[String]) {
val debug = args contains "-v"
+ var port = 0
+ val i = args.indexOf("-p")
+ if (i >= 0 && args.length > i + 1) {
+ scala.util.control.Exception.ignoring(classOf[NumberFormatException]) {
+ port = args(i + 1).toInt
+ }
+ }
+
+ // Create instance rather than extend to pass a port parameter.
+ val server = new StandardCompileServer(port)
+ val redirectDir = (server.compileSocket.tmpDir / "output-redirects").createDirectory()
+
if (debug) {
- echo("Starting CompileServer on port " + port)
- echo("Redirect dir is " + redirectDir)
+ server.echo("Starting CompileServer on port " + server.port)
+ server.echo("Redirect dir is " + redirectDir)
}
- Console.withErr(createRedirect("scala-compile-server-err.log")) {
- Console.withOut(createRedirect("scala-compile-server-out.log")) {
- Console.err.println("...starting server on socket "+port+"...")
+ Console.withErr(createRedirect(redirectDir, "scala-compile-server-err.log")) {
+ Console.withOut(createRedirect(redirectDir, "scala-compile-server-out.log")) {
+ Console.err.println("...starting server on socket "+server.port+"...")
Console.err.flush()
- compileSocket setPort port
+ server.compileSocket setPort server.port
startupCallback()
- run()
+ server.run()
- compileSocket deletePort port
+ server.compileSocket deletePort server.port
}
}
}
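
The `-p <port>` handling added above amounts to a small, failure-tolerant argument scan, driven from the user-facing `fsc -port <n>` option introduced in FscSettings below. A sketch under the same convention (the helper name is invented; 0 continues to mean "let the OS pick a port"):

import scala.util.Try

object FixedPortArg {
  // Mirrors the parsing added to CompileServer.execute: "-p <n>" if present and numeric, else 0.
  def apply(args: Array[String]): Int = {
    val i = args.indexOf("-p")
    if (i >= 0 && args.length > i + 1) Try(args(i + 1).toInt).getOrElse(0) else 0
  }
}

// FixedPortArg(Array("-v", "-p", "32834"))  == 32834
// FixedPortArg(Array("-v"))                 == 0
// FixedPortArg(Array("-p", "oops"))         == 0   (falls back to an OS-assigned port)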
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index c693fbe8e2..27a14141fa 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -46,6 +46,9 @@ trait HasCompileSocket {
class CompileSocket extends CompileOutputCommon {
protected lazy val compileClient: StandardCompileClient = CompileClient
def verbose = compileClient.verbose
+
+ /* Fixes the port where to start the server, 0 yields some free port */
+ var fixPort = 0
/** The prefix of the port identification file, which is followed
* by the port number.
@@ -64,7 +67,7 @@ class CompileSocket extends CompileOutputCommon {
/** The class name of the scala compile server */
protected val serverClass = "scala.tools.nsc.CompileServer"
- protected def serverClassArgs = if (verbose) List("-v") else Nil // debug
+ protected def serverClassArgs = (if (verbose) List("-v") else Nil) ::: (if (fixPort > 0) List("-p", fixPort.toString) else Nil)
/** A temporary directory to use */
val tmpDir = {
@@ -104,9 +107,14 @@ class CompileSocket extends CompileOutputCommon {
def portFile(port: Int) = portsDir / File(port.toString)
/** Poll for a server port number; return -1 if none exists yet */
- private def pollPort(): Int = portsDir.list.toList match {
+ private def pollPort(): Int = if (fixPort > 0) {
+ if (portsDir.list.toList.exists(_.name == fixPort.toString)) fixPort else -1
+ } else portsDir.list.toList match {
case Nil => -1
- case x :: xs => try x.name.toInt finally xs foreach (_.delete())
+ case x :: xs => try x.name.toInt catch {
+ case e: Exception => x.delete()
+ throw e
+ }
}
/** Get the port number to which a scala compile server is connected;
@@ -152,7 +160,8 @@ class CompileSocket extends CompileOutputCommon {
* create a new daemon if necessary. Returns None if the connection
* cannot be established.
*/
- def getOrCreateSocket(vmArgs: String, create: Boolean = true): Option[Socket] = {
+ def getOrCreateSocket(vmArgs: String, create: Boolean = true, fixedPort: Int = 0): Option[Socket] = {
+ fixPort = fixedPort
val maxMillis = 10L * 1000 // try for 10 seconds
val retryDelay = 50L
val maxAttempts = (maxMillis / retryDelay).toInt
@@ -186,14 +195,17 @@ class CompileSocket extends CompileOutputCommon {
try { Some(x.toInt) }
catch { case _: NumberFormatException => None }
- def getSocket(serverAdr: String): Socket = (
- for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield
+ def getSocket(serverAdr: String): Option[Socket] = (
+ for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield
getSocket(name, port)
) getOrElse fatal("Malformed server address: %s; exiting" format serverAdr)
- def getSocket(hostName: String, port: Int): Socket =
- Socket(hostName, port).opt getOrElse fatal("Unable to establish connection to server %s:%d; exiting".format(hostName, port))
-
+ def getSocket(hostName: String, port: Int): Option[Socket] = {
+ val sock = Socket(hostName, port).opt
+ if (sock.isEmpty) warn("Unable to establish connection to server %s:%d".format(hostName, port))
+ sock
+ }
+
def getPassword(port: Int): String = {
val ff = portFile(port)
val f = ff.bufferedReader()
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 9cc9712b44..e62dfd00a6 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -8,12 +8,13 @@ package tools
package nsc
import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException }
+import java.net.URL
import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException }
import scala.compat.Platform.currentTime
import scala.collection.{ mutable, immutable }
import io.{ SourceReader, AbstractFile, Path }
import reporters.{ Reporter, ConsoleReporter }
-import util.{ ClassPath, StatisticsInfo, returning, stackTraceString }
+import util.{ ClassPath, MergedClassPath, StatisticsInfo, returning, stackTraceString }
import scala.reflect.ClassTag
import scala.reflect.internal.util.{ OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
@@ -841,6 +842,150 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
} reverse
}
+ // ------------ REPL utilities ---------------------------------
+
+ /** Extend classpath of `platform` and rescan updated packages. */
+ def extendCompilerClassPath(urls: URL*): Unit = {
+ val newClassPath = platform.classPath.mergeUrlsIntoClassPath(urls: _*)
+ platform.currentClassPath = Some(newClassPath)
+ // Reload all specified jars into this compiler instance
+ invalidateClassPathEntries(urls.map(_.getPath): _*)
+ }
+
+ // ------------ Invalidations ---------------------------------
+
+ /** Is given package class a system package class that cannot be invalidated?
+ */
+ private def isSystemPackageClass(pkg: Symbol) =
+ pkg == RootClass || (pkg.hasTransOwner(definitions.ScalaPackageClass) && !pkg.hasTransOwner(this.rootMirror.staticPackage("scala.tools").moduleClass.asClass))
+
+ /** Invalidates packages that contain classes defined in a classpath entry, and
+ * rescans that entry.
+ *
+ * First, the classpath entry referred to by one of the `paths` is rescanned,
+ * so that any new files or changes in subpackages are picked up.
+ * Second, any packages for which one of the following conditions is met is invalidated:
+ * - the classpath entry contained during the last compilation run now contains classfiles
+ * that represent a member in the package;
+ * - the classpath entry now contains classfiles that represent a member in the package;
+ * - the set of subpackages has changed.
+ *
+ * The invalidated packages are reset in their entirety; all member classes and member packages
+ * are re-accessed using the new classpath.
+ *
+ * System packages that the compiler needs to access as part of standard definitions
+ * are not invalidated. A system package is:
+ * Any package rooted in "scala", with the exception of packages rooted in "scala.tools".
+ *
+ * @param paths Fully-qualified names that refer to directories or jar files that are
+ * entries on the classpath.
+ */
+ def invalidateClassPathEntries(paths: String*): Unit = {
+ implicit object ClassPathOrdering extends Ordering[PlatformClassPath] {
+ def compare(a:PlatformClassPath, b:PlatformClassPath) = a.asClasspathString compare b.asClasspathString
+ }
+ val invalidated, failed = new mutable.ListBuffer[ClassSymbol]
+ classPath match {
+ case cp: MergedClassPath[_] =>
+ def assoc(path: String): List[(PlatformClassPath, PlatformClassPath)] = {
+ val dir = AbstractFile.getDirectory(path)
+ val canonical = dir.canonicalPath
+ def matchesCanonical(e: ClassPath[_]) = e.origin match {
+ case Some(opath) =>
+ AbstractFile.getDirectory(opath).canonicalPath == canonical
+ case None =>
+ false
+ }
+ cp.entries find matchesCanonical match {
+ case Some(oldEntry) =>
+ List(oldEntry -> cp.context.newClassPath(dir))
+ case None =>
+ error(s"Error adding entry to classpath. During invalidation, no entry named $path in classpath $classPath")
+ List()
+ }
+ }
+ val subst = immutable.TreeMap(paths flatMap assoc: _*)
+ if (subst.nonEmpty) {
+ platform updateClassPath subst
+ informProgress(s"classpath updated on entries [${subst.keys mkString ","}]")
+ def mkClassPath(elems: Iterable[PlatformClassPath]): PlatformClassPath =
+ if (elems.size == 1) elems.head
+ else new MergedClassPath(elems, classPath.context)
+ val oldEntries = mkClassPath(subst.keys)
+ val newEntries = mkClassPath(subst.values)
+ mergeNewEntries(newEntries, RootClass, Some(classPath), Some(oldEntries), invalidated, failed)
+ }
+ }
+ def show(msg: String, syms: scala.collection.Traversable[Symbol]) =
+ if (syms.nonEmpty)
+ informProgress(s"$msg: ${syms map (_.fullName) mkString ","}")
+ show("invalidated packages", invalidated)
+ show("could not invalidate system packages", failed)
+ }
+
+ /** Merges new classpath entries into the symbol table
+ *
+ * @param newEntries The new classpath entries
+ * @param root The root symbol to be resynced (a package class)
+ * @param allEntries Optionally, the corresponding package in the complete current classpath
+ * @param oldEntries Optionally, the corresponding package in the old classpath entries
+ * @param invalidated A listbuffer collecting the invalidated package classes
+ * @param failed A listbuffer collecting system package classes which could not be invalidated
+ *
+ * The merging strategy is determined by the absence or presence of classes and packages.
+ *
+ * If either oldEntries or newEntries contains classes, root is invalidated provided that a corresponding package
+ * exists in allEntries. Otherwise it is removed.
+ * Otherwise, the action is determined by the following matrix, with columns:
+ *
+ * old sym action
+ * + + recurse into all child packages of newEntries
+ * - + invalidate root
+ * - - create and enter root
+ *
+ * Here, old means classpath, and sym means symboltable. + is presence of an entry in its column, - is absence.
+ */
+ private def mergeNewEntries(newEntries: PlatformClassPath, root: ClassSymbol,
+ allEntries: OptClassPath, oldEntries: OptClassPath,
+ invalidated: mutable.ListBuffer[ClassSymbol], failed: mutable.ListBuffer[ClassSymbol]) {
+ ifDebug(informProgress(s"syncing $root, $oldEntries -> $newEntries"))
+
+ val getName: ClassPath[AbstractFile] => String = (_.name)
+ def hasClasses(cp: OptClassPath) = cp.isDefined && cp.get.classes.nonEmpty
+ def invalidateOrRemove(root: ClassSymbol) = {
+ allEntries match {
+ case Some(cp) => root setInfo new loaders.PackageLoader(cp)
+ case None => root.owner.info.decls unlink root.sourceModule
+ }
+ invalidated += root
+ }
+ def subPackage(cp: PlatformClassPath, name: String): OptClassPath =
+ cp.packages find (cp1 => getName(cp1) == name)
+
+ val classesFound = hasClasses(oldEntries) || newEntries.classes.nonEmpty
+ if (classesFound && !isSystemPackageClass(root)) {
+ invalidateOrRemove(root)
+ } else {
+ if (classesFound) {
+ if (root.isRoot) invalidateOrRemove(EmptyPackageClass)
+ else failed += root
+ }
+ if (!oldEntries.isDefined) invalidateOrRemove(root)
+ else
+ for (pstr <- newEntries.packages.map(getName)) {
+ val pname = newTermName(pstr)
+ val pkg = (root.info decl pname) orElse {
+ // package does not exist in symbol table, create symbol to track it
+ assert(!subPackage(oldEntries.get, pstr).isDefined)
+ loaders.enterPackage(root, pstr, new loaders.PackageLoader(allEntries.get))
+ }
+ mergeNewEntries(subPackage(newEntries, pstr).get, pkg.moduleClass.asClass,
+ subPackage(allEntries.get, pstr), subPackage(oldEntries.get, pstr),
+ invalidated, failed)
+ }
+ }
+ }
+
// ----------- Runs ---------------------------------------
private var curRun: Run = null
@@ -1445,10 +1590,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
- /** Reset package class to state at typer (not sure what this
- * is needed for?)
+ /** Reset package class to state at typer (not sure what this is needed for?)
*/
- private def resetPackageClass(pclazz: Symbol) {
+ private def resetPackageClass(pclazz: Symbol): Unit = if (typerPhase != NoPhase) {
enteringPhase(firstPhase) {
pclazz.setInfo(enteringPhase(typerPhase)(pclazz.info))
}
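
A sketch of how the new hooks are meant to be driven, for example by the REPL's `:require`. Here `global` is assumed to be an already-initialized `scala.tools.nsc.Global`, and the wrapper name is invented for illustration; only `extendCompilerClassPath` comes from the patch.

import java.io.File
import scala.tools.nsc.Global

object RequireJar {
  // Merge a jar into the platform classpath and rescan the packages it touches,
  // so code compiled afterwards can see its classes.
  def apply(global: Global, jar: File): Unit =
    global.extendCompilerClassPath(jar.toURI.toURL)
}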
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index d3f495f280..f1517e56a0 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -346,12 +346,11 @@ trait MarkupParsers {
// parse more XML ?
if (charComingAfter(xSpaceOpt()) == '<') {
- xSpaceOpt()
- while (ch == '<') {
+ do {
+ xSpaceOpt()
nextch()
ts append element
- xSpaceOpt()
- }
+ } while (charComingAfter(xSpaceOpt()) == '<')
handle.makeXMLseq(r2p(start, start, curOffset), ts)
}
else {
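
The do/while rewrite concerns whitespace between consecutive XML literals; assuming the scala-xml module is on the classpath, the observable effect is that a whitespace-separated run of elements parses as one node sequence. A small illustration, not taken from the test suite:

object XmlSeqDemo extends App {
  val nodes = <a/> <b/> <c/>      // whitespace between elements no longer breaks the sequence
  println(nodes.length)           // 3
}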
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index 1abc0c860c..8cd915bf22 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -184,7 +184,8 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
)
val uri1 = attrMap(z) match {
- case Apply(_, List(uri @ Literal(Constant(_)))) => mkAssign(uri)
+ case Apply(Select(New(Select(Select(Select(Ident(nme.ROOTPKG), nme.scala_), nme.xml), tpnme.Text)), nme.CONSTRUCTOR), List(uri @ Literal(Constant(_)))) =>
+ mkAssign(uri)
case Select(_, nme.Nil) => mkAssign(const(null)) // allow for xmlns="" -- bug #1626
case x => mkAssign(x)
}
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index 7236bf70d5..4877bd9b80 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -16,7 +16,7 @@ trait JavaPlatform extends Platform {
import global._
import definitions._
- private var currentClassPath: Option[MergedClassPath[AbstractFile]] = None
+ private[nsc] var currentClassPath: Option[MergedClassPath[AbstractFile]] = None
def classPath: ClassPath[AbstractFile] = {
if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result)
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 351eb23c4c..aa18b26d93 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -26,7 +26,7 @@ import scala.reflect.internal.util.NoSourceFile
* where `p` is defined in a library L, and is accessed from a library C (for Client),
* where C was compiled against L', an optimized version of L where the inliner made `p` public at the bytecode level.
* The only such members are fields, either synthetic or isParamAccessor, and thus having a dollar sign in their name
- * (the accesibility of methods and constructors isn't touched by the inliner).
+ * (the accessibility of methods and constructors isn't touched by the inliner).
*
* Thus we add one more goal to our list:
* (c) Compile C (either optimized or not) against any of L or L',
diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
index 8c2b510bfd..fffbb4333f 100644
--- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
@@ -22,13 +22,15 @@ class FscSettings(error: String => Unit) extends Settings(error) {
val reset = BooleanSetting("-reset", "Reset compile server caches")
val shutdown = BooleanSetting("-shutdown", "Shutdown compile server")
val server = StringSetting ("-server", "hostname:portnumber", "Specify compile server socket", "")
+ val port = IntSetting ("-port", "Search and start compile server in given port only",
+ 0, Some((0, Int.MaxValue)), (_: String) => None)
val preferIPv4 = BooleanSetting("-ipv4", "Use IPv4 rather than IPv6 for the server socket")
val idleMins = IntSetting ("-max-idle", "Set idle timeout in minutes for fsc (use 0 for no timeout)",
30, Some((0, Int.MaxValue)), (_: String) => None)
// For improved help output, separating fsc options from the others.
def fscSpecific = Set[Settings#Setting](
- currentDir, reset, shutdown, server, preferIPv4, idleMins
+ currentDir, reset, shutdown, server, port, preferIPv4, idleMins
)
val isFscSpecific: String => Boolean = fscSpecific map (_.name)
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
index d862805a07..22661d6ccf 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -544,10 +544,17 @@ trait MatchTranslation {
// wrong when isSeq, and resultInMonad should always be correct since it comes
// directly from the extractor's result type
val binder = freshSym(pos, pureType(resultInMonad))
+ val potentiallyMutableBinders: Set[Symbol] =
+ if (extractorApply.tpe.typeSymbol.isNonBottomSubClass(OptionClass) && !aligner.isSeq)
+ Set.empty
+ else
+ // Ensures we capture unstable bound variables eagerly. These can arise under name based patmat or by indexing into mutable Seqs. See run t9003.scala
+ subPatBinders.toSet
ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(
subPatBinders,
subPatRefs(binder),
+ potentiallyMutableBinders,
aligner.isBool,
checkedLength,
patBinderOrCasted,
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
index 3abec521df..3fd9ce76f8 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -192,13 +192,14 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol)(
val subPatBinders: List[Symbol],
val subPatRefs: List[Tree],
+ val potentiallyMutableBinders: Set[Symbol],
extractorReturnsBoolean: Boolean,
val checkedLength: Option[Int],
val prevBinder: Symbol,
val ignoredSubPatBinders: Set[Symbol]
) extends FunTreeMaker with PreserveSubPatBinders {
- def extraStoredBinders: Set[Symbol] = Set()
+ def extraStoredBinders: Set[Symbol] = potentiallyMutableBinders
debug.patmat(s"""
|ExtractorTreeMaker($extractor, $extraCond, $nextBinder) {
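
The point of `potentiallyMutableBinders` is easiest to see from the run/t9003 scenario referenced above: a binder obtained by indexing into a mutable sequence must be captured before the case body runs, otherwise a later mutation shows through. A minimal sketch of that scenario:

import scala.collection.mutable.ArrayBuffer

object MutableBinderDemo extends App {
  val buf = ArrayBuffer(1, 2)
  buf match {
    case Seq(a, b) =>
      buf(0) = 99          // mutate the scrutinee after the pattern has matched
      println(a)           // with eager capture this prints 1, not 99
  }
}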
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
index 3a77cab919..fc632e0d0d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -11,12 +11,28 @@ import scala.language.postfixOps
/** On pattern matcher checkability:
*
+ * The spec says that case _: List[Int] should be always issue
+ * an unchecked warning:
+ *
+ * > Types which are not of one of the forms described above are
+ * > also accepted as type patterns. However, such type patterns
+ * > will be translated to their erasure (§3.7). The Scala compiler
+ * > will issue an “unchecked” warning for these patterns to flag
+ * > the possible loss of type-safety.
+ *
+ * But the implementation goes a little further to omit warnings
+ * based on the static type of the scrutinee. As a trivial example:
+ *
+ * def foo(s: Seq[Int]) = s match { case _: List[Int] => }
+ *
+ * need not issue this warning.
+ *
* Consider a pattern match of this form: (x: X) match { case _: P => }
*
* There are four possibilities to consider:
* [P1] X will always conform to P
* [P2] x will never conform to P
- * [P3] X <: P if some runtime test is true
+ * [P3] X will conform to P if some runtime test is true
* [P4] X cannot be checked against P
*
* The first two cases correspond to those when there is enough
@@ -28,6 +44,11 @@ import scala.language.postfixOps
* which is essentially the intersection of X and |P|, where |P| is
* the erasure of P. If XR <: P, then no warning is emitted.
*
+ * We evaluate "X will conform to P" by checking `X <: P_wild`, where
+ * P_wild is the result of substituting wildcard types in place of
+ * pattern type variables. This is intentionally stricter than
+ * (X matchesPattern P), see SI-8597 for motivating test cases.
+ *
* Examples of how this info is put to use:
* sealed trait A[T] ; class B[T] extends A[T]
* def f(x: B[Int]) = x match { case _: A[Int] if true => }
@@ -100,7 +121,7 @@ trait Checkable {
private def typeArgsInTopLevelType(tp: Type): List[Type] = {
val tps = tp match {
case RefinedType(parents, _) => parents flatMap typeArgsInTopLevelType
- case TypeRef(_, ArrayClass, arg :: Nil) => typeArgsInTopLevelType(arg)
+ case TypeRef(_, ArrayClass, arg :: Nil) => if (arg.typeSymbol.isAbstractType) arg :: Nil else typeArgsInTopLevelType(arg)
case TypeRef(pre, sym, args) => typeArgsInTopLevelType(pre) ++ args
case ExistentialType(tparams, underlying) => tparams.map(_.tpe) ++ typeArgsInTopLevelType(underlying)
case _ => Nil
@@ -108,14 +129,31 @@ trait Checkable {
tps filterNot isUnwarnableTypeArg
}
+ private def scrutConformsToPatternType(scrut: Type, pattTp: Type): Boolean = {
+ def typeVarToWildcard(tp: Type) = {
+ // The need for typeSymbolDirect is demonstrated in neg/t8597b.scala
+ if (tp.typeSymbolDirect.isPatternTypeVariable) WildcardType else tp
+ }
+ val pattTpWild = pattTp.map(typeVarToWildcard)
+ scrut <:< pattTpWild
+ }
+
private class CheckabilityChecker(val X: Type, val P: Type) {
def Xsym = X.typeSymbol
def Psym = P.typeSymbol
- def XR = if (Xsym == AnyClass) classExistentialType(Psym) else propagateKnownTypes(X, Psym)
+ def PErased = {
+ P match {
+ case erasure.GenericArray(n, core) => existentialAbstraction(core.typeSymbol :: Nil, P)
+ case _ => existentialAbstraction(Psym.typeParams, Psym.tpe_*)
+ }
+ }
+ def XR = if (Xsym == AnyClass) PErased else propagateKnownTypes(X, Psym)
+
+
// sadly the spec says (new java.lang.Boolean(true)).isInstanceOf[scala.Boolean]
- def P1 = X matchesPattern P
+ def P1 = scrutConformsToPatternType(X, P)
def P2 = !Psym.isPrimitiveValueClass && isNeverSubType(X, P)
- def P3 = isNonRefinementClassType(P) && (XR matchesPattern P)
+ def P3 = isNonRefinementClassType(P) && scrutConformsToPatternType(XR, P)
def P4 = !(P1 || P2 || P3)
def summaryString = f"""
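
Some representative inputs for the stricter rule, as a sketch to be compiled with -unchecked; the first case is the example from the comment above, the last is the SI-8597 shape that previously escaped the warning.

object UncheckedDemo {
  // No warning: a Seq[Int] scrutinee statically conforms to List[Int].
  def f1(s: Seq[Int]) = s match { case _: List[Int] => true; case _ => false }

  // Warns: the String type argument is erased, so the test cannot be checked at runtime.
  def f2(x: Any) = x match { case _: List[String] => true; case _ => false }

  // SI-8597: a pattern type variable no longer suppresses the warning.
  def f3[T](x: Any) = x match { case _: Some[T] => true; case _ => false }
}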
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index e278130437..8c2bc316ec 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -104,7 +104,7 @@ trait Contexts { self: Analyzer =>
// there must be a scala.xml package when xml literals were parsed in this unit
if (unit.hasXml && ScalaXmlPackage == NoSymbol)
- reporter.error(unit.firstXmlPos, "To compile XML syntax, the scala.xml package must be on the classpath.\nPlease see http://docs.scala-lang.org/overviews/core/scala-2.11.html#scala-xml.")
+ reporter.error(unit.firstXmlPos, "To compile XML syntax, the scala.xml package must be on the classpath.\nPlease see https://github.com/scala/scala-xml for details.")
// scala-xml needs `scala.xml.TopScope` to be in scope globally as `$scope`
// We detect `scala-xml` by looking for `scala.xml.TopScope` and
@@ -798,7 +798,7 @@ trait Contexts { self: Analyzer =>
isAccessible(sym, pre) &&
!(imported && {
val e = scope.lookupEntry(name)
- (e ne null) && (e.owner == scope)
+ (e ne null) && (e.owner == scope) && (!settings.isScala212 || e.sym.exists)
})
private def collectImplicits(syms: Scope, pre: Type, imported: Boolean = false): List[ImplicitInfo] =
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index a182a2e269..4d9a6a47ef 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -870,13 +870,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def adaptType(): Tree = {
// @M When not typing a type constructor (!context.inTypeConstructorAllowed)
- // or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *,
+ // or raw type, types must be of kind *,
// and thus parameterized types must be applied to their type arguments
// @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
def properTypeRequired = (
tree.hasSymbolField
&& !context.inTypeConstructorAllowed
- && !(tree.symbol.isJavaDefined && context.unit.isJava)
+ && !context.unit.isJava
)
// @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!!
// (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!)
@@ -5183,16 +5183,19 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
typed(tree.ref, MonoQualifierModes | mode.onlyTypePat, AnyRefTpe)
}
- if (!refTyped.isErrorTyped)
+ if (refTyped.isErrorTyped) {
+ setError(tree)
+ } else {
tree setType refTyped.tpe.resultType
-
- if (treeInfo.admitsTypeSelection(refTyped)) tree
- else UnstableTreeError(refTyped)
+ if (refTyped.isErrorTyped || treeInfo.admitsTypeSelection(refTyped)) tree
+ else UnstableTreeError(tree)
+ }
}
def typedSelectFromTypeTree(tree: SelectFromTypeTree) = {
val qual1 = typedType(tree.qualifier, mode)
- if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual1)
+ if (qual1.isErrorTyped) setError(treeCopy.SelectFromTypeTree(tree, qual1, tree.name))
+ else if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual1)
else typedSelect(tree, qual1, tree.name)
}
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index e89f08ec6b..e78dee5eee 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -197,6 +197,23 @@ abstract class ClassPath[T] {
def packages: IndexedSeq[ClassPath[T]]
def sourcepaths: IndexedSeq[AbstractFile]
+ /** The entries this classpath is composed of. In class `ClassPath` it's just the singleton list containing `this`.
+ * Subclasses such as `MergedClassPath` typically return lists with more elements.
+ */
+ def entries: IndexedSeq[ClassPath[T]] = IndexedSeq(this)
+
+ /** Merge classpath of `platform` and `urls` into merged classpath */
+ def mergeUrlsIntoClassPath(urls: URL*): MergedClassPath[T] = {
+ // Collect our new jars/directories and add them to the existing set of classpaths
+ val allEntries =
+ (entries ++
+ urls.map(url => context.newClassPath(io.AbstractFile.getURL(url)))
+ ).distinct
+
+ // Combine all of our classpaths (old and new) into one merged classpath
+ new MergedClassPath(allEntries, context)
+ }
+
/**
* Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader.
*/
@@ -322,7 +339,7 @@ extends MergedClassPath[T](original.entries map (e => subst getOrElse (e, e)), o
* A classpath unifying multiple class- and sourcepath entries.
*/
class MergedClassPath[T](
- val entries: IndexedSeq[ClassPath[T]],
+ override val entries: IndexedSeq[ClassPath[T]],
val context: ClassPathContext[T])
extends ClassPath[T] {
def this(entries: TraversableOnce[ClassPath[T]], context: ClassPathContext[T]) =
diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/util/SocketServer.scala
index 1d39a59cf4..7858bf0658 100644
--- a/src/compiler/scala/tools/util/SocketServer.scala
+++ b/src/compiler/scala/tools/util/SocketServer.scala
@@ -28,12 +28,12 @@ trait CompileOutputCommon {
* @author Martin Odersky
* @version 1.0
*/
-abstract class SocketServer extends CompileOutputCommon {
+abstract class SocketServer(fixPort: Int = 0) extends CompileOutputCommon {
def shutdown: Boolean
def session(): Unit
def timeout(): Unit = () // called after a timeout is detected for subclasses to cleanup
// a hook for subclasses
- protected def createServerSocket(): ServerSocket = new ServerSocket(0)
+ protected def createServerSocket(): ServerSocket = new ServerSocket(fixPort)
var in: BufferedReader = _
var out: PrintWriter = _
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index d4b9c17eab..e11d1b35d7 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -239,6 +239,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
*
* @param nnIds The set of ids of values (adjusted so that the lowest value does
* not fall below zero), organized as a `BitSet`.
+ * @define Coll `collection.immutable.SortedSet`
*/
class ValueSet private[ValueSet] (private[this] var nnIds: immutable.BitSet)
extends AbstractSet[Value]
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
index 42cb37aa24..a7e06b4d1a 100755
--- a/src/library/scala/collection/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -141,10 +141,10 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
def drop(n: Int): Repr = slice(n, length)
override /*IterableLike*/
- def takeRight(n: Int): Repr = slice(length - n, length)
+ def takeRight(n: Int): Repr = slice(length - math.max(n, 0), length)
override /*IterableLike*/
- def dropRight(n: Int): Repr = slice(0, length - n)
+ def dropRight(n: Int): Repr = slice(0, length - math.max(n, 0))
override /*TraversableLike*/
def splitAt(n: Int): (Repr, Repr) = (take(n), drop(n))
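
The clamp gives negative counts a uniform meaning, the same as zero; the view, TreeMap, and TreeSet changes below apply the identical rule. A quick behavioural check through `ArrayOps`, which mixes in `IndexedSeqOptimized`:

object NegativeCountDemo extends App {
  val xs = Array(1, 2, 3)
  println(xs.takeRight(-1).toList)   // List()        -- same as takeRight(0)
  println(xs.dropRight(-1).toList)   // List(1, 2, 3) -- same as dropRight(0)
}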
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index 668190f700..b84d90c51b 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -150,10 +150,10 @@ trait IterableViewLike[+A,
sliding(size, 1) // we could inherit this, but that implies knowledge of the way the super class is implemented.
override def dropRight(n: Int): This =
- take(thisSeq.length - n)
+ take(thisSeq.length - math.max(n, 0))
override def takeRight(n: Int): This =
- drop(thisSeq.length - n)
+ drop(thisSeq.length - math.max(n, 0))
override def stringPrefix = "IterableView"
}
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 0115cc154c..20712f918c 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -322,8 +322,8 @@ trait Iterator[+A] extends TraversableOnce[A] {
*/
def drop(n: Int): Iterator[A] = {
var j = 0
- while (j < n && this.hasNext) {
- this.next
+ while (j < n && hasNext) {
+ next()
j += 1
}
this
diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala
index a4fa58b13c..875f6e1c02 100755
--- a/src/library/scala/collection/JavaConverters.scala
+++ b/src/library/scala/collection/JavaConverters.scala
@@ -37,8 +37,8 @@ import convert._
* val sl2 : scala.collection.mutable.Buffer[Int] = jl.asScala
* assert(sl eq sl2)
* }}}
- * The following conversions also are supported, but the
- * direction Scala to Java is done my a more specifically named method:
+ * The following conversions are also supported, but the
+ * direction from Scala to Java is done by the more specifically named methods:
* `asJavaCollection`, `asJavaEnumeration`, `asJavaDictionary`.
*
* - `scala.collection.Iterable` <=> `java.util.Collection`
diff --git a/src/library/scala/collection/LinearSeq.scala b/src/library/scala/collection/LinearSeq.scala
index 49fbb902ab..5a7bb5891e 100644
--- a/src/library/scala/collection/LinearSeq.scala
+++ b/src/library/scala/collection/LinearSeq.scala
@@ -15,7 +15,14 @@ import generic._
import mutable.Builder
/** A base trait for linear sequences.
+ *
* $linearSeqInfo
+ *
+ * @define linearSeqInfo
+ * Linear sequences have reasonably efficient `head`, `tail`, and `isEmpty` methods.
+ * If these methods provide the fastest way to traverse the collection, a
+ * collection `Coll` that extends this trait should also extend
+ * `LinearSeqOptimized[A, Coll[A]]`.
*/
trait LinearSeq[+A] extends Seq[A]
with GenericTraversableTemplate[A, LinearSeq]
diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala
index ff7985bf0d..96e2135fd1 100644
--- a/src/library/scala/collection/LinearSeqLike.scala
+++ b/src/library/scala/collection/LinearSeqLike.scala
@@ -14,22 +14,10 @@ import scala.annotation.tailrec
/** A template trait for linear sequences of type `LinearSeq[A]`.
*
- * $linearSeqInfo
- *
- * This trait just implements `iterator` in terms of `isEmpty, ``head`, and `tail`.
- * However, see `LinearSeqOptimized` for an implementation trait that overrides operations
+ * This trait just implements `iterator` and `corresponds` in terms of `isEmpty, ``head`, and `tail`.
+ * However, see `LinearSeqOptimized` for an implementation trait that overrides many more operations
* to make them run faster under the assumption of fast linear access with `head` and `tail`.
*
- * @define linearSeqInfo
- * Linear sequences are defined in terms of three abstract methods, which are assumed
- * to have efficient implementations. These are:
- * {{{
- * def isEmpty: Boolean
- * def head: A
- * def tail: Repr
- * }}}
- * Here, `A` is the type of the sequence elements and `Repr` is the type of the sequence itself.
- *
* Linear sequences do not add any new methods to `Seq`, but promise efficient implementations
* of linear access patterns.
* @author Martin Odersky
@@ -58,12 +46,18 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr
val result = these.head; these = these.tail; result
} else Iterator.empty.next()
- /** Have to clear `these` so the iterator is exhausted like
- * it would be without the optimization.
- */
override def toList: List[A] = {
+ /* Have to clear `these` so the iterator is exhausted like
+ * it would be without the optimization.
+ *
+ * Calling "newBuilder.result()" in toList method
+ * prevents original seq from garbage collection,
+ * so we use these.take(0) here.
+ *
+ * Check SI-8924 for details
+ */
val xs = these.toList
- these = newBuilder.result()
+ these = these.take(0)
xs
}
}
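
The externally observable behaviour of the `toList` shortcut is unchanged by the SI-8924 fix; only the retained reference to the original sequence goes away. A quick check of that contract:

object IteratorToListDemo extends App {
  val it = List(1, 2, 3).iterator
  it.next()
  println(it.toList)    // List(2, 3)
  println(it.hasNext)   // false -- `these` is cleared, now via these.take(0)
}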
diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala
index a28d796d5b..64248aa755 100755
--- a/src/library/scala/collection/LinearSeqOptimized.scala
+++ b/src/library/scala/collection/LinearSeqOptimized.scala
@@ -13,10 +13,24 @@ import mutable.ListBuffer
import immutable.List
import scala.annotation.tailrec
-/** A template trait for linear sequences of type `LinearSeq[A]` which optimizes
- * the implementation of several methods under the assumption of fast linear access.
+/** A template trait for linear sequences of type `LinearSeq[A]` which optimizes
+ * the implementation of various methods under the assumption of fast linear access.
+ *
+ * $linearSeqOptim
+ *
+ * @define linearSeqOptim
+ * Linear-optimized sequences implement most operations in terms of three methods,
+ * which are assumed to have efficient implementations. These are:
+ * {{{
+ * def isEmpty: Boolean
+ * def head: A
+ * def tail: Repr
+ * }}}
+ * Here, `A` is the type of the sequence elements and `Repr` is the type of the sequence itself.
+ * Note that default implementations are provided via inheritance, but these
+ * should be overridden for performance.
+ *
*
- * $linearSeqInfo
*/
trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends LinearSeqLike[A, Repr] { self: Repr =>
@@ -235,13 +249,16 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
override /*IterableLike*/
def sameElements[B >: A](that: GenIterable[B]): Boolean = that match {
case that1: LinearSeq[_] =>
- var these = this
- var those = that1
- while (!these.isEmpty && !those.isEmpty && these.head == those.head) {
- these = these.tail
- those = those.tail
+ // Probably immutable, so check reference identity first (it's quick anyway)
+ (this eq that1) || {
+ var these = this
+ var those = that1
+ while (!these.isEmpty && !those.isEmpty && these.head == those.head) {
+ these = these.tail
+ those = those.tail
+ }
+ these.isEmpty && those.isEmpty
}
- these.isEmpty && those.isEmpty
case _ =>
super.sameElements(that)
}
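
The added `eq` check is purely a fast path; the semantics of `sameElements` are unchanged:

object SameElementsDemo extends App {
  val xs = List(1, 2, 3)
  println(xs sameElements xs)              // true, short-circuits on reference identity
  println(xs sameElements List(1, 2, 3))   // true, compared element by element as before
  println(xs sameElements List(1, 2))      // false
}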
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index fdfb1f2efc..329273df5b 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -140,7 +140,15 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
if (isEmpty) Iterator(repr)
else new PermutationsItr
- /** Iterates over combinations.
+ /** Iterates over combinations. A _combination_ of length `n` is a subsequence of
+ * the original sequence, with the elements taken in order. Thus, `"xy"` and `"yy"`
+ * are both length-2 combinations of `"xyy"`, but `"yx"` is not. If there is
+ * more than one way to generate the same subsequence, only one will be returned.
+ *
+ * For example, `"xyyy"` has three different ways to generate `"xy"` depending on
+ * whether the first, second, or third `"y"` is selected. However, since all are
+ * identical, only one will be chosen. Which of the three will be taken is an
+ * implementation detail that is not defined.
*
* @return An Iterator which traverses the possible n-element combinations of this $coll.
* @example `"abbbc".combinations(2) = Iterator(ab, ac, bb, bc)`
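
The wording above pins down what `combinations` treats as distinct; concretely:

object CombinationsDemo extends App {
  println("xyy".combinations(2).toList)    // List(xy, yy) -- "yx" is not in order, so it is not a combination
  println("abbbc".combinations(2).toList)  // List(ab, ac, bb, bc) -- each repeated subsequence appears once
}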
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index e719f19c78..ef6d2272cb 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -83,6 +83,7 @@ trait SeqViewLike[+A,
}
def length = index(self.length)
def apply(idx: Int) = {
+ if (idx < 0 || idx >= self.length) throw new IndexOutOfBoundsException(idx.toString)
val row = findRow(idx, 0, self.length - 1)
mapping(self(row)).seq.toSeq(idx - index(row))
}
diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala
index 02e5dd01f5..2eea15b8dc 100644
--- a/src/library/scala/collection/concurrent/Map.scala
+++ b/src/library/scala/collection/concurrent/Map.scala
@@ -20,7 +20,7 @@ package collection.concurrent
* @tparam A the key type of the map
* @tparam B the value type of the map
*
- * @define Coll `ConcurrentMap`
+ * @define Coll `concurrent.Map`
* @define coll concurrent map
* @define concurrentmapinfo
* This is a base trait for all Scala concurrent map implementations. It
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index 64cf1cfb1e..54455c531a 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -25,7 +25,7 @@ import scala.language.higherKinds
* @author Martin Odersky
* @since 2.8
* @define coll collection
- * @define Coll CC
+ * @define Coll Traversable
*/
trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] {
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 3a64820be6..f11217d26a 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -158,7 +158,7 @@ extends scala.collection.AbstractSeq[T]
* @note Calling this method will force the entire sequence to be read.
*/
def length: Int = {
- while (!latest.isLast) addMore()
+ while (!latest.isLast && latest.end < end) addMore()
(latest.end min end) - start
}
@@ -175,7 +175,8 @@ extends scala.collection.AbstractSeq[T]
*/
override def isDefinedAt(index: Int) =
index >= 0 && index < end - start && {
- val p = page(index + start); index + start < p.end
+ val absidx = index + start
+ absidx >= 0 && absidx < page(absidx).end
}
/** The subsequence from index `start` up to `end -1` if `end`
@@ -192,6 +193,9 @@ extends scala.collection.AbstractSeq[T]
if (f.next eq null) f.addMore(more)
f = f.next
}
+ // Warning -- not refining `more` means that slices can freely request and obtain
+ // data outside of their slice. This is part of the design of PagedSeq
+ // (to read pages!) but can be surprising.
new PagedSeq(more, f, s, e)
}
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index 8cc99a53e6..662075cd93 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -101,8 +101,8 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
else new TreeMap(RB.slice(tree, from, until))
}
- override def dropRight(n: Int) = take(size - n)
- override def takeRight(n: Int) = drop(size - n)
+ override def dropRight(n: Int) = take(size - math.max(n, 0))
+ override def takeRight(n: Int) = drop(size - math.max(n, 0))
override def splitAt(n: Int) = (take(n), drop(n))
private[this] def countWhile(p: ((A, B)) => Boolean): Int = {
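
Clamping `n` at zero gives negative arguments the usual collection semantics: `dropRight` leaves the map untouched and `takeRight` yields an empty map, instead of feeding an out-of-range index into the slice arithmetic. A REPL-style sketch (the same reasoning applies to `TreeSet` below):

  import scala.collection.immutable.TreeMap
  val m = TreeMap(1 -> "a", 2 -> "b", 3 -> "c")
  m.takeRight(2)   // last two entries by key order: (2, b), (3, c)
  m.takeRight(-1)  // empty map: drop(size - 0)
  m.dropRight(-1)  // the whole map unchanged: take(size - 0)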
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index 681dbbd1a8..7378211db0 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -87,8 +87,8 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
else newSet(RB.slice(tree, from, until))
}
- override def dropRight(n: Int) = take(size - n)
- override def takeRight(n: Int) = drop(size - n)
+ override def dropRight(n: Int) = take(size - math.max(n, 0))
+ override def takeRight(n: Int) = drop(size - math.max(n, 0))
override def splitAt(n: Int) = (take(n), drop(n))
private[this] def countWhile(p: A => Boolean): Int = {
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index 8df606222f..8f77114746 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -30,8 +30,8 @@ import parallel.mutable.ParArray
*
* @tparam A the type of this arraybuffer's elements.
*
- * @define Coll `ArrayBuffer`
- * @define coll arraybuffer
+ * @define Coll `mutable.ArrayBuffer`
+ * @define coll array buffer
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `ArrayBuffer[B]` because an implicit of type `CanBuildFrom[ArrayBuffer, B, ArrayBuffer[B]]`
* is defined in object `ArrayBuffer`.
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index 6230fc23aa..471cd1cdde 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -18,6 +18,8 @@ import scala.collection.parallel.mutable.ParMap
/** A template trait for mutable maps.
* $mapNote
* $mapTags
+ * @define Coll `mutable.Map`
+ * @define coll mutable map
* @since 2.8
*
* @define mapNote
diff --git a/src/library/scala/collection/mutable/MultiMap.scala b/src/library/scala/collection/mutable/MultiMap.scala
index 78dfc35268..ac2ebf31d8 100644
--- a/src/library/scala/collection/mutable/MultiMap.scala
+++ b/src/library/scala/collection/mutable/MultiMap.scala
@@ -65,10 +65,9 @@ trait MultiMap[A, B] extends Map[A, Set[B]] {
*/
protected def makeSet: Set[B] = new HashSet[B]
- /** Assigns the specified `value` to a specified `key`, replacing
- * the existing value assigned to that `key` if it is equal to
- * the specified value. Otherwise, simply adds another binding to
- * the `key`.
+ /** Assigns the specified `value` to a specified `key`. If the key
+ * already has a binding equal to `value`, nothing is changed;
+ * otherwise, a new binding is added for that `key`.
*
* @param key The key to which to bind the new value.
* @param value The value to bind to the key.
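
The reworded doc stresses that `addBinding` accumulates values per key and that re-adding an equal value is a no-op. A short sketch of that behaviour:

  import scala.collection.mutable
  val mm = new mutable.HashMap[String, mutable.Set[Int]] with mutable.MultiMap[String, Int]
  mm.addBinding("k", 1)
  mm.addBinding("k", 2)
  mm.addBinding("k", 1)  // an equal binding already exists: nothing changes
  mm("k")                // the set contains 1 and 2; the duplicate was ignored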
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index a0d3ee0ef0..b852a4747b 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -22,6 +22,8 @@ import immutable.{List, Nil}
* @author Martin Odersky
* @version 2.8
* @since 1
+ * @define Coll `mutable.MutableList`
+ * @define coll mutable list
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable_lists "Scala's Collection Library overview"]]
* section on `Mutable Lists` for more information.
*/
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index 4a9a5d4008..d3c4161e3b 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -247,13 +247,6 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* @return a priority queue with the same elements.
*/
override def clone(): PriorityQueue[A] = new PriorityQueue[A] ++= this.iterator
-
- // def printstate() {
- // println("-----------------------")
- // println("Size: " + resarr.p_size0)
- // println("Internal array: " + resarr.p_array.toList)
- // println(toString)
- // }
}
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index 498e9e461e..c56d40786e 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -22,6 +22,8 @@ import immutable.StringLike
* @author Martin Odersky
* @version 2.8
* @since 2.7
+ * @define Coll `mutable.IndexedSeq`
+ * @define coll string builder
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html# "Scala's Collection Library overview"]]
* section on `StringBuilders` for more information.
*/
diff --git a/src/library/scala/collection/parallel/ParIterable.scala b/src/library/scala/collection/parallel/ParIterable.scala
index 2ceeb18eef..a5ba8c49ad 100644
--- a/src/library/scala/collection/parallel/ParIterable.scala
+++ b/src/library/scala/collection/parallel/ParIterable.scala
@@ -23,9 +23,6 @@ import scala.collection.parallel.mutable.ParArrayCombiner
*
* @author Aleksandar Prokopec
* @since 2.9
- *
- * @define Coll `ParIterable`
- * @define coll parallel iterable
*/
trait ParIterable[+T]
extends GenIterable[T]
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 445edd23cb..2b54e05841 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -150,7 +150,8 @@ import scala.collection.parallel.ParallelCollectionImplicits._
* @define indexsignalling
* This method will use `indexFlag` signalling capabilities. This means
* that splitters may set and read the `indexFlag` state.
- *
+ * @define Coll `ParIterable`
+ * @define coll parallel iterable
*/
trait ParIterableLike[+T, +Repr <: ParIterable[T], +Sequential <: Iterable[T] with IterableLike[T, Sequential]]
extends GenIterableLike[T, Repr]
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index d2b15c727a..ee1334ba55 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -24,6 +24,8 @@ import scala.collection.generic.Signalling
*
* @tparam K the key type of the map
* @tparam V the value type of the map
+ * @define Coll `ParMap`
+ * @define coll parallel map
*
* @author Aleksandar Prokopec
* @since 2.9
diff --git a/src/library/scala/collection/parallel/ParSetLike.scala b/src/library/scala/collection/parallel/ParSetLike.scala
index 4e9a2e5751..4feda5ff07 100644
--- a/src/library/scala/collection/parallel/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/ParSetLike.scala
@@ -20,6 +20,8 @@ import scala.collection.Set
* $sideeffects
*
* @tparam T the element type of the set
+ * @define Coll `ParSet`
+ * @define coll parallel set
*
* @author Aleksandar Prokopec
* @since 2.9
diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
index 42027f5bac..5d99394a50 100644
--- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
@@ -22,6 +22,8 @@ import scala.collection.generic.Shrinkable
*
* @tparam K the key type of the map
* @tparam V the value type of the map
+ * @define Coll `ParMap`
+ * @define coll parallel map
*
* @author Aleksandar Prokopec
* @since 2.9
diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala
index 9367f1424d..4e2d3e0e4c 100644
--- a/src/library/scala/collection/parallel/mutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSet.scala
@@ -14,9 +14,6 @@ import scala.collection.parallel.Combiner
/** A mutable variant of `ParSet`.
*
- * @define Coll `mutable.ParSet`
- * @define coll mutable parallel set
- *
* @author Aleksandar Prokopec
*/
trait ParSet[T]
diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
index 13af5ed649..08aa3b024b 100644
--- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
@@ -21,6 +21,8 @@ import scala.collection.generic.Shrinkable
* $sideeffects
*
* @tparam T the element type of the set
+ * @define Coll `mutable.ParSet`
+ * @define coll mutable parallel set
*
* @author Aleksandar Prokopec
* @since 2.9
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index 11d3bb8b02..e380c55880 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -110,8 +110,9 @@ object ExecutionContext {
* The explicit global `ExecutionContext`. Invoke `global` when you want to provide the global
* `ExecutionContext` explicitly.
*
- * The default `ExecutionContext` implementation is backed by a port of
- * [[http://gee.cs.oswego.edu/dl/jsr166/dist/jsr166-4jdk7docs/java/util/concurrent/ForkJoinPool.html java.util.concurrent.ForkJoinPool]].
+ * The default `ExecutionContext` implementation is backed by a work-stealing thread pool. By default,
+ * the thread pool uses a target number of worker threads equal to the number of
+ * [[https://docs.oracle.com/javase/8/docs/api/java/lang/Runtime.html#availableProcessors-- available processors]].
*
* @return the global `ExecutionContext`
*/
@@ -122,15 +123,16 @@ object ExecutionContext {
* The implicit global `ExecutionContext`. Import `global` when you want to provide the global
* `ExecutionContext` implicitly.
*
- * The default `ExecutionContext` implementation is backed by a port of
- * [[http://gee.cs.oswego.edu/dl/jsr166/dist/jsr166-4jdk7docs/java/util/concurrent/ForkJoinPool.html java.util.concurrent.ForkJoinPool]].
+ * The default `ExecutionContext` implementation is backed by a work-stealing thread pool. By default,
+ * the thread pool uses a target number of worker threads equal to the number of
+ * [[https://docs.oracle.com/javase/8/docs/api/java/lang/Runtime.html#availableProcessors-- available processors]].
*/
implicit lazy val global: ExecutionContextExecutor = impl.ExecutionContextImpl.fromExecutor(null: Executor)
}
/** Creates an `ExecutionContext` from the given `ExecutorService`.
*
- * @param e the `ExecutorService` to use
+ * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]].
* @param reporter a function for error reporting
* @return the `ExecutionContext` using the given `ExecutorService`
*/
@@ -147,14 +149,14 @@ object ExecutionContext {
* val ec = ExecutionContext.fromExecutorService(Executors.newSingleThreadExecutor())
* }}}
*
- * @param e the `ExecutorService` to use
+ * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]].
* @return the `ExecutionContext` using the given `ExecutorService`
*/
def fromExecutorService(e: ExecutorService): ExecutionContextExecutorService = fromExecutorService(e, defaultReporter)
/** Creates an `ExecutionContext` from the given `Executor`.
*
- * @param e the `Executor` to use
+ * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]].
* @param reporter a function for error reporting
* @return the `ExecutionContext` using the given `Executor`
*/
@@ -163,7 +165,7 @@ object ExecutionContext {
/** Creates an `ExecutionContext` from the given `Executor` with the [[scala.concurrent.ExecutionContext$.defaultReporter default reporter]].
*
- * @param e the `Executor` to use
+ * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]].
* @return the `ExecutionContext` using the given `Executor`
*/
def fromExecutor(e: Executor): ExecutionContextExecutor = fromExecutor(e, defaultReporter)
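
The rewritten docs describe the default work-stealing pool and the `null` fallback in the factory methods. A hedged sketch of obtaining the default context both ways (the pool-sizing claim is the documentation's, not verified here):

  import scala.concurrent.{ExecutionContext, Future}

  implicit val ec: ExecutionContext = ExecutionContext.global  // explicit handle on the default pool
  val ec2 = ExecutionContext.fromExecutor(null)                // null falls back to the default configuration

  Future { 21 * 2 }  // scheduled on the default pool via the implicit ec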
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index 5a81710986..74a174ea74 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -417,7 +417,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
private final def computeHashCode(): Unit = {
computedHashCode =
if (isWhole && (precision - scale) < BigDecimal.maximumHashScale) toBigInt.hashCode
- else if (isValidDouble) doubleValue.##
+ else if (isDecimalDouble) doubleValue.##
else {
val temp = bigDecimal.stripTrailingZeros
scala.util.hashing.MurmurHash3.mixLast( temp.scaleByPowerOfTen(temp.scale).toBigInteger.hashCode, temp.scale )
@@ -477,7 +477,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
* `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble`, depending on the intended meaning.
* By default, `decimal` creation is used, so `isDecimalDouble` is probably what you want.
*/
- @deprecated("Validity has two distinct meanings. Use `isExactBinaryDouble` or `equivalentToDouble` instead.", "2.11")
+ @deprecated("Validity has distinct meanings. Use `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble` instead.", "2.11")
def isValidDouble = {
val d = toDouble
!d.isInfinity && bigDecimal.compareTo(new BigDec(d)) == 0
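
The new deprecation message directs callers to the three precise predicates. A hedged REPL-style sketch of how they differ for a value built from a decimal string (results follow from the default `decimal` creation mode described above):

  val x = BigDecimal("0.1")
  x.isDecimalDouble  // true: 0.1 round-trips through a Double under decimal creation
  x.isBinaryDouble   // false: the nearest Double is not exactly 1/10 in binary
  x.isExactDouble    // false, for the same reason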
diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala
index b28f6d4269..512c4fbc27 100644
--- a/src/library/scala/runtime/Tuple2Zipped.scala
+++ b/src/library/scala/runtime/Tuple2Zipped.scala
@@ -17,6 +17,10 @@ import scala.language.{ higherKinds, implicitConversions }
/** This interface is intended as a minimal interface, not complicated
* by the requirement to resolve type constructors, for implicit search (which only
* needs to find an implicit conversion to Traversable for our purposes.)
+ * @define Coll `ZippedTraversable2`
+ * @define coll collection
+ * @define collectExample
+ * @define willNotTerminateInf
*/
trait ZippedTraversable2[+El1, +El2] extends Any {
def foreach[U](f: (El1, El2) => U): Unit
diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala
index 7c501380a3..ffd44acf81 100644
--- a/src/library/scala/runtime/Tuple3Zipped.scala
+++ b/src/library/scala/runtime/Tuple3Zipped.scala
@@ -14,7 +14,12 @@ import scala.collection.{ TraversableLike, IterableLike }
import scala.collection.generic.{ CanBuildFrom => CBF }
import scala.language.{ higherKinds, implicitConversions }
-/** See comment on ZippedTraversable2. */
+/** See comment on ZippedTraversable2.
+ * @define Coll `ZippedTraversable3`
+ * @define coll collection
+ * @define collectExample
+ * @define willNotTerminateInf
+ */
trait ZippedTraversable3[+El1, +El2, +El3] extends Any {
def foreach[U](f: (El1, El2, El3) => U): Unit
}
diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala
index 39f66f5030..d2ebf8c044 100644
--- a/src/library/scala/sys/SystemProperties.scala
+++ b/src/library/scala/sys/SystemProperties.scala
@@ -21,6 +21,8 @@ import scala.language.implicitConversions
* System properties. If a security manager is in place which prevents
* the properties from being read or written, the AccessControlException
* will be caught and discarded.
+ * @define Coll `collection.mutable.Map`
+ * @define coll mutable map
*
* @author Paul Phillips
* @version 2.9
diff --git a/src/reflect/scala/reflect/api/Constants.scala b/src/reflect/scala/reflect/api/Constants.scala
index e73c5ffa91..fbcf7f3e4f 100644
--- a/src/reflect/scala/reflect/api/Constants.scala
+++ b/src/reflect/scala/reflect/api/Constants.scala
@@ -60,7 +60,7 @@ package api
*
* object Test extends App {
* val jann = typeOf[JavaAnnottee].typeSymbol.annotations(0).javaArgs
- * def jarg(name: String) = jann(newTermName(name)).asInstanceOf[LiteralArgument].value
+ * def jarg(name: String) = jann(TermName(name)).asInstanceOf[LiteralArgument].value
*
* val classRef = jarg("classRef").typeValue
* println(showRaw(classRef)) // TypeRef(ThisType(<empty>), JavaAnnottee, List())
@@ -150,7 +150,7 @@ trait Constants {
*
* object Test extends App {
* val jann = typeOf[JavaAnnottee].typeSymbol.annotations(0).javaArgs
- * def jarg(name: String) = jann(newTermName(name)) match {
+ * def jarg(name: String) = jann(TermName(name)) match {
* // Constant is always wrapped into a Literal or LiteralArgument tree node
* case LiteralArgument(ct: Constant) => value
* case _ => sys.error("Not a constant")
diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala
index 3230fdbc67..ad03718898 100644
--- a/src/reflect/scala/reflect/api/Exprs.scala
+++ b/src/reflect/scala/reflect/api/Exprs.scala
@@ -84,7 +84,7 @@ trait Exprs { self: Universe =>
*
* It is equivalent to
* {{{
- * Select( expr.tree, newTermName("foo") )
+ * Select( expr.tree, TermName("foo") )
* }}}
*
* The following example code however does not compile
diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala
index bf4d6353df..bcad84a3f0 100644
--- a/src/reflect/scala/reflect/api/FlagSets.scala
+++ b/src/reflect/scala/reflect/api/FlagSets.scala
@@ -20,20 +20,20 @@ import scala.language.implicitConversions
*
* For example, to create a class named `C` one would write something like:
* {{{
- * ClassDef(Modifiers(NoFlags), newTypeName("C"), Nil, ...)
+ * ClassDef(Modifiers(NoFlags), TypeName("C"), Nil, ...)
* }}}
*
* Here, the flag set is empty.
*
* To make `C` private, one would write something like:
* {{{
- * ClassDef(Modifiers(PRIVATE), newTypeName("C"), Nil, ...)
+ * ClassDef(Modifiers(PRIVATE), TypeName("C"), Nil, ...)
* }}}
*
* Flags can also be combined with the vertical bar operator (`|`).
* For example, a private final class is written something like:
* {{{
- * ClassDef(Modifiers(PRIVATE | FINAL), newTypeName("C"), Nil, ...)
+ * ClassDef(Modifiers(PRIVATE | FINAL), TypeName("C"), Nil, ...)
* }}}
*
* The list of all available flags is defined in [[scala.reflect.api.FlagSets#FlagValues]], available via
diff --git a/src/reflect/scala/reflect/api/Mirror.scala b/src/reflect/scala/reflect/api/Mirror.scala
index 318fdb369a..96aab48e75 100644
--- a/src/reflect/scala/reflect/api/Mirror.scala
+++ b/src/reflect/scala/reflect/api/Mirror.scala
@@ -58,7 +58,7 @@ abstract class Mirror[U <: Universe with Singleton] {
* scala> cm.staticPackage("scala")
* res2: scala.reflect.runtime.universe.ModuleSymbol = package scala
*
- * scala> res2.moduleClass.info member newTypeName("List")
+ * scala> res2.moduleClass.info member TypeName("List")
* res3: scala.reflect.runtime.universe.Symbol = type List
*
* scala> res3.fullName
diff --git a/src/reflect/scala/reflect/api/Mirrors.scala b/src/reflect/scala/reflect/api/Mirrors.scala
index ec420d184c..773c6b6fb4 100644
--- a/src/reflect/scala/reflect/api/Mirrors.scala
+++ b/src/reflect/scala/reflect/api/Mirrors.scala
@@ -292,7 +292,7 @@ trait Mirrors { self: Universe =>
* that can be used to create instances of the class, inspect its companion object or perform further reflections.
*
* To get a class symbol by the name of the class you would like to reflect,
- * use `<this mirror>.symbol.info.member(newTypeName(<name of the class>)).asClass`.
+ * use `<this mirror>.symbol.info.member(TypeName(<name of the class>)).asClass`.
* For further information about member lookup refer to `Symbol.info`.
*
* The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala
index fe5f47c25d..03f7b2d218 100644
--- a/src/reflect/scala/reflect/api/Names.scala
+++ b/src/reflect/scala/reflect/api/Names.scala
@@ -17,11 +17,11 @@ import scala.language.implicitConversions
* To search for the `map` method (which is a term) declared in the `List` class, one can do:
*
* {{{
- * scala> typeOf[List[_]].member(newTermName("map"))
+ * scala> typeOf[List[_]].member(TermName("map"))
* res0: reflect.runtime.universe.Symbol = method map
* }}}
*
- * To search for a type member, one can follow the same procedure, using `newTypeName` instead.
+ * To search for a type member, one can follow the same procedure, using `TypeName` instead.
*
* For more information about creating and using `Name`s, see the [[http://docs.scala-lang.org/overviews/reflection/annotations-names-scopes.html Reflection Guide: Annotations, Names, Scopes, and More]]
*
@@ -30,14 +30,14 @@ import scala.language.implicitConversions
*/
trait Names {
/** An implicit conversion from String to TermName.
- * Enables an alternative notation `"map": TermName` as opposed to `newTermName("map")`.
+ * Enables an alternative notation `"map": TermName` as opposed to `TermName("map")`.
* @group Names
*/
@deprecated("Use explicit `TermName(s)` instead", "2.11.0")
implicit def stringToTermName(s: String): TermName = TermName(s)
/** An implicit conversion from String to TypeName.
- * Enables an alternative notation `"List": TypeName` as opposed to `newTypeName("List")`.
+ * Enables an alternative notation `"List": TypeName` as opposed to `TypeName("List")`.
* @group Names
*/
@deprecated("Use explicit `TypeName(s)` instead", "2.11.0")
diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala
index 92ae6d8b44..01b9759c70 100644
--- a/src/reflect/scala/reflect/api/Printers.scala
+++ b/src/reflect/scala/reflect/api/Printers.scala
@@ -46,15 +46,15 @@ import java.io.{ PrintWriter, StringWriter }
* {{{
* scala> showRaw(tree)
* res1: String = Block(List(
- * ClassDef(Modifiers(FINAL), newTypeName("C"), List(), Template(
- * List(Ident(newTypeName("AnyRef"))),
+ * ClassDef(Modifiers(FINAL), TypeName("C"), List(), Template(
+ * List(Ident(TypeName("AnyRef"))),
* noSelfType,
* List(
* DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(),
* Block(List(
* Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())),
* Literal(Constant(())))),
- * DefDef(Modifiers(), newTermName("x"), List(), List(), TypeTree(),
+ * DefDef(Modifiers(), TermName("x"), List(), List(), TypeTree(),
* Literal(Constant(2))))))),
* Literal(Constant(())))
* }}}
@@ -70,23 +70,23 @@ import java.io.{ PrintWriter, StringWriter }
*
* scala> showRaw(cm.mkToolBox().typecheck(tree), printTypes = true)
* res2: String = Block[1](List(
- * ClassDef[2](Modifiers(FINAL), newTypeName("C"), List(), Template[3](
- * List(Ident[4](newTypeName("AnyRef"))),
+ * ClassDef[2](Modifiers(FINAL), TypeName("C"), List(), Template[3](
+ * List(Ident[4](TypeName("AnyRef"))),
* noSelfType,
* List(
* DefDef[2](Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree[3](),
* Block[1](List(
- * Apply[4](Select[5](Super[6](This[3](newTypeName("C")), tpnme.EMPTY), ...))),
+ * Apply[4](Select[5](Super[6](This[3](TypeName("C")), tpnme.EMPTY), ...))),
* Literal[1](Constant(())))),
- * DefDef[2](Modifiers(), newTermName("x"), List(), List(), TypeTree[7](),
+ * DefDef[2](Modifiers(), TermName("x"), List(), List(), TypeTree[7](),
* Literal[8](Constant(2))))))),
* Literal[1](Constant(())))
* [1] TypeRef(ThisType(scala), scala.Unit, List())
* [2] NoType
- * [3] TypeRef(NoPrefix, newTypeName("C"), List())
+ * [3] TypeRef(NoPrefix, TypeName("C"), List())
* [4] TypeRef(ThisType(java.lang), java.lang.Object, List())
* [5] MethodType(List(), TypeRef(ThisType(java.lang), java.lang.Object, List()))
- * [6] SuperType(ThisType(newTypeName("C")), TypeRef(... java.lang.Object ...))
+ * [6] SuperType(ThisType(TypeName("C")), TypeRef(... java.lang.Object ...))
* [7] TypeRef(ThisType(scala), scala.Int, List())
* [8] ConstantType(Constant(2))
* }}}
@@ -112,10 +112,10 @@ import java.io.{ PrintWriter, StringWriter }
* // showRaw has already been discussed above
* scala> showRaw(tpe)
* res1: String = RefinedType(
- * List(TypeRef(ThisType(scala), newTypeName("AnyRef"), List())),
+ * List(TypeRef(ThisType(scala), TypeName("AnyRef"), List())),
* Scope(
- * newTermName("x"),
- * newTermName("y")))
+ * TermName("x"),
+ * TermName("y")))
* }}}
*
* `printIds` and/or `printKinds` can additionally be supplied as arguments in a call to
@@ -124,10 +124,10 @@ import java.io.{ PrintWriter, StringWriter }
* {{{
* scala> showRaw(tpe, printIds = true, printKinds = true)
* res2: String = RefinedType(
- * List(TypeRef(ThisType(scala#2043#PK), newTypeName("AnyRef")#691#TPE, List())),
+ * List(TypeRef(ThisType(scala#2043#PK), TypeName("AnyRef")#691#TPE, List())),
* Scope(
- * newTermName("x")#2540#METH,
- * newTermName("y")#2541#GET))
+ * TermName("x")#2540#METH,
+ * TermName("y")#2541#GET))
* }}}
*
* For more details about `Printer`s and other aspects of Scala reflection, see the
diff --git a/src/reflect/scala/reflect/api/StandardDefinitions.scala b/src/reflect/scala/reflect/api/StandardDefinitions.scala
index 524b7ea14b..bf9cf5e334 100644
--- a/src/reflect/scala/reflect/api/StandardDefinitions.scala
+++ b/src/reflect/scala/reflect/api/StandardDefinitions.scala
@@ -128,7 +128,7 @@ trait StandardDefinitions {
* scala> import scala.reflect.runtime.universe._
* import scala.reflect.runtime.universe._
*
- * scala> val m = typeOf[C].member(newTermName("m")).asMethod
+ * scala> val m = typeOf[C].member(TermName("m")).asMethod
* m: reflect.runtime.universe.MethodSymbol = method m
*
* scala> m.params(0)(0).info
@@ -156,7 +156,7 @@ trait StandardDefinitions {
* scala> import scala.reflect.runtime.universe._
* import scala.reflect.runtime.universe._
*
- * scala> val m = typeOf[C].member(newTermName("m")).asMethod
+ * scala> val m = typeOf[C].member(TermName("m")).asMethod
* m: reflect.runtime.universe.MethodSymbol = method m
*
* scala> m.params(0)(0).info
@@ -181,7 +181,7 @@ trait StandardDefinitions {
* scala> import scala.reflect.runtime.universe._
* import scala.reflect.runtime.universe._
*
- * scala> val m = typeOf[C].member(newTermName("m")).asMethod
+ * scala> val m = typeOf[C].member(TermName("m")).asMethod
* m: reflect.runtime.universe.MethodSymbol = method m
*
* scala> m.params(0)(0).info
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
index 42cf600c85..18d185067e 100644
--- a/src/reflect/scala/reflect/api/Symbols.scala
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -27,7 +27,7 @@ package api
* scala> class C[T] { def test[U](x: T)(y: U): Int = ??? }
* defined class C
*
- * scala> val test = typeOf[C[Int]].member(newTermName("test")).asMethod
+ * scala> val test = typeOf[C[Int]].member(TermName("test")).asMethod
* test: reflect.runtime.universe.MethodSymbol = method test
*
* scala> test.info
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index ff8926651b..aeaa38c317 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -33,7 +33,7 @@ package api
*
* The following creates an AST representing `print("Hello World")`:
* {{{
- * Apply(Select(Select(This(newTypeName("scala")), newTermName("Predef")), newTermName("print")), List(Literal(Constant("Hello World"))))
+ * Apply(Select(Select(This(TypeName("scala")), TermName("Predef")), TermName("print")), List(Literal(Constant("Hello World"))))
* }}}
*
* The following creates an AST from a literal 5, and then uses `showRaw` to print it in a readable format.
@@ -1098,11 +1098,11 @@ trait Trees { self: Universe =>
* // a dummy node that carries the type of unapplication to patmat
* // the <unapply-selector> here doesn't have an underlying symbol
* // it only has a type assigned, therefore after `untypecheck` this tree is no longer typeable
- * Apply(Select(Ident(Foo), newTermName("unapply")), List(Ident(newTermName("<unapply-selector>")))),
+ * Apply(Select(Ident(Foo), TermName("unapply")), List(Ident(TermName("<unapply-selector>")))),
* // arguments of the unapply => nothing synthetic here
- * List(Bind(newTermName("x"), Ident(nme.WILDCARD)))),
+ * List(Bind(TermName("x"), Ident(nme.WILDCARD)))),
* EmptyTree,
- * Ident(newTermName("x")))))
+ * Ident(TermName("x")))))
* }}}
*
* Introduced by typer. Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher).
diff --git a/src/reflect/scala/reflect/internal/Depth.scala b/src/reflect/scala/reflect/internal/Depth.scala
index 357abf765f..a330e0accb 100644
--- a/src/reflect/scala/reflect/internal/Depth.scala
+++ b/src/reflect/scala/reflect/internal/Depth.scala
@@ -21,8 +21,20 @@ final class Depth private (val depth: Int) extends AnyVal with Ordered[Depth] {
object Depth {
// A don't care value for the depth parameter in lubs/glbs and related operations.
- final val AnyDepth = new Depth(Int.MinValue)
+ // When passed this value, the recursion budget will be inferred from the
+ // `typeDepth` of the list of types.
+ final val AnyDepthValue = -3
+ final val AnyDepth = new Depth(AnyDepthValue)
+
final val Zero = new Depth(0)
- @inline final def apply(depth: Int): Depth = if (depth < 0) AnyDepth else new Depth(depth)
+ // SI-9018: A negative depth is used to signal that we have breached the recursion limit.
+ // The LUB/GLB implementation will then truncate to Any/Nothing.
+ //
+ // We only really need one of these, but we allow representation of Depth(-1) and Depth(-2)
+ // to mimic the historical choice of 2.10.4.
+ @inline final def apply(depth: Int): Depth = {
+ if (depth < AnyDepthValue) AnyDepth
+ else new Depth(depth)
+ }
}
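
The factory now clamps only values below `AnyDepthValue`, so -1 and -2 survive as "recursion limit breached" markers. A REPL-style sketch of the same clamping rule, separate from the compiler's `Depth` class:

  val AnyDepthValue = -3
  def clamp(depth: Int): Int = if (depth < AnyDepthValue) AnyDepthValue else depth
  clamp(5)    // 5
  clamp(-1)   // -1: kept, signals a breached recursion limit
  clamp(-10)  // -3: folded into the "don't care" AnyDepth value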
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index f2517fff54..667ff7c4b4 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -248,6 +248,7 @@ trait StdNames {
final val Unliftable: NameType = "Unliftable"
final val Name: NameType = "Name"
final val Tree: NameType = "Tree"
+ final val Text: NameType = "Text"
final val TermName: NameType = "TermName"
final val Type : NameType = "Type"
final val TypeName: NameType = "TypeName"
@@ -778,6 +779,7 @@ trait StdNames {
val values : NameType = "values"
val wait_ : NameType = "wait"
val withFilter: NameType = "withFilter"
+ val xml: NameType = "xml"
val zero: NameType = "zero"
// quasiquote interpolators:
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 51f06b1d6d..c665f2b91a 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -792,6 +792,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isDefinedInPackage = effectiveOwner.isPackageClass
final def needsFlatClasses = phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass
+ // TODO introduce a flag for these?
+ final def isPatternTypeVariable: Boolean =
+ isAbstractType && !isExistential && !isTypeParameterOrSkolem && isLocalToBlock
+
/** change name by appending $$<fully-qualified-name-of-class `base`>
* Do the same for any accessed symbols or setters/getters.
* Implementation in TermSymbol.
@@ -3428,10 +3432,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
trait StubSymbol extends Symbol {
devWarning("creating stub symbol to defer error: " + missingMessage)
- protected def missingMessage: String
+ def missingMessage: String
/** Fail the stub by throwing a [[scala.reflect.internal.MissingRequirementError]]. */
- override final def failIfStub() = {MissingRequirementError.signal(missingMessage)} //
+ override final def failIfStub() =
+ MissingRequirementError.signal(missingMessage)
/** Fail the stub by reporting an error to the reporter, setting the IS_ERROR flag
* on this symbol, and returning the dummy value `alt`.
@@ -3456,8 +3461,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def rawInfo = fail(NoType)
override def companionSymbol = fail(NoSymbol)
}
- class StubClassSymbol(owner0: Symbol, name0: TypeName, protected val missingMessage: String) extends ClassSymbol(owner0, owner0.pos, name0) with StubSymbol
- class StubTermSymbol(owner0: Symbol, name0: TermName, protected val missingMessage: String) extends TermSymbol(owner0, owner0.pos, name0) with StubSymbol
+ class StubClassSymbol(owner0: Symbol, name0: TypeName, val missingMessage: String) extends ClassSymbol(owner0, owner0.pos, name0) with StubSymbol
+ class StubTermSymbol(owner0: Symbol, name0: TermName, val missingMessage: String) extends TermSymbol(owner0, owner0.pos, name0) with StubSymbol
trait FreeSymbol extends Symbol {
def origin: String
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index 5433bfad60..1fc7aebab0 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -211,7 +211,12 @@ abstract class UnPickler {
def fromName(name: Name) = name.toTermName match {
case nme.ROOT => loadingMirror.RootClass
case nme.ROOTPKG => loadingMirror.RootPackage
- case _ => adjust(owner.info.decl(name))
+ case _ =>
+ val decl = owner match {
+ case stub: StubSymbol => NoSymbol // SI-8502 Don't call .info and fail the stub
+ case _ => owner.info.decl(name)
+ }
+ adjust(decl)
}
def nestedObjectSymbol: Symbol = {
// If the owner is overloaded (i.e. a method), it's not possible to select the
@@ -389,14 +394,24 @@ abstract class UnPickler {
case CLASSINFOtpe => ClassInfoType(parents, symScope(clazz), clazz)
}
+ def readThisType(): Type = {
+ val sym = readSymbolRef() match {
+ case stub: StubSymbol if !stub.isClass =>
+ // SI-8502 This allows us to create a stub for an unpickled reference to `missingPackage.Foo`.
+ stub.owner.newStubSymbol(stub.name.toTypeName, stub.missingMessage)
+ case sym => sym
+ }
+ ThisType(sym)
+ }
+
// We're stuck with the order types are pickled in, but with judicious use
// of named parameters we can recapture a declarative flavor in a few cases.
// But it's still a rat's nest of adhockery.
(tag: @switch) match {
case NOtpe => NoType
case NOPREFIXtpe => NoPrefix
- case THIStpe => ThisType(readSymbolRef())
- case SINGLEtpe => SingleType(readTypeRef(), readSymbolRef())
+ case THIStpe => readThisType()
+ case SINGLEtpe => SingleType(readTypeRef(), readSymbolRef().filter(_.isStable)) // SI-7596 account for overloading
case SUPERtpe => SuperType(readTypeRef(), readTypeRef())
case CONSTANTtpe => ConstantType(readConstantRef())
case TYPEREFtpe => TypeRef(readTypeRef(), readSymbolRef(), readTypes())
diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala
index ac1159b2ac..bcefcc471f 100644
--- a/src/reflect/scala/reflect/io/AbstractFile.scala
+++ b/src/reflect/scala/reflect/io/AbstractFile.scala
@@ -48,14 +48,16 @@ object AbstractFile {
else null
/**
- * If the specified URL exists and is a readable zip or jar archive,
- * returns an abstract directory backed by it. Otherwise, returns
- * `null`.
+ * If the specified URL exists and is a regular file or a directory, returns an
+ * abstract regular file or an abstract directory, respectively, backed by it.
+ * Otherwise, returns `null`.
*/
- def getURL(url: URL): AbstractFile = {
- if (url == null || !Path.isExtensionJarOrZip(url.getPath)) null
- else ZipArchive fromURL url
- }
+ def getURL(url: URL): AbstractFile =
+ if (url.getProtocol == "file") {
+ val f = new java.io.File(url.getPath)
+ if (f.isDirectory) getDirectory(f)
+ else getFile(f)
+ } else null
def getResources(url: URL): AbstractFile = ZipArchive fromManifestURL url
}
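
`getURL` now resolves any `file:` URL to an abstract regular file or directory instead of accepting only zip and jar paths. A hedged sketch (the paths are hypothetical):

  import scala.reflect.io.AbstractFile
  import java.io.File

  val dir  = AbstractFile.getURL(new File("/tmp").toURI.toURL)            // abstract directory
  val file = AbstractFile.getURL(new File("/tmp/notes.txt").toURI.toURL)  // abstract regular file
  // URLs with a non-file protocol (e.g. http:) still yield null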
diff --git a/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala
index 5edc051461..586b8a5257 100644
--- a/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala
+++ b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala
@@ -11,12 +11,16 @@ private[reflect] trait ThreadLocalStorage {
trait ThreadLocalStorage[T] { def get: T; def set(newValue: T): Unit }
private class MyThreadLocalStorage[T](initialValue: => T) extends ThreadLocalStorage[T] {
// TODO: how do we use org.cliffc.high_scale_lib.NonBlockingHashMap here?
- val values = new java.util.concurrent.ConcurrentHashMap[Thread, T]()
+ // (we would need a version that uses weak keys)
+ private val values = java.util.Collections.synchronizedMap(new java.util.WeakHashMap[Thread, T]())
def get: T = {
if (values containsKey currentThread) values.get(currentThread)
else {
val value = initialValue
- values.putIfAbsent(currentThread, value)
+ // since the key is currentThread, and `values` is private, it
+ // would be impossible for a value to have been set after the
+ // above containsKey check. `putIfAbsent` is not necessary.
+ values.put(currentThread, value)
value
}
}
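
Switching to weakly referenced `Thread` keys lets entries for dead threads be collected (the leak exercised by the disabled t8946 test below), while the synchronized wrapper keeps lookups thread-safe. A standalone sketch of the same idea, not the reflection-internal class itself:

  import java.util.{Collections, WeakHashMap}

  class WeakThreadLocal[T](initialValue: => T) {
    // weak keys: once a Thread dies and is collected, its entry disappears with it
    private val values = Collections.synchronizedMap(new WeakHashMap[Thread, T]())

    def get: T = {
      val t = Thread.currentThread
      if (values.containsKey(t)) values.get(t)
      else {
        val v = initialValue
        values.put(t, v)  // only the owning thread ever writes its own key
        v
      }
    }
    def set(newValue: T): Unit = values.put(Thread.currentThread, newValue)
  }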
diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
index 93992a7cae..ee1dea4dc1 100644
--- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
@@ -19,6 +19,7 @@ import scala.reflect.internal.util.{ BatchSourceFile, ScalaClassLoader }
import ScalaClassLoader._
import scala.reflect.io.{ File, Directory }
import scala.tools.util._
+import io.AbstractFile
import scala.collection.generic.Clearable
import scala.concurrent.{ ExecutionContext, Await, Future, future }
import ExecutionContext.Implicits._
@@ -217,7 +218,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
nullary("power", "enable power user mode", powerCmd),
nullary("quit", "exit the interpreter", () => Result(keepRunning = false, None)),
cmd("replay", "[options]", "reset the repl and replay all previous commands", replayCommand),
- //cmd("require", "<path>", "add a jar or directory to the classpath", require), // TODO
+ cmd("require", "<path>", "add a jar to the classpath", require),
cmd("reset", "[options]", "reset the repl to its initial state, forgetting all session entries", resetCommand),
cmd("save", "<path>", "save replayable session to a file", saveCommand),
shCommand,
@@ -616,13 +617,57 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
val f = File(arg).normalize
if (f.exists) {
addedClasspath = ClassPath.join(addedClasspath, f.path)
- val totalClasspath = ClassPath.join(settings.classpath.value, addedClasspath)
- echo("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, totalClasspath))
- replay()
+ intp.addUrlsToClassPath(f.toURI.toURL)
+ echo("Added '%s' to classpath.".format(f.path, intp.global.classPath.asClasspathString))
+ repldbg("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, intp.global.classPath.asClasspathString))
}
else echo("The path '" + f + "' doesn't seem to exist.")
}
+ /** Adds a jar file to the current classpath. The jar will only be added if it
+ * does not contain classes that already exist on the current classpath.
+ *
+ * Importantly, `require` adds jars to the classpath ''without'' resetting
+ * the state of the interpreter. This is in contrast to `replay` which can
+ * be used to add jars to the classpath and which creates a new instance of
+ * the interpreter and replays all interpreter expressions.
+ */
+ def require(arg: String): Unit = {
+ class InfoClassLoader extends java.lang.ClassLoader {
+ def classOf(arr: Array[Byte]): Class[_] =
+ super.defineClass(null, arr, 0, arr.length)
+ }
+
+ val f = File(arg).normalize
+
+ if (f.isDirectory) {
+ echo("Adding directories to the classpath is not supported. Add a jar instead.")
+ return
+ }
+
+ val jarFile = AbstractFile.getDirectory(new java.io.File(arg))
+
+ def flatten(f: AbstractFile): Iterator[AbstractFile] =
+ if (f.isClassContainer) f.iterator.flatMap(flatten)
+ else Iterator(f)
+
+ val entries = flatten(jarFile)
+ val cloader = new InfoClassLoader
+
+ def classNameOf(classFile: AbstractFile): String = cloader.classOf(classFile.toByteArray).getName
+ def alreadyDefined(clsName: String) = intp.classLoader.tryToLoadClass(clsName).isDefined
+ val exists = entries.filter(_.hasExtension("class")).map(classNameOf).exists(alreadyDefined)
+
+ if (!f.exists) echo(s"The path '$f' doesn't seem to exist.")
+ else if (exists) echo(s"The path '$f' cannot be loaded, because existing classpath entries conflict.") // TODO tell me which one
+ else {
+ addedClasspath = ClassPath.join(addedClasspath, f.path)
+ intp.addUrlsToClassPath(f.toURI.toURL)
+ echo("Added '%s' to classpath.".format(f.path, intp.global.classPath.asClasspathString))
+ repldbg("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, intp.global.classPath.asClasspathString))
+ }
+ }
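
A hedged sketch of what a `:require` session might look like, following the code above (paths and output are illustrative only):

  scala> :require /tmp/extra.jar
  Added '/tmp/extra.jar' to classpath.

  scala> :require /tmp/classes
  Adding directories to the classpath is not supported. Add a jar instead.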
+
def powerCmd(): Result = {
if (isReplPower) "Already in power mode."
else enablePowerMode(isDuringInit = false)
diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index 3f4922a602..b990e401ec 100644
--- a/src/repl/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -18,9 +18,13 @@ import scala.reflect.internal.util.{ BatchSourceFile, SourceFile }
import scala.tools.util.PathResolver
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings }
-import scala.tools.nsc.util.{ ScalaClassLoader, stringFromReader, stringFromWriter, StackTraceOps }
+import scala.tools.nsc.util.{ ScalaClassLoader, stringFromReader, stringFromWriter, StackTraceOps, ClassPath, MergedClassPath }
+import ScalaClassLoader.URLClassLoader
import scala.tools.nsc.util.Exceptional.unwrap
+import scala.tools.nsc.backend.JavaPlatform
import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException, CompiledScript, Compilable}
+import java.net.URL
+import java.io.File
/** An interpreter for Scala code.
*
@@ -82,6 +86,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
private var _classLoader: util.AbstractFileClassLoader = null // active classloader
private val _compiler: ReplGlobal = newCompiler(settings, reporter) // our private compiler
+ private var _runtimeClassLoader: URLClassLoader = null // wrapper exposing addURL
+
def compilerClasspath: Seq[java.net.URL] = (
if (isInitializeComplete) global.classPath.asURLs
else new PathResolver(settings).result.asURLs // the compiler's classpath
@@ -237,6 +243,18 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
new Global(settings, reporter) with ReplGlobal { override def toString: String = "<global>" }
}
+ /**
+ * Adds all specified jars to the compile and runtime classpaths.
+ *
+ * @note Currently only supports jars, not directories.
+ * @param urls The list of items to add to the compile and runtime classpaths.
+ */
+ def addUrlsToClassPath(urls: URL*): Unit = {
+ new Run // force some initialization
+ urls.foreach(_runtimeClassLoader.addURL) // Add jars to runtime classloader
+ global.extendCompilerClassPath(urls: _*) // Add jars to compile-time classpath
+ }
+
/** Parent classloader. Overridable. */
protected def parentClassLoader: ClassLoader =
settings.explicitParentLoader.getOrElse( this.getClass.getClassLoader() )
@@ -329,9 +347,9 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
}
private def makeClassLoader(): util.AbstractFileClassLoader =
- new TranslatingClassLoader(parentClassLoader match {
- case null => ScalaClassLoader fromURLs compilerClasspath
- case p => new ScalaClassLoader.URLClassLoader(compilerClasspath, p)
+ new TranslatingClassLoader({
+ _runtimeClassLoader = new URLClassLoader(compilerClasspath, parentClassLoader)
+ _runtimeClassLoader
})
// Set the current Java "context" class loader to this interpreter's class loader
diff --git a/src/scaladoc/scala/tools/nsc/doc/DocParser.scala b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
index 6dc3e5a62b..f03b848af6 100644
--- a/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
@@ -15,13 +15,14 @@ import DocParser.Parsed
* right after parsing so it can read `DocDefs` from source code which would
* otherwise cause the compiler to go haywire.
*/
-class DocParser(settings: nsc.Settings, reporter: Reporter) extends Global(settings, reporter) {
+class DocParser(settings: nsc.Settings, reporter: Reporter) extends Global(settings, reporter) with ScaladocGlobalTrait {
def this(settings: Settings) = this(settings, new ConsoleReporter(settings))
def this() = this(new Settings(Console println _))
// the usual global initialization
locally { new Run() }
+ override def forScaladoc = true
override protected def computeInternalPhases() {
phasesSet += syntaxAnalyzer
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
index 7a67055ffa..7289edc137 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -313,7 +313,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
/* Subclass cache */
private lazy val subClassesCache = (
- if (sym == AnyRefClass) null
+ if (sym == AnyRefClass || sym == AnyClass) null
else mutable.ListBuffer[DocTemplateEntity]()
)
def registerSubClass(sc: DocTemplateEntity): Unit = {
diff --git a/test/disabled/run/t8946.scala b/test/disabled/run/t8946.scala
new file mode 100644
index 0000000000..a248a20501
--- /dev/null
+++ b/test/disabled/run/t8946.scala
@@ -0,0 +1,29 @@
+// Tests to assert that references to threads are not strongly held when scala-reflection is used inside of them.
+object Test {
+ import scala.ref.WeakReference
+
+ def forceGc() = {
+ var obj = new Object
+ val ref = new WeakReference(obj)
+ obj = null;
+ while(ref.get.nonEmpty)
+ Array.ofDim[Byte](16 * 1024 * 1024)
+ }
+
+ def main(args: Array[String]): Unit = {
+ val threads = for (i <- (1 to 16)) yield {
+ val t = new Thread {
+ override def run(): Unit = {
+ import reflect.runtime.universe._
+ typeOf[List[String]] <:< typeOf[Seq[_]]
+ }
+ }
+ t.start()
+ t.join()
+ WeakReference(t)
+ }
+ forceGc()
+ val nonGCdThreads = threads.filter(_.get.nonEmpty).length
+ assert(nonGCdThreads == 0, s"${nonGCdThreads} threads were retained; expected 0.")
+ }
+}
diff --git a/test/files/neg/t2866.check b/test/files/neg/t2866.check
new file mode 100644
index 0000000000..340fb8da22
--- /dev/null
+++ b/test/files/neg/t2866.check
@@ -0,0 +1,17 @@
+t2866.scala:30: warning: imported `one' is permanently hidden by definition of value one
+ import A.one // warning: imported `one' is permanently hidden by definition of value one.
+ ^
+t2866.scala:42: error: ambiguous implicit values:
+ both value two of type Int
+ and value one in object A of type => Int
+ match expected type Int
+ assert(implicitly[Int] == 2) // !!! Not ambiguous in 2.8.0. Ambiguous in 2.7.6
+ ^
+t2866.scala:50: error: ambiguous implicit values:
+ both value two of type Int
+ and value one in object A of type => Int
+ match expected type Int
+ assert(implicitly[Int] == 2) // !!! Not ambiguous in 2.8.0. Ambiguous in 2.7.6
+ ^
+one warning found
+two errors found
diff --git a/test/files/neg/t2866.scala b/test/files/neg/t2866.scala
new file mode 100644
index 0000000000..55ebff9710
--- /dev/null
+++ b/test/files/neg/t2866.scala
@@ -0,0 +1,59 @@
+// for 2.7.x compatibility
+
+object A {
+ implicit val one = 1
+}
+
+object Test {
+
+ locally {
+ import A._
+ locally {
+ // assert(implicitly[Int] == 1) // error: could not find implicit value for parameter e: Int.
+ // !!! Why one A.one?
+ // (I assume you mean: why _not_ A.one? A.one is shadowed by local one.
+ // but the local one cannot be used yet because it does not have an explicit type.)
+ implicit val one = 2
+ assert(implicitly[Int] == 2)
+ assert(one == 2)
+ }
+ }
+
+ locally {
+ import A._
+ implicit val one: Int = 2
+ assert(implicitly[Int] == 2)
+ assert(one == 2)
+ }
+
+ locally {
+ import A.one // warning: imported `one' is permanently hidden by definition of value one.
+ // !!! Really?
+ //assert(implicitly[Int] == 1)
+ implicit val one = 2
+ assert(implicitly[Int] == 2) // !!! why not 2?
+ assert(one == 2)
+ }
+
+ locally {
+ import A.one
+ assert(implicitly[Int] == 1)
+ implicit val two = 2
+ assert(implicitly[Int] == 2) // !!! Not ambiguous in 2.8.0. Ambiguous in 2.7.6
+ }
+
+ locally {
+ import A._
+ assert(implicitly[Int] == 1)
+ implicit val two = 2
+ import A.{one => _}
+ assert(implicitly[Int] == 2) // !!! Not ambiguous in 2.8.0. Ambiguous in 2.7.6
+ }
+
+ locally {
+ import A.{one => _, _}
+ implicit val two = 2
+ assert(implicitly[Int] == 2) // not ambiguous in 2.8.0 nor in 2.7.6
+ }
+
+}
diff --git a/test/files/neg/t5148.check b/test/files/neg/t5148.check
index 0de4fe2d4c..286ed9e04a 100644
--- a/test/files/neg/t5148.check
+++ b/test/files/neg/t5148.check
@@ -1,6 +1,11 @@
error: missing or invalid dependency detected while loading class file 'Imports.class'.
+Could not access type Wrapper in class scala.tools.nsc.interpreter.IMain.Request,
+because it (or its dependencies) are missing. Check your build definition for
+missing or conflicting dependencies. (Re-run with `-Ylog-classpath` to see the problematic classpath.)
+A full rebuild may help if 'Imports.class' was compiled against an incompatible version of scala.tools.nsc.interpreter.IMain.Request.
+error: missing or invalid dependency detected while loading class file 'Imports.class'.
Could not access type Request in class scala.tools.nsc.interpreter.IMain,
because it (or its dependencies) are missing. Check your build definition for
missing or conflicting dependencies. (Re-run with `-Ylog-classpath` to see the problematic classpath.)
A full rebuild may help if 'Imports.class' was compiled against an incompatible version of scala.tools.nsc.interpreter.IMain.
-one error found
+two errors found
diff --git a/test/files/neg/t5639b.check b/test/files/neg/t5639b.check
new file mode 100644
index 0000000000..faa1766660
--- /dev/null
+++ b/test/files/neg/t5639b.check
@@ -0,0 +1,4 @@
+A_2.scala:6: error: could not find implicit value for parameter e: Int
+ implicitly[Int]
+ ^
+one error found
diff --git a/test/files/neg/t5639b/A_1.scala b/test/files/neg/t5639b/A_1.scala
new file mode 100644
index 0000000000..c5da10eae4
--- /dev/null
+++ b/test/files/neg/t5639b/A_1.scala
@@ -0,0 +1,17 @@
+import Implicits._
+
+class Baz
+
+object Test {
+ implicitly[Int]
+}
+
+object Implicits {
+ implicit val Baz: Int = 0
+ // This implicit was being ignored by `isQualifyingImplicit`
+ // if the classpath contained a class file for `class Baz`.
+ // This is because the package scope contains a speculative
+ // symbol for `object Baz` which is entered by `SymbolLoaders`
+ // before looking inside the class file. (A Java originated
+ // classfile results in the class/module symbol pair.)
+}
diff --git a/test/files/neg/t5639b/A_2.scala b/test/files/neg/t5639b/A_2.scala
new file mode 100644
index 0000000000..2bb36273e0
--- /dev/null
+++ b/test/files/neg/t5639b/A_2.scala
@@ -0,0 +1,11 @@
+import Implicits._
+
+class Baz
+
+object Test {
+ implicitly[Int]
+}
+
+object Implicits {
+ implicit val Baz: Int = 0
+}
diff --git a/test/files/neg/t8534.check b/test/files/neg/t8534.check
new file mode 100644
index 0000000000..297e7c1beb
--- /dev/null
+++ b/test/files/neg/t8534.check
@@ -0,0 +1,4 @@
+t8534.scala:6: error: MyTrait is not an enclosing class
+ class BugTest {def isTheBugHere(in: MyTrait.this.type#SomeData) = false}
+ ^
+one error found
diff --git a/test/files/neg/t8534.scala b/test/files/neg/t8534.scala
new file mode 100644
index 0000000000..f118d22b82
--- /dev/null
+++ b/test/files/neg/t8534.scala
@@ -0,0 +1,7 @@
+object line1 {
+ trait MyTrait
+}
+object line2 {
+ import line2._
+ class BugTest {def isTheBugHere(in: MyTrait.this.type#SomeData) = false}
+}
diff --git a/test/files/neg/t8534b.check b/test/files/neg/t8534b.check
new file mode 100644
index 0000000000..39ffa41194
--- /dev/null
+++ b/test/files/neg/t8534b.check
@@ -0,0 +1,4 @@
+t8534b.scala:3: error: stable identifier required, but foo.type found.
+ type T = foo.type#Foo
+ ^
+one error found
diff --git a/test/files/neg/t8534b.scala b/test/files/neg/t8534b.scala
new file mode 100644
index 0000000000..73b6703a9c
--- /dev/null
+++ b/test/files/neg/t8534b.scala
@@ -0,0 +1,4 @@
+object Test {
+ def foo = ""
+ type T = foo.type#Foo
+}
diff --git a/test/files/neg/t8597.check b/test/files/neg/t8597.check
new file mode 100644
index 0000000000..bc945f9191
--- /dev/null
+++ b/test/files/neg/t8597.check
@@ -0,0 +1,21 @@
+t8597.scala:2: warning: abstract type T in type pattern Some[T] is unchecked since it is eliminated by erasure
+ def nowarn[T] = (null: Any) match { case _: Some[T] => } // warn (did not warn due to SI-8597)
+ ^
+t8597.scala:5: warning: abstract type pattern T is unchecked since it is eliminated by erasure
+ def warn1[T] = (null: Any) match { case _: T => } // warn
+ ^
+t8597.scala:6: warning: non-variable type argument String in type pattern Some[String] is unchecked since it is eliminated by erasure
+ def warn2 = (null: Any) match { case _: Some[String] => } // warn
+ ^
+t8597.scala:7: warning: non-variable type argument Unchecked.this.C in type pattern Some[Unchecked.this.C] is unchecked since it is eliminated by erasure
+ (null: Any) match { case _: Some[C] => } // warn
+ ^
+t8597.scala:18: warning: abstract type T in type pattern Array[T] is unchecked since it is eliminated by erasure
+ def warnArray[T] = (null: Any) match { case _: Array[T] => } // warn (did not warn due to SI-8597)
+ ^
+t8597.scala:26: warning: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure
+ def warnArrayErasure2 = (null: Any) match {case Some(_: Array[Array[List[String]]]) => } // warn
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+6 warnings found
+one error found
diff --git a/test/files/neg/t8597.flags b/test/files/neg/t8597.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/t8597.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/t8597.scala b/test/files/neg/t8597.scala
new file mode 100644
index 0000000000..068e87d91a
--- /dev/null
+++ b/test/files/neg/t8597.scala
@@ -0,0 +1,27 @@
+class Unchecked[C] {
+ def nowarn[T] = (null: Any) match { case _: Some[T] => } // warn (did not warn due to SI-8597)
+
+ // These warned before.
+ def warn1[T] = (null: Any) match { case _: T => } // warn
+ def warn2 = (null: Any) match { case _: Some[String] => } // warn
+ (null: Any) match { case _: Some[C] => } // warn
+
+ // These must remain without warnings. These are excerpts from
+ // related tests that are more exhaustive.
+ class C; class D extends C
+ def okay = (List(new D) : Seq[D]) match { case _: List[C] => case _ => } // nowarn
+ class B2[A, B]
+ class A2[X] extends B2[X, String]
+ def okay2(x: A2[Int]) = x match { case _: B2[Int, _] => true } // nowarn
+ def okay3(x: A2[Int]) = x match { case _: B2[Int, typeVar] => true } // nowarn
+
+ def warnArray[T] = (null: Any) match { case _: Array[T] => } // warn (did not warn due to SI-8597)
+ def nowarnArrayC = (null: Any) match { case _: Array[C] => } // nowarn
+
+ def nowarnArrayTypeVar[T] = (null: Any) match { case _: Array[t] => } // nowarn
+
+ def noWarnArrayErasure1 = (null: Any) match {case Some(_: Array[String]) => } // nowarn
+ def noWarnArrayErasure2 = (null: Any) match {case Some(_: Array[List[_]]) => } // nowarn
+ def noWarnArrayErasure3 = (null: Any) match {case Some(_: Array[Array[List[_]]]) => } // nowarn
+ def warnArrayErasure2 = (null: Any) match {case Some(_: Array[Array[List[String]]]) => } // warn
+}
diff --git a/test/files/neg/t8597b.check b/test/files/neg/t8597b.check
new file mode 100644
index 0000000000..3c45a31337
--- /dev/null
+++ b/test/files/neg/t8597b.check
@@ -0,0 +1,6 @@
+t8597b.scala:18: warning: non-variable type argument T in type pattern Some[T] is unchecked since it is eliminated by erasure
+ case _: Some[T] => // warn
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t8597b.flags b/test/files/neg/t8597b.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/t8597b.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/t8597b.scala b/test/files/neg/t8597b.scala
new file mode 100644
index 0000000000..b29d591cb1
--- /dev/null
+++ b/test/files/neg/t8597b.scala
@@ -0,0 +1,21 @@
+object Unchecked {
+ (null: Any) match {
+ case _: Some[t] =>
+
+ // t is a fresh pattern type variable, despite our attempts to
+ // backtick our way to the enclosing `t`. Under this interpretation,
+ // the absence of an unchecked warning is expected.
+ (null: Any) match {
+ case _: Some[t] => // no warn
+ }
+ (null: Any) match {
+ case _: Some[`t`] => // no warn
+ }
+
+ // here we correctly issue an unchecked warning
+ type T = t
+ (null: Any) match {
+ case _: Some[T] => // warn
+ }
+ }
+}
diff --git a/test/files/neg/t963.check b/test/files/neg/t963.check
index 4dc202c7bd..483e53c77d 100644
--- a/test/files/neg/t963.check
+++ b/test/files/neg/t963.check
@@ -1,9 +1,9 @@
-t963.scala:14: error: stable identifier required, but Test.this.y3.x found.
+t963.scala:14: error: stable identifier required, but y3.x.type found.
val w3 : y3.x.type = y3.x
- ^
-t963.scala:17: error: stable identifier required, but Test.this.y4.x found.
+ ^
+t963.scala:17: error: stable identifier required, but y4.x.type found.
val w4 : y4.x.type = y4.x
- ^
+ ^
t963.scala:10: error: type mismatch;
found : AnyRef{def x: Integer}
required: AnyRef{val x: Integer}
diff --git a/test/files/neg/unchecked-abstract.check b/test/files/neg/unchecked-abstract.check
index 72019082ac..703929dca8 100644
--- a/test/files/neg/unchecked-abstract.check
+++ b/test/files/neg/unchecked-abstract.check
@@ -4,6 +4,9 @@ unchecked-abstract.scala:16: warning: abstract type H in type Contravariant[M.th
unchecked-abstract.scala:21: warning: abstract type H in type Contravariant[M.this.H] is unchecked since it is eliminated by erasure
/* warn */ println(x.isInstanceOf[Contravariant[H]])
^
+unchecked-abstract.scala:22: warning: abstract type T in type Contravariant[M.this.T] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Contravariant[T]])
+ ^
unchecked-abstract.scala:27: warning: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure
/* warn */ println(x.isInstanceOf[Invariant[T]])
^
@@ -22,6 +25,15 @@ unchecked-abstract.scala:36: warning: abstract type H in type Invariant[M.this.H
unchecked-abstract.scala:37: warning: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure
/* warn */ println(x.isInstanceOf[Invariant[T]])
^
+unchecked-abstract.scala:42: warning: abstract type T in type Covariant[M.this.T] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Covariant[T]])
+ ^
+unchecked-abstract.scala:43: warning: abstract type L in type Covariant[M.this.L] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Covariant[L]])
+ ^
+unchecked-abstract.scala:48: warning: abstract type L in type Covariant[M.this.L] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Covariant[L]])
+ ^
error: No warnings can be incurred under -Xfatal-warnings.
-8 warnings found
+12 warnings found
one error found
diff --git a/test/files/pos/t5639.flags b/test/files/pos/t5639.flags
new file mode 100644
index 0000000000..0acce1e7ce
--- /dev/null
+++ b/test/files/pos/t5639.flags
@@ -0,0 +1 @@
+-Xsource:2.12
diff --git a/test/files/pos/t5639/A_1.scala b/test/files/pos/t5639/A_1.scala
new file mode 100644
index 0000000000..c5da10eae4
--- /dev/null
+++ b/test/files/pos/t5639/A_1.scala
@@ -0,0 +1,17 @@
+import Implicits._
+
+class Baz
+
+object Test {
+ implicitly[Int]
+}
+
+object Implicits {
+ implicit val Baz: Int = 0
+ // This implicit was being ignored by `isQualifyingImplicit`
+ // if the classpath contained a class file for `class Baz`.
+ // This is because the package scope contains a speculative
+ // symbol for `object Baz` which is entered by `SymbolLoaders`
+ // before looking inside the class file. (A Java originated
+ // classfile results in the class/module symbol pair.)
+}
diff --git a/test/files/pos/t5639/A_2.scala b/test/files/pos/t5639/A_2.scala
new file mode 100644
index 0000000000..2bb36273e0
--- /dev/null
+++ b/test/files/pos/t5639/A_2.scala
@@ -0,0 +1,11 @@
+import Implicits._
+
+class Baz
+
+object Test {
+ implicitly[Int]
+}
+
+object Implicits {
+ implicit val Baz: Int = 0
+}
diff --git a/test/files/pos/t5639/Bar.scala b/test/files/pos/t5639/Bar.scala
deleted file mode 100644
index f577500acd..0000000000
--- a/test/files/pos/t5639/Bar.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package pack.age
-
-import pack.age.Implicits._
-
-object Quux {
- def baz : Baz = 1
-}
diff --git a/test/files/pos/t5639/Foo.scala b/test/files/pos/t5639/Foo.scala
deleted file mode 100644
index 1a07734a8e..0000000000
--- a/test/files/pos/t5639/Foo.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package pack.age
-
-class Baz
-
-object Implicits {
- implicit def Baz(n: Int): Baz = new Baz
-}
diff --git a/test/files/pos/t7596/A_1.scala b/test/files/pos/t7596/A_1.scala
new file mode 100644
index 0000000000..6303c6d132
--- /dev/null
+++ b/test/files/pos/t7596/A_1.scala
@@ -0,0 +1,10 @@
+trait Driver {
+ abstract class Table
+}
+
+object Config {
+ val driver : Driver = ???
+ def driver(a: Any) = ???
+}
+
+object Sites extends Config.driver.Table
diff --git a/test/files/pos/t7596/B_2.scala b/test/files/pos/t7596/B_2.scala
new file mode 100644
index 0000000000..977e5c8bd1
--- /dev/null
+++ b/test/files/pos/t7596/B_2.scala
@@ -0,0 +1,19 @@
+object Test {
+ locally {
+ Sites: Config.driver.Table
+ }
+}
+
+// Under separate compilation, the pickler is foiled by the
+// overloaded term `Config.driver`, and results in:
+
+// qbin/scalac test/files/pos/t7596/A_1.scala && qbin/scalac -explaintypes test/files/pos/t7596/B_2.scala
+// test/files/pos/t7596/B_2.scala:3: error: type mismatch;
+// found : Sites.type
+// required: Config.driver.Table
+// Sites: Config.driver.Table
+// ^
+// Sites.type <: Config.driver.Table?
+// Driver.this.type = Config.driver.type?
+// false
+// false
\ No newline at end of file
diff --git a/test/files/pos/t7596b/A.scala b/test/files/pos/t7596b/A.scala
new file mode 100644
index 0000000000..65c1bc56ef
--- /dev/null
+++ b/test/files/pos/t7596b/A.scala
@@ -0,0 +1,10 @@
+trait H2Driver{
+ abstract class Table[T]
+}
+
+object Config {
+ val driver : H2Driver = ???
+ def driver(app: Any): H2Driver = ???
+}
+
+class Sites extends Config.driver.Table[String]
diff --git a/test/files/pos/t7596b/B.scala b/test/files/pos/t7596b/B.scala
new file mode 100644
index 0000000000..cbcf149c23
--- /dev/null
+++ b/test/files/pos/t7596b/B.scala
@@ -0,0 +1,6 @@
+class DAOBase[E]{
+ type TableType <: Config.driver.Table[E]
+}
+class SitesDAO extends DAOBase[String]{
+ type TableType = Sites
+}
diff --git a/test/files/pos/t7596c/A_1.scala b/test/files/pos/t7596c/A_1.scala
new file mode 100644
index 0000000000..3e366df477
--- /dev/null
+++ b/test/files/pos/t7596c/A_1.scala
@@ -0,0 +1,11 @@
+trait Driver {
+ abstract class Table
+}
+
+object Config {
+ val driver : Driver = ???
+ val driverUniqueName: driver.type = driver
+ def driver(a: Any) = ???
+}
+
+object Sites extends Config.driver.Table
diff --git a/test/files/pos/t7596c/B_2.scala b/test/files/pos/t7596c/B_2.scala
new file mode 100644
index 0000000000..33da68c1ff
--- /dev/null
+++ b/test/files/pos/t7596c/B_2.scala
@@ -0,0 +1,9 @@
+object Test {
+ locally {
+ Sites: Config.driver.Table
+ }
+}
+
+// This variation worked by avoiding referring to the
+// overloaded term `Config.driver` in the parent type of
+// Sites
\ No newline at end of file
diff --git a/test/files/pos/t7683-stop-after-parser/ThePlugin.scala b/test/files/pos/t7683-stop-after-parser/ThePlugin.scala
new file mode 100644
index 0000000000..cd800781dc
--- /dev/null
+++ b/test/files/pos/t7683-stop-after-parser/ThePlugin.scala
@@ -0,0 +1,31 @@
+package scala.test.plugins
+
+import scala.tools.nsc
+import nsc.Global
+import nsc.Phase
+import nsc.plugins.Plugin
+import nsc.plugins.PluginComponent
+
+class ThePlugin(val global: Global) extends Plugin {
+ import global._
+
+ val name = "timebomb"
+ val description = "Explodes if run. Maybe I haven't implemented it yet."
+ val components = List[PluginComponent](thePhase1)
+
+ private object thePhase1 extends PluginComponent {
+ val global = ThePlugin.this.global
+
+ val runsAfter = List[String]("parser")
+ override val runsBefore = List[String]("namer")
+ val phaseName = ThePlugin.this.name
+
+ def newPhase(prev: Phase) = new ThePhase(prev)
+ }
+
+ private class ThePhase(prev: Phase) extends Phase(prev) {
+ override def name = ThePlugin.this.name
+ override def run = ???
+ }
+}
+
diff --git a/test/files/pos/t7683-stop-after-parser/sample_2.flags b/test/files/pos/t7683-stop-after-parser/sample_2.flags
new file mode 100644
index 0000000000..99672cdfd3
--- /dev/null
+++ b/test/files/pos/t7683-stop-after-parser/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xplugin-require:timebomb -Ystop-after:parser
diff --git a/test/files/pos/t7683-stop-after-parser/sample_2.scala b/test/files/pos/t7683-stop-after-parser/sample_2.scala
new file mode 100644
index 0000000000..7eb11b8204
--- /dev/null
+++ b/test/files/pos/t7683-stop-after-parser/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the explosive plugin disabled
+object Sample extends App {
+}
diff --git a/test/files/pos/t7683-stop-after-parser/scalac-plugin.xml b/test/files/pos/t7683-stop-after-parser/scalac-plugin.xml
new file mode 100644
index 0000000000..2558d6fd03
--- /dev/null
+++ b/test/files/pos/t7683-stop-after-parser/scalac-plugin.xml
@@ -0,0 +1,5 @@
+<plugin>
+ <name>ignored</name>
+ <classname>scala.test.plugins.ThePlugin</classname>
+</plugin>
+
diff --git a/test/files/pos/t9018.scala b/test/files/pos/t9018.scala
new file mode 100644
index 0000000000..7fb4cf21b3
--- /dev/null
+++ b/test/files/pos/t9018.scala
@@ -0,0 +1,16 @@
+object TestObject {
+
+ def m(i: Int): AnyRef = i match {
+ case 0 => new C()
+ case 1 => Some(E.A).getOrElse("")
+ }
+
+ class C extends Ordered[C] {
+ def compare(that: C): Int = ???
+ }
+
+ object E extends Enumeration {
+ type CharacterClass = Value
+ val A = Value
+ }
+}
diff --git a/test/files/run/iterator-concat.check b/test/files/run/iterator-concat.check
deleted file mode 100644
index 23835b07ae..0000000000
--- a/test/files/run/iterator-concat.check
+++ /dev/null
@@ -1,4 +0,0 @@
-100
-1000
-10000
-100000
diff --git a/test/files/run/iterator-concat.scala b/test/files/run/iterator-concat.scala
deleted file mode 100644
index f11363410f..0000000000
--- a/test/files/run/iterator-concat.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-object Test {
- // Create `size` Function0s, each of which evaluates to an Iterator
- // which produces 1. Then fold them over ++ to get a single iterator,
- // which should sum to "size".
- def mk(size: Int): Iterator[Int] = {
- val closures = (1 to size).toList.map(x => (() => Iterator(1)))
- closures.foldLeft(Iterator.empty: Iterator[Int])((res, f) => res ++ f())
- }
- def main(args: Array[String]): Unit = {
- println(mk(100).sum)
- println(mk(1000).sum)
- println(mk(10000).sum)
- println(mk(100000).sum)
- }
-}
diff --git a/test/files/run/iterator-iterate-lazy.scala b/test/files/run/iterator-iterate-lazy.scala
deleted file mode 100644
index 92b170062e..0000000000
--- a/test/files/run/iterator-iterate-lazy.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object Test {
- def main(args: Array[String]): Unit = {
- Iterator.iterate((1 to 5).toList)(_.tail).takeWhile(_.nonEmpty).map(_.head).toList
- }
-}
diff --git a/test/files/run/iterators.check b/test/files/run/iterators.check
deleted file mode 100644
index abaf80ff38..0000000000
--- a/test/files/run/iterators.check
+++ /dev/null
@@ -1,14 +0,0 @@
-test check_from was successful
-test check_range was successful
-test check_range2 was successful
-test check_range3 was successful
-test check_take was successful
-test check_drop was successful
-test check_slice was successful
-test check_foreach was successful
-test check_forall was successful
-test check_fromArray was successful
-test check_toSeq was successful
-test check_indexOf was successful
-test check_findIndexOf was successful
-
diff --git a/test/files/run/iterators.scala b/test/files/run/iterators.scala
deleted file mode 100644
index d54da3d3ba..0000000000
--- a/test/files/run/iterators.scala
+++ /dev/null
@@ -1,168 +0,0 @@
-//############################################################################
-// Iterators
-//############################################################################
-
-//############################################################################
-
-import scala.language.postfixOps
-
-object Test {
-
- def check_from: Int = {
- val it1 = Iterator.from(-1)
- val it2 = Iterator.from(0, -1)
- it1.next + it2.next
- }
-
- def check_range: Int = {
- val xs1 = Iterator.range(0, 10, 2) toList;
- val xs2 = Iterator.range(0, 10, -2) toList;
- val xs3 = Iterator.range(10, 0, -2) toList;
- val xs4 = Iterator.range(10, 0, 2) toList;
- val xs5 = Iterator.range(0, 10, 11) toList;
- xs1.length + xs2.length + xs3.length + xs4.length + xs5.length
- }
-
- def check_range2: Int = {
- val r1start = 0
- val r1end = 10
- val r1step = 1
- val r1 = Iterator.range(r1start, r1end, r1step) toList;
- val r2 = Iterator.range(r1start, r1end, r1step + 1) toList;
- val r3 = Iterator.range(r1end, r1start, -r1step) toList;
- val r4 = Iterator.range(0, 10, 11) toList;
- // 10 + 5 + 10 + 1
- r1.length + r2.length + r3.length + r4.length
- }
-
- def check_range3: Int = {
- def trues(xs: List[Boolean]) = xs.foldLeft(0)((a, b) => if (b) a+1 else a)
- val r1 = Iterator.range(0, 10)
- val xs1 = List(r1 contains 5, r1 contains 6)
- val r2a = Iterator.range(0, 10, 2)
- val r2b = Iterator.range(0, 10, 2)
- val xs2 = List(r2a contains 5, r2b contains 6)
- val r3 = Iterator.range(0, 10, 11)
- val xs3 = List(r3 contains 5, r3 contains 6)
- // 2 + 1 + 0
- trues(xs1) + trues(xs2) + trues(xs3)
- }
-
- def check_take: Int = {
- val it1 = Iterator.from(0)
- val xs1 = it1 take 10 toList;
- xs1.length
- }
-
- def check_drop: Int = {
- val tests = Array(
- Iterator.from(0).take(5).drop(1).toSeq sameElements Seq(1, 2, 3, 4),
- Iterator.from(0).take(5).drop(3).toSeq sameElements Seq(3, 4),
-
- Iterator.from(0).take(5).drop(5).toSeq sameElements Seq(),
- Iterator.from(0).take(5).drop(10).toSeq sameElements Seq(),
-
- Iterator.from(0).take(5).drop(0).toSeq sameElements Seq(0, 1, 2, 3, 4),
- Iterator.from(0).take(5).drop(-1).toSeq sameElements Seq(0, 1, 2, 3, 4),
-
- Iterator.from(0).take(5).drop(1).map(2 * _).toSeq sameElements Seq(2, 4, 6, 8),
- Iterator.from(0).take(5).map(2 * _).drop(1).toSeq sameElements Seq(2, 4, 6, 8),
-
- Iterator.from(0).take(5).drop(1).drop(2).toSeq sameElements Seq(3, 4),
- Iterator.from(0).take(5).drop(2).drop(1).toSeq sameElements Seq(3, 4)
- )
- tests.count(result => result)
- }
-
- def check_slice: Int = {
- val tests = Array(
- Iterator.from(0).slice(3, 7).toSeq sameElements Seq(3, 4, 5, 6),
- Iterator.from(0).slice(3, 3).toSeq sameElements Seq(),
- Iterator.from(0).slice(-1, 3).toSeq sameElements Seq(0, 1, 2),
- Iterator.from(0).slice(3, -1).toSeq sameElements Seq(),
-
- Iterator.from(0).slice(3, 7).map(2 * _).toSeq sameElements Seq(6, 8, 10, 12),
- Iterator.from(0).map(2 * _).slice(3, 7).toSeq sameElements Seq(6, 8, 10, 12),
-
- Iterator.from(0).slice(3, 7).drop(1).toSeq sameElements Seq(4, 5, 6),
- Iterator.from(0).drop(1).slice(3, 7).toSeq sameElements Seq(4, 5, 6, 7),
-
- Iterator.from(0).slice(3, 7).slice(1, 3).toSeq sameElements Seq(4, 5),
- Iterator.from(0).slice(3, 7).slice(1, 10).toSeq sameElements Seq(4, 5, 6)
- )
- tests.count(result => result)
- }
-
- def check_foreach: Int = {
- val it1 = Iterator.from(0) take 20
- var n = 0
- it1 foreach { n += _ }
- n
- }
-
- def check_forall: Int = {
- val it1 = Iterator.from(0)
- val it2 = Iterator.from(1)
- 0
- }
-
- def check_fromArray: Int = { // ticket #429
- val a = List(1, 2, 3, 4).toArray
- var xs0 = a.iterator.toList;
- var xs1 = a.slice(0, 1).iterator.toList;
- var xs2 = a.slice(0, 2).iterator.toList;
- var xs3 = a.slice(0, 3).iterator.toList;
- var xs4 = a.slice(0, 4).iterator.toList;
- xs0.length + xs1.length + xs2.length + xs3.length + xs4.length
- }
-
- def check_toSeq: String =
- List(1, 2, 3, 4, 5).iterator.toSeq.mkString("x")
-
- def check_indexOf: String = {
- val i = List(1, 2, 3, 4, 5).indexOf(4)
- val j = List(1, 2, 3, 4, 5).indexOf(16)
- "" + i + "x" + j
- }
-
- def check_findIndexOf: String = {
- val i = List(1, 2, 3, 4, 5).indexWhere { x: Int => x >= 4 }
- val j = List(1, 2, 3, 4, 5).indexWhere { x: Int => x >= 16 }
- "" + i + "x" + j
- }
-
- def check_success[A](name: String, closure: => A, expected: A) {
- print("test " + name)
- try {
- val actual: A = closure
- if (actual == expected)
- print(" was successful")
- else
- print(" failed: expected "+ expected +", found "+ actual)
- }
- catch {
- case exception: Throwable =>
- print(" raised exception " + exception)
- }
- println()
- }
-
- def main(args: Array[String]) {
- check_success("check_from", check_from, -1)
- check_success("check_range", check_range, 11)
- check_success("check_range2", check_range2, 26)
- check_success("check_range3", check_range3, 3)
- check_success("check_take", check_take, 10)
- check_success("check_drop", check_drop, 10)
- check_success("check_slice", check_slice, 10)
- check_success("check_foreach", check_foreach, 190)
- check_success("check_forall", check_forall, 0)
- check_success("check_fromArray",check_fromArray, 14)
- check_success("check_toSeq", check_toSeq, "1x2x3x4x5")
- check_success("check_indexOf", check_indexOf, "3x-1")
- check_success("check_findIndexOf", check_findIndexOf, "3x-1")
- println()
- }
-}
-
-//############################################################################
diff --git a/test/files/run/priorityQueue.scala b/test/files/run/priorityQueue.scala
deleted file mode 100644
index 327d8bf137..0000000000
--- a/test/files/run/priorityQueue.scala
+++ /dev/null
@@ -1,373 +0,0 @@
-
-
-
-import scala.collection.mutable.PriorityQueue
-
-
-
-
-
-
-// populate a priority queue a few different ways and make sure they all seem equal
-object Test {
-
- def main(args: Array[String]) {
- // testInsertionsAndEqualities
- // testIntensiveEnqueueDequeue
- // testTails
- // testInits
- // testFilters
- // testDrops
- // testEquality
- // testMisc
- // testReverse
- // testToList
- // testForeach
- }
-
- // def testInsertionsAndEqualities {
- // import scala.util.Random.nextInt
- // val pq1 = new PriorityQueue[String]
- // val pq2 = new PriorityQueue[String]
- // val pq3 = new PriorityQueue[String]
- // val pq4 = new PriorityQueue[String]
-
- // val strings = (1 to 20).toList map (i => List.fill((Math.abs(nextInt % 20)) + 1)("x").mkString)
-
- // pq1 ++= strings
- // pq2 ++= strings.reverse
- // for (s <- strings) pq3 += s
- // for (s <- strings.reverse) pq4 += s
-
- // val pqs = List(pq1, pq2, pq3, pq4, pq1.clone, pq2.clone)
-
- // for (queue1 <- pqs ; queue2 <- pqs) {
- // val l1: List[String] = queue1.dequeueAll[String, List[String]]
- // val l2: List[String] = queue2.dequeueAll[String, List[String]]
- // assert(l1 == l2)
- // assert(queue1.max == queue2.max)
- // }
-
- // assertPriorityDestructive(pq1)
- // }
-
- // not a sequence anymore, Mildred
- // def testIndexing {
- // val pq = new PriorityQueue[Char]
- // "The quick brown fox jumps over the lazy dog".foreach(pq += _)
-
- // // val iter = pq.iterator
- // // while (iter.hasNext) println("`" + iter.next + "`")
- // assert(pq(0) == 'z')
- // assert(pq(1) == 'y')
- // assert(pq(2) == 'x')
- // assert(pq(3) == 'w')
- // assert(pq(4) == 'v')
- // assert(pq(5) == 'u')
- // assert(pq(7) == 't')
- // assert(pq(8) == 's')
- // assert(pq(9) == 'r')
- // assert(pq(10) == 'r')
-
- // pq.clear
- // "abcdefghijklmnopqrstuvwxyz".foreach(pq += _)
- // for (i <- 0 until 26) assert(pq(i) == ('z' - i))
-
- // val intpq = new PriorityQueue[Int]
- // val intlst = new collection.mutable.ArrayBuffer ++ (0 until 100)
- // val random = new util.Random(101)
- // while (intlst.nonEmpty) {
- // val idx = random.nextInt(intlst.size)
- // intpq += intlst(idx)
- // intlst.remove(idx)
- // }
- // for (i <- 0 until 100) assert(intpq(i) == (99 - i))
- // }
-
- // def testTails {
- // val pq = new PriorityQueue[Int]
- // for (i <- 0 until 10) pq += i * 4321 % 200
-
- // assert(pq.size == 10)
- // assert(pq.nonEmpty)
-
- // val tailpq = pq.tail
- // // pq.printstate
- // // tailpq.printstate
- // assert(tailpq.size == 9)
- // assert(tailpq.nonEmpty)
- // assertPriorityDestructive(tailpq)
- // }
-
- // def assertPriorityDestructive[A](pq: PriorityQueue[A])(implicit ord: Ordering[A]) {
- // import ord._
- // var prev: A = null.asInstanceOf[A]
- // while (pq.nonEmpty) {
- // val curr = pq.dequeue
- // if (prev != null) assert(curr <= prev)
- // prev = curr
- // }
- // }
-
- // def testInits {
- // val pq = new PriorityQueue[Long]
- // for (i <- 0 until 20) pq += (i + 313) * 111 % 300
-
- // assert(pq.size == 20)
-
- // val initpq = pq.init
- // assert(initpq.size == 19)
- // assertPriorityDestructive(initpq)
- // }
-
- // def testFilters {
- // val pq = new PriorityQueue[String]
- // for (i <- 0 until 100) pq += "Some " + (i * 312 % 200)
-
- // val filpq = pq.filter(_.indexOf('0') != -1)
- // assertPriorityDestructive(filpq)
- // }
-
- // def testIntensiveEnqueueDequeue {
- // val pq = new PriorityQueue[Int]
-
- // testIntensive(1000, pq)
- // pq.clear
- // testIntensive(200, pq)
- // }
-
- // def testIntensive(sz: Int, pq: PriorityQueue[Int]) {
- // val lst = new collection.mutable.ArrayBuffer[Int] ++ (0 until sz)
- // val rand = new util.Random(7)
- // while (lst.nonEmpty) {
- // val idx = rand.nextInt(lst.size)
- // pq.enqueue(lst(idx))
- // lst.remove(idx)
- // if (rand.nextDouble < 0.25 && pq.nonEmpty) pq.dequeue
- // assertPriority(pq)
- // }
- // }
-
- // def testDrops {
- // val pq = new PriorityQueue[Int]
- // pq ++= (0 until 100)
- // val droppq = pq.drop(50)
- // assertPriority(droppq)
-
- // pq.clear
- // pq ++= droppq
- // assertPriorityDestructive(droppq)
- // assertPriority(pq)
- // assertPriorityDestructive(pq)
- // }
-
- // // your sequence days have ended, foul priority queue
- // // def testUpdates {
- // // val pq = new PriorityQueue[Int]
- // // pq ++= (0 until 36)
- // // assertPriority(pq)
-
- // // pq(0) = 100
- // // assert(pq(0) == 100)
- // // assert(pq.dequeue == 100)
- // // assertPriority(pq)
-
- // // pq.clear
-
- // // pq ++= (1 to 100)
- // // pq(5) = 200
- // // assert(pq(0) == 200)
- // // assert(pq(1) == 100)
- // // assert(pq(2) == 99)
- // // assert(pq(3) == 98)
- // // assert(pq(4) == 97)
- // // assert(pq(5) == 96)
- // // assert(pq(6) == 94)
- // // assert(pq(7) == 93)
- // // assert(pq(98) == 2)
- // // assert(pq(99) == 1)
- // // assertPriority(pq)
-
- // // pq(99) = 450
- // // assert(pq(0) == 450)
- // // assert(pq(1) == 200)
- // // assert(pq(99) == 2)
- // // assertPriority(pq)
-
- // // pq(1) = 0
- // // assert(pq(1) == 100)
- // // assert(pq(99) == 0)
- // // assertPriority(pq)
- // // assertPriorityDestructive(pq)
- // // }
-
- // def testEquality {
- // val pq1 = new PriorityQueue[Int]
- // val pq2 = new PriorityQueue[Int]
-
- // pq1 ++= (0 until 50)
- // var i = 49
- // while (i >= 0) {
- // pq2 += i
- // i -= 1
- // }
- // assert(pq1 == pq2)
- // assertPriority(pq2)
-
- // pq1 += 100
- // assert(pq1 != pq2)
- // pq2 += 100
- // assert(pq1 == pq2)
- // pq2 += 200
- // assert(pq1 != pq2)
- // pq1 += 200
- // assert(pq1 == pq2)
- // assertPriorityDestructive(pq1)
- // assertPriorityDestructive(pq2)
- // }
-
- // def testMisc {
- // val pq = new PriorityQueue[Int]
- // pq ++= (0 until 100)
- // assert(pq.size == 100)
-
- // val (p1, p2) = pq.partition(_ < 50)
- // assertPriorityDestructive(p1)
- // assertPriorityDestructive(p2)
-
- // val spq = pq.slice(25, 75)
- // assertPriorityDestructive(spq)
-
- // pq.clear
- // pq ++= (0 until 10)
- // pq += 5
- // assert(pq.size == 11)
-
- // val ind = pq.lastIndexWhere(_ == 5)
- // assert(ind == 5)
- // assertPriorityDestructive(pq)
-
- // pq.clear
- // pq ++= (0 until 10)
- // assert(pq.lastIndexWhere(_ == 9) == 0)
- // assert(pq.lastIndexOf(8) == 1)
- // assert(pq.lastIndexOf(7) == 2)
-
- // pq += 5
- // pq += 9
- // assert(pq.lastIndexOf(9) == 1)
- // assert(pq.lastIndexWhere(_ % 2 == 1) == 10)
- // assert(pq.lastIndexOf(5) == 6)
-
- // val lst = pq.reverseIterator.toList
- // for (i <- 0 until 5) assert(lst(i) == i)
- // assert(lst(5) == 5)
- // assert(lst(6) == 5)
- // assert(lst(7) == 6)
- // assert(lst(8) == 7)
- // assert(lst(9) == 8)
- // assert(lst(10) == 9)
- // assert(lst(11) == 9)
-
- // pq.clear
- // assert(pq.reverseIterator.toList.isEmpty)
-
- // pq ++= (50 to 75)
- // assert(pq.lastIndexOf(70) == 5)
-
- // pq += 55
- // pq += 70
- // assert(pq.lastIndexOf(70) == 6)
- // assert(pq.lastIndexOf(55) == 22)
- // assert(pq.lastIndexOf(55, 21) == 21)
- // assert(pq.lastIndexWhere(_ > 54) == 22)
- // assert(pq.lastIndexWhere(_ > 54, 21) == 21)
- // assert(pq.lastIndexWhere(_ > 69, 5) == 5)
- // }
-
- // def testReverse {
- // val pq = new PriorityQueue[(Int, Int)]
- // pq ++= (for (i <- 0 until 10) yield (i, i * i % 10))
-
- // assert(pq.reverse.size == pq.reverseIterator.toList.size)
- // assert((pq.reverse zip pq.reverseIterator.toList).forall(p => p._1 == p._2))
- // assert(pq.reverse.sameElements(pq.reverseIterator.toSeq))
- // assert(pq.reverse(0)._1 == pq(9)._1)
- // assert(pq.reverse(1)._1 == pq(8)._1)
- // assert(pq.reverse(4)._1 == pq(5)._1)
- // assert(pq.reverse(9)._1 == pq(0)._1)
-
- // pq += ((7, 7))
- // pq += ((7, 9))
- // pq += ((7, 8))
- // assert(pq.reverse.reverse == pq)
- // assert(pq.reverse.lastIndexWhere(_._2 == 6) == 6)
- // assertPriorityDestructive(pq.reverse.reverse)
-
- // val iq = new PriorityQueue[Int]
- // iq ++= (0 until 50)
- // assert(iq.reverse == iq.reverseIterator.toSeq)
- // assert(iq.reverse.reverse == iq)
-
- // iq += 25
- // iq += 40
- // iq += 10
- // assert(iq.reverse == iq.reverseIterator.toList)
- // assert(iq.reverse.reverse == iq)
- // assert(iq.reverse.lastIndexWhere(_ == 10) == 11)
- // assertPriorityDestructive(iq.reverse.reverse)
- // }
-
- // def testToList {
- // val pq = new PriorityQueue[Int]
-
- // pq += 1
- // pq += 4
- // pq += 0
- // pq += 5
- // pq += 3
- // pq += 2
- // assert(pq.toList == pq)
- // assert(pq == List(5, 4, 3, 2, 1, 0))
- // assert(pq.reverse == List(0, 1, 2, 3, 4, 5))
-
- // pq.clear
- // for (i <- -50 until 50) pq += i
- // assert(pq.toList == pq)
- // assert(pq.toList == (-50 until 50).reverse)
- // }
-
- // def testForeach {
- // val pq = new PriorityQueue[Char]
-
- // pq += 't'
- // pq += 'o'
- // pq += 'b'
- // pq += 'y'
- // val sbf = new StringBuilder
- // val sbi = new StringBuilder
- // pq.foreach(sbf += _)
- // pq.iterator.foreach(sbi += _)
- // assert(sbf.toString == sbi.toString)
- // assert(sbf.toString == "ytob")
- // }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/files/run/t2866.check b/test/files/run/t2866.check
new file mode 100644
index 0000000000..7f52da85fb
--- /dev/null
+++ b/test/files/run/t2866.check
@@ -0,0 +1,3 @@
+t2866.scala:30: warning: imported `one' is permanently hidden by definition of value one
+ import A.one // warning: imported `one' is permanently hidden by definition of value one.
+ ^
diff --git a/test/files/run/t2866.scala b/test/files/run/t2866.scala
new file mode 100644
index 0000000000..8059107583
--- /dev/null
+++ b/test/files/run/t2866.scala
@@ -0,0 +1,44 @@
+// for 2.7.x compatibility
+
+object A {
+ implicit val one = 1
+}
+
+object Test extends App {
+
+ locally {
+ import A._
+ locally {
+ // assert(implicitly[Int] == 1) // error: could not find implicit value for parameter e: Int.
+ // !!! Why one A.one?
+ // (I assume you mean: why _not_ A.one? A.one is shadowed by local one.
+ // but the local one cannot be used yet because it does not have an explicit type.)
+ implicit val one = 2
+ assert(implicitly[Int] == 2)
+ assert(one == 2)
+ }
+ }
+
+ locally {
+ import A._
+ implicit val one: Int = 2
+ assert(implicitly[Int] == 2)
+ assert(one == 2)
+ }
+
+ locally {
+ import A.one // warning: imported `one' is permanently hidden by definition of value one.
+ // !!! Really?
+ //assert(implicitly[Int] == 1)
+ implicit val one = 2
+ assert(implicitly[Int] == 2) // !!! why not 2?
+ assert(one == 2)
+ }
+
+ locally {
+ import A.{one => _, _}
+ implicit val two = 2
+ assert(implicitly[Int] == 2) // not ambiguous in 2.8.0 nor in 2.7.6
+ }
+
+}
diff --git a/test/files/run/t3516.check b/test/files/run/t3516.check
deleted file mode 100644
index d0d10d82fa..0000000000
--- a/test/files/run/t3516.check
+++ /dev/null
@@ -1,3 +0,0 @@
-1
-1
-21
diff --git a/test/files/run/t3516.scala b/test/files/run/t3516.scala
deleted file mode 100644
index aa302ce85a..0000000000
--- a/test/files/run/t3516.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-object Test {
- def mkIterator = (1 to 5).iterator map (x => { println(x) ; x })
- def mkInfinite = Iterator continually { println(1) ; 1 }
-
- def main(args: Array[String]): Unit = {
- // Stream is strict in its head so we should see 1 from each of them.
- val s1 = mkIterator.toStream
- val s2 = mkInfinite.toStream
- // back and forth without slipping into nontermination.
- println((Stream from 1).toIterator.drop(10).toStream.drop(10).toIterator.next)
- ()
- }
-}
diff --git a/test/files/run/t5938.scala b/test/files/run/t5938.scala
new file mode 100644
index 0000000000..59a95ac37f
--- /dev/null
+++ b/test/files/run/t5938.scala
@@ -0,0 +1,35 @@
+import scala.tools.partest.DirectTest
+
+object Test extends DirectTest {
+
+ override def extraSettings: String =
+ s"-usejavacp -d ${testOutput.path}"
+
+ override def code = """
+object O extends C {
+ def main(args: Array[String]): Unit = {
+ }
+ // Static forwarder for foo and setter_foo_= added more than once in a multi-run compile.
+}
+ """.trim
+
+ override def show(): Unit = {
+ val global = newCompiler()
+ Console.withErr(System.out) {
+ compileString(global)(code)
+ compileString(global)(code)
+ loadClass // was "duplicate name and signature in class X"
+ }
+ }
+
+ def loadClass: Class[_] = {
+ val cl = new java.net.URLClassLoader(Array(testOutput.toFile.toURL));
+ cl.loadClass("O")
+ }
+}
+
+trait T {
+ val foo: String = ""
+}
+class C extends T
+
diff --git a/test/files/run/t6440.check b/test/files/run/t6440.check
index 2358f08fcc..4d8618182b 100644
--- a/test/files/run/t6440.check
+++ b/test/files/run/t6440.check
@@ -1,5 +1,4 @@
-pos: source-newSource1.scala,line-9,offset=109 missing or invalid dependency detected while loading class file 'U.class'.
-Could not access term pack1 in package <root>,
-because it (or its dependencies) are missing. Check your build definition for
-missing or conflicting dependencies. (Re-run with `-Ylog-classpath` to see the problematic classpath.)
-A full rebuild may help if 'U.class' was compiled against an incompatible version of <root>. ERROR
+pos: source-newSource1.scala,line-9,offset=109 reference to U is ambiguous;
+it is imported twice in the same scope by
+import pack2._
+and import X._ ERROR
diff --git a/test/files/run/t6440.scala b/test/files/run/t6440.scala
index 5a3a4150d9..94eda3642e 100644
--- a/test/files/run/t6440.scala
+++ b/test/files/run/t6440.scala
@@ -41,7 +41,7 @@ object Test extends StoreReporterDirectTest {
assert(tClass.delete())
assert(pack1.delete())
- // bad symbolic reference error expected (but no stack trace!)
+ // should report ambiguous import, despite the fact that a parent of pack2.U is absent
compileCode(app)
println(filteredInfos.mkString("\n"))
}
diff --git a/test/files/run/t6502.check b/test/files/run/t6502.check
new file mode 100644
index 0000000000..95d36ee221
--- /dev/null
+++ b/test/files/run/t6502.check
@@ -0,0 +1,8 @@
+test1 res1: true
+test1 res2: true
+test2 res1: true
+test2 res2: true
+test3 res1: true
+test3 res2: true
+test4 res1: true
+test4 res2: true
diff --git a/test/files/run/t6502.scala b/test/files/run/t6502.scala
new file mode 100644
index 0000000000..ced1b5812d
--- /dev/null
+++ b/test/files/run/t6502.scala
@@ -0,0 +1,101 @@
+import scala.tools.partest._
+import java.io.File
+import scala.tools.nsc.interpreter.ILoop
+
+object Test extends StoreReporterDirectTest {
+ def code = ???
+
+ def compileCode(code: String, jarFileName: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", s"${testOutput.path}/$jarFileName"))(code)
+ }
+
+ def app1 = """
+ package test
+
+ object Test extends App {
+ def test(): Unit = {
+ println("testing...")
+ }
+ }"""
+
+ def app2 = """
+ package test
+
+ object Test extends App {
+ def test(): Unit = {
+ println("testing differently...")
+ }
+ }"""
+
+ def app3 = """
+ package test
+
+ object Test3 extends App {
+ def test(): Unit = {
+ println("new object in existing package")
+ }
+ }"""
+
+ def test1(): Unit = {
+ val jar = "test1.jar"
+ compileCode(app1, jar)
+
+ val output = ILoop.run(List(s":require ${testOutput.path}/$jar", "test.Test.test()"))
+ val lines = output.split("\n")
+ val res1 = lines(4).contains("Added") && lines(4).contains("test1.jar")
+ val res2 = lines(lines.length-3).contains("testing...")
+
+ println(s"test1 res1: $res1")
+ println(s"test1 res2: $res2")
+ }
+
+ def test2(): Unit = {
+ // should reject jars with conflicting entries
+ val jar1 = "test1.jar"
+ val jar2 = "test2.jar"
+ compileCode(app2, jar2)
+
+ val output = ILoop.run(List(s":require ${testOutput.path}/$jar1", s":require ${testOutput.path}/$jar2"))
+ val lines = output.split("\n")
+ val res1 = lines(4).contains("Added") && lines(4).contains("test1.jar")
+ val res2 = lines(lines.length-3).contains("test2.jar") && lines(lines.length-3).contains("existing classpath entries conflict")
+
+ println(s"test2 res1: $res1")
+ println(s"test2 res2: $res2")
+ }
+
+ def test3(): Unit = {
+ // should accept jars with overlapping packages, but no conflicts
+ val jar1 = "test1.jar"
+ val jar3 = "test3.jar"
+ compileCode(app3, jar3)
+
+ val output = ILoop.run(List(s":require ${testOutput.path}/$jar1", s":require ${testOutput.path}/$jar3", "test.Test3.test()"))
+ val lines = output.split("\n")
+ val res1 = lines(4).contains("Added") && lines(4).contains("test1.jar")
+ val res2 = lines(lines.length-3).contains("new object in existing package")
+
+ println(s"test3 res1: $res1")
+ println(s"test3 res2: $res2")
+ }
+
+ def test4(): Unit = {
+ // twice the same jar should be rejected
+ val jar1 = "test1.jar"
+ val output = ILoop.run(List(s":require ${testOutput.path}/$jar1", s":require ${testOutput.path}/$jar1"))
+ val lines = output.split("\n")
+ val res1 = lines(4).contains("Added") && lines(4).contains("test1.jar")
+ val res2 = lines(lines.length-3).contains("test1.jar") && lines(lines.length-3).contains("existing classpath entries conflict")
+
+ println(s"test4 res1: $res1")
+ println(s"test4 res2: $res2")
+ }
+
+ def show(): Unit = {
+ test1()
+ test2()
+ test3()
+ test4()
+ }
+}
diff --git a/test/files/run/t7407.flags b/test/files/run/t7407.flags
index c8547a27dc..be4ef0798a 100644
--- a/test/files/run/t7407.flags
+++ b/test/files/run/t7407.flags
@@ -1 +1 @@
--Ynooptimise -Ybackend:GenBCode
+-Ynooptimise -Yopt:l:none -Ybackend:GenBCode
diff --git a/test/files/run/t8253.check b/test/files/run/t8253.check
new file mode 100644
index 0000000000..0b4cb2d1f7
--- /dev/null
+++ b/test/files/run/t8253.check
@@ -0,0 +1,40 @@
+
+<sample xmlns='ns1'/>
+{
+ var $tmpscope: _root_.scala.xml.NamespaceBinding = $scope;
+ $tmpscope = new _root_.scala.xml.NamespaceBinding(null, "ns1", $tmpscope);
+ {
+ val $scope: _root_.scala.xml.NamespaceBinding = $tmpscope;
+ new _root_.scala.xml.Elem(null, "sample", _root_.scala.xml.Null, $scope, true)
+ }
+}
+
+<sample xmlns={identity(ns1)}/>
+{
+ var $tmpscope: _root_.scala.xml.NamespaceBinding = $scope;
+ $tmpscope = new _root_.scala.xml.NamespaceBinding(null, ns1, $tmpscope);
+ {
+ val $scope: _root_.scala.xml.NamespaceBinding = $tmpscope;
+ new _root_.scala.xml.Elem(null, "sample", _root_.scala.xml.Null, $scope, true)
+ }
+}
+
+<sample xmlns:foo='ns1'/>
+{
+ var $tmpscope: _root_.scala.xml.NamespaceBinding = $scope;
+ $tmpscope = new _root_.scala.xml.NamespaceBinding("foo", "ns1", $tmpscope);
+ {
+ val $scope: _root_.scala.xml.NamespaceBinding = $tmpscope;
+ new _root_.scala.xml.Elem(null, "sample", _root_.scala.xml.Null, $scope, true)
+ }
+}
+
+<sample xmlns:foo={identity(ns1)}/>
+{
+ var $tmpscope: _root_.scala.xml.NamespaceBinding = $scope;
+ $tmpscope = new _root_.scala.xml.NamespaceBinding("foo", ns1, $tmpscope);
+ {
+ val $scope: _root_.scala.xml.NamespaceBinding = $tmpscope;
+ new _root_.scala.xml.Elem(null, "sample", _root_.scala.xml.Null, $scope, true)
+ }
+}
diff --git a/test/files/run/t8253.scala b/test/files/run/t8253.scala
new file mode 100644
index 0000000000..c4800b4491
--- /dev/null
+++ b/test/files/run/t8253.scala
@@ -0,0 +1,14 @@
+object Test extends App {
+ import reflect.runtime.universe._ // not using the XML library in compiler tests
+
+ def show(code: String, t: Tree) = println(s"\n$code\n$t")
+
+ val ns1 = "ns1"
+ show("<sample xmlns='ns1'/>", q"<sample xmlns='ns1'/>")
+ show("<sample xmlns={identity(ns1)}/>", q"<sample xmlns={ns1}/>")
+ show("<sample xmlns:foo='ns1'/>", q"<sample xmlns:foo='ns1'/>")
+ show("<sample xmlns:foo={identity(ns1)}/>", q"<sample xmlns:foo={ns1}/>")
+
+ // `identity(foo)` used to match the overly permissive match in SymbolXMLBuilder
+ // which was intended to more specifically match `_root_.scala.xml.Text(...)`
+}
diff --git a/test/files/run/t8502.scala b/test/files/run/t8502.scala
new file mode 100644
index 0000000000..903e573711
--- /dev/null
+++ b/test/files/run/t8502.scala
@@ -0,0 +1,41 @@
+import scala.tools.partest._
+import java.io.File
+
+object Test extends StoreReporterDirectTest {
+ def code = ???
+
+ def compileCode(code: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+ }
+
+ def show(): Unit = {
+ compileCode("""
+ object U {
+ def foo(log: vanishing.Vanishing) = ()
+ }
+
+ package vanishing {
+ class Vanishing
+ }
+ """)
+ assert(filteredInfos.isEmpty, filteredInfos)
+ deletePackage("vanishing")
+ compileCode("""
+ class Test {
+ U
+ }
+ """)
+ assert(storeReporter.infos.isEmpty, storeReporter.infos.mkString("\n")) // Included a MissingRequirementError before.
+ }
+
+ def deletePackage(name: String) {
+ val directory = new File(testOutput.path, name)
+ for (f <- directory.listFiles()) {
+ assert(f.getName.endsWith(".class"))
+ assert(f.delete())
+ }
+ assert(directory.listFiles().isEmpty)
+ assert(directory.delete())
+ }
+}
diff --git a/test/files/run/t8925.check b/test/files/run/t8925.check
new file mode 100644
index 0000000000..112e7005df
--- /dev/null
+++ b/test/files/run/t8925.check
@@ -0,0 +1,2 @@
+bar
+abcd
diff --git a/test/files/run/t8925.flags b/test/files/run/t8925.flags
new file mode 100644
index 0000000000..be4ef0798a
--- /dev/null
+++ b/test/files/run/t8925.flags
@@ -0,0 +1 @@
+-Ynooptimise -Yopt:l:none -Ybackend:GenBCode
diff --git a/test/files/run/t8925.scala b/test/files/run/t8925.scala
new file mode 100644
index 0000000000..33f4505f03
--- /dev/null
+++ b/test/files/run/t8925.scala
@@ -0,0 +1,31 @@
+object Ex {
+ def unapply(t: Throwable): Option[Throwable] = Some(t)
+}
+
+class A {
+ var x = ""
+
+ def bar =
+ try {
+ "bar"
+ } finally {
+ try {
+ x += "a"
+ } finally {
+ x += "b"
+ try {
+ x += "c"
+ throw null
+ } catch {
+ case Ex(_) =>
+ x += "d"
+ }
+ }
+ }
+}
+
+object Test extends App {
+ val a = new A
+ println(a.bar)
+ println(a.x)
+}
diff --git a/test/files/run/t9003.flags b/test/files/run/t9003.flags
new file mode 100644
index 0000000000..49d036a887
--- /dev/null
+++ b/test/files/run/t9003.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/run/t9003.scala b/test/files/run/t9003.scala
new file mode 100644
index 0000000000..4f24712201
--- /dev/null
+++ b/test/files/run/t9003.scala
@@ -0,0 +1,71 @@
+object Single {
+ var i = 0
+ def isEmpty = false
+ def get = i
+ def unapply(a: Single.type) = this
+}
+
+object Product {
+ var i = 0
+ def _1: Int = i
+ def _2: String = ???
+ def productArity = 2
+ def unapply(a: Product.type) = this
+ def isEmpty = false
+ def get: this.type = this
+}
+
+object Sequence {
+ var i = 0
+ def apply(n: Int): Int = i
+ def length = 2
+ def unapplySeq(a: Sequence.type) = this
+ def isEmpty = false
+ def get = this
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ def assertZero(i: Int) = assert(i == 0)
+
+ Single match {
+ case Single(i) =>
+ Single.i = 1
+ assertZero(i) // fails under -optimize
+ }
+
+ Product match {
+ case Product(i, _) =>
+ Product.i = 1
+ assertZero(i) // fails under -optimize
+ }
+
+ Sequence match {
+ case Sequence(i, _ @ _*) =>
+ Sequence.i = 1
+ assertZero(i) // okay
+ }
+
+ Sequence.i = 0
+ Sequence match {
+ case Sequence(_, i) =>
+ Sequence.i = 1
+ assertZero(i) // okay
+ }
+
+ val buffer = collection.mutable.Buffer(0, 0)
+ buffer match {
+ case Seq(_, i) =>
+ buffer(1) = 1
+ assertZero(i) // failed
+ }
+
+ case class CaseSequence(as: Int*)
+ val buffer1 = collection.mutable.Buffer(0, 0)
+ CaseSequence(buffer1: _*) match {
+ case CaseSequence(_, i) =>
+ buffer1(1) = 1
+ assertZero(i) // failed
+ }
+ }
+}
diff --git a/test/files/run/t9027.check b/test/files/run/t9027.check
new file mode 100644
index 0000000000..3429254286
--- /dev/null
+++ b/test/files/run/t9027.check
@@ -0,0 +1,19 @@
+{
+ {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Elem(null, "a", _root_.scala.xml.Null, $scope, true));
+ $buf.$amp$plus(new _root_.scala.xml.Elem(null, "b", _root_.scala.xml.Null, $scope, true));
+ $buf
+ };
+ println("hello, world.")
+}
+{
+ {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Elem(null, "a", _root_.scala.xml.Null, $scope, true));
+ $buf.$amp$plus(new _root_.scala.xml.Elem(null, "b", _root_.scala.xml.Null, $scope, true));
+ $buf.$amp$plus(new _root_.scala.xml.Elem(null, "c", _root_.scala.xml.Null, $scope, true));
+ $buf
+ };
+ println("hello, world.")
+}
diff --git a/test/files/run/t9027.scala b/test/files/run/t9027.scala
new file mode 100644
index 0000000000..26238147da
--- /dev/null
+++ b/test/files/run/t9027.scala
@@ -0,0 +1,15 @@
+
+// used to be parsed as .println
+object Test extends App {
+ import reflect.runtime._, universe._
+
+ val trees = List(
+ q"""<a/><b/>
+ println("hello, world.")""",
+ q"""<a/>
+ <b/>
+ <c/>
+ println("hello, world.")"""
+ )
+ trees foreach println
+}
diff --git a/test/files/t8449/Client.scala b/test/files/t8449/Client.scala
new file mode 100644
index 0000000000..5d273f06b2
--- /dev/null
+++ b/test/files/t8449/Client.scala
@@ -0,0 +1,3 @@
+object Client {
+ def foo: Any = new Test().foo
+}
diff --git a/test/files/t8449/Test.java b/test/files/t8449/Test.java
new file mode 100644
index 0000000000..ecb1711b24
--- /dev/null
+++ b/test/files/t8449/Test.java
@@ -0,0 +1,10 @@
+public class Test {
+ // Raw type over a Scala type constructor
+ public scala.Function1 foo() { return null; }
+ // scalac reported:
+ // % scalac-hash v2.11.2 -d /tmp sandbox/{Test.java,Client.scala}
+ // sandbox/Test.java:2: error: trait Function1 takes type parameters
+ // public scala.Function1 foo() { return null; }
+ // ^
+ // one error found
+}
diff --git a/test/junit/scala/collection/IndexedSeqOptimizedTest.scala b/test/junit/scala/collection/IndexedSeqOptimizedTest.scala
index e5382907af..419e1454cb 100644
--- a/test/junit/scala/collection/IndexedSeqOptimizedTest.scala
+++ b/test/junit/scala/collection/IndexedSeqOptimizedTest.scala
@@ -13,4 +13,17 @@ class IndexedSeqOptimizedTest {
assertEquals(0, (Array(2): collection.mutable.WrappedArray[Int]).lastIndexWhere(_ => true, 1))
assertEquals(2, "abc123".lastIndexWhere(_.isLetter, 6))
}
+
+ @Test
+ def hasCorrectDropAndTakeMethods() {
+ assertEquals("", "abc" take Int.MinValue)
+ assertEquals("", "abc" takeRight Int.MinValue)
+ assertEquals("abc", "abc" drop Int.MinValue)
+ assertEquals("abc", "abc" dropRight Int.MinValue)
+
+ assertArrayEquals(Array.empty[Int], Array(1, 2, 3) take Int.MinValue)
+ assertArrayEquals(Array.empty[Int], Array(1, 2, 3) takeRight Int.MinValue)
+ assertArrayEquals(Array(1, 2, 3), Array(1, 2, 3) drop Int.MinValue)
+ assertArrayEquals(Array(1, 2, 3), Array(1, 2, 3) dropRight Int.MinValue)
+ }
}
diff --git a/test/junit/scala/collection/IterableViewLikeTest.scala b/test/junit/scala/collection/IterableViewLikeTest.scala
new file mode 100644
index 0000000000..55da02744b
--- /dev/null
+++ b/test/junit/scala/collection/IterableViewLikeTest.scala
@@ -0,0 +1,20 @@
+package scala.collection
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class IterableViewLikeTest {
+
+ @Test
+ def hasCorrectDropAndTakeMethods() {
+ val iter = Iterable(1, 2, 3)
+
+ assertEquals(Iterable.empty[Int], iter.view take Int.MinValue force)
+ assertEquals(Iterable.empty[Int], iter.view takeRight Int.MinValue force)
+ assertEquals(iter, iter.view drop Int.MinValue force)
+ assertEquals(iter, iter.view dropRight Int.MinValue force)
+ }
+}
diff --git a/test/junit/scala/collection/IteratorTest.scala b/test/junit/scala/collection/IteratorTest.scala
index b7a9805c9f..d5389afd0c 100644
--- a/test/junit/scala/collection/IteratorTest.scala
+++ b/test/junit/scala/collection/IteratorTest.scala
@@ -6,11 +6,14 @@ import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
+import scala.tools.testing.AssertUtil._
+
+import Seq.empty
+
@RunWith(classOf[JUnit4])
class IteratorTest {
- @Test
- def groupedIteratorShouldNotAskForUnneededElement(): Unit = {
+ @Test def groupedIteratorShouldNotAskForUnneededElement(): Unit = {
var counter = 0
val it = new Iterator[Int] { var i = 0 ; def hasNext = { counter = i; true } ; def next = { i += 1; i } }
val slidingIt = it sliding 2
@@ -25,4 +28,130 @@ class IteratorTest {
slidingIt.next
assertEquals("Counter should be one, that means we didn't look further than needed", 1, counter)
}
+
+ @Test def dropDoesNotGrowStack(): Unit = {
+ def it = new Iterator[Throwable] { def hasNext = true ; def next = new Throwable }
+
+ assertEquals(it.drop(1).next.getStackTrace.length, it.drop(1).drop(1).next.getStackTrace.length)
+ }
+
+ @Test def dropIsChainable(): Unit = {
+ assertSameElements(1 to 4, Iterator from 0 take 5 drop 1)
+ assertSameElements(3 to 4, Iterator from 0 take 5 drop 3)
+ assertSameElements(empty, Iterator from 0 take 5 drop 5)
+ assertSameElements(empty, Iterator from 0 take 5 drop 10)
+ assertSameElements(0 to 4, Iterator from 0 take 5 drop 0)
+ assertSameElements(0 to 4, Iterator from 0 take 5 drop -1)
+ assertSameElements(2 to 8 by 2, Iterator from 0 take 5 drop 1 map (2 * _))
+ assertSameElements(2 to 8 by 2, Iterator from 0 take 5 map (2 * _) drop 1)
+ assertSameElements(3 to 4, Iterator from 0 take 5 drop 1 drop 2)
+ assertSameElements(3 to 4, Iterator from 0 take 5 drop 2 drop 1)
+ }
+
+ @Test def sliceIsChainable(): Unit = {
+ assertSameElements(3 to 6, Iterator from 0 slice (3, 7))
+ assertSameElements(empty, Iterator from 0 slice (3, 3))
+ assertSameElements(0 to 2, Iterator from 0 slice (-1, 3))
+ assertSameElements(empty, Iterator from 0 slice (3, -1))
+ assertSameElements(6 to 12 by 2, Iterator from 0 slice (3, 7) map (2 * _))
+ assertSameElements(6 to 12 by 2, Iterator from 0 map (2 * _) slice (3, 7))
+ assertSameElements(4 to 6, Iterator from 0 slice (3, 7) drop 1)
+ assertSameElements(4 to 7, Iterator from 0 drop 1 slice (3, 7))
+ assertSameElements(4 to 5, Iterator from 0 slice (3, 7) slice (1, 3))
+ assertSameElements(4 to 6, Iterator from 0 slice (3, 7) slice (1, 10))
+ }
+
+ // test/files/run/iterator-concat.scala
+ @Test def concatIsStackFriendly(): Unit = {
+ // Create `size` Function0s, each of which evaluates to an Iterator
+ // which produces 1. Then fold them over ++ to get a single iterator,
+ // which should sum to "size".
+ def mk(size: Int): Iterator[Int] = {
+ //val closures = (1 to size).toList.map(x => (() => Iterator(1)))
+ //closures.foldLeft(Iterator.empty: Iterator[Int])((res, f) => res ++ f())
+ List.fill(size)(() => Iterator(1)).foldLeft(Iterator.empty: Iterator[Int])((res, f) => res ++ f())
+ }
+ assertEquals(100, mk(100).sum)
+ assertEquals(1000, mk(1000).sum)
+ assertEquals(10000, mk(10000).sum)
+ assertEquals(100000, mk(100000).sum)
+ }
+
+ @Test def from(): Unit = {
+ val it1 = Iterator.from(-1)
+ val it2 = Iterator.from(0, -1)
+ assertEquals(-1, it1.next())
+ assertEquals(0, it2.next())
+ }
+ @Test def range(): Unit = {
+ assertEquals(5, Iterator.range(0, 10, 2).size)
+ assertEquals(0, Iterator.range(0, 10, -2).size)
+ assertEquals(5, Iterator.range(10, 0, -2).size)
+ assertEquals(0, Iterator.range(10, 0, 2).size)
+ assertEquals(1, Iterator.range(0, 10, 11).size)
+ assertEquals(10, Iterator.range(0, 10, 1).size)
+ assertEquals(10, Iterator.range(10, 0, -1).size)
+ }
+ @Test def range3(): Unit = {
+ val r1 = Iterator.range(0, 10)
+ assertTrue(r1 contains 5)
+ assertTrue(r1 contains 6)
+ assertFalse(r1 contains 4)
+ val r2a = Iterator.range(0, 10, 2)
+ assertFalse(r2a contains 5)
+ val r2b = Iterator.range(0, 10, 2)
+ assertTrue(r2b contains 6)
+ val r3 = Iterator.range(0, 10, 11)
+ assertFalse(r3 contains 5)
+ assertTrue(r3.isEmpty)
+ }
+ @Test def take(): Unit = {
+ assertEquals(10, (Iterator from 0 take 10).size)
+ }
+ @Test def foreach(): Unit = {
+ val it1 = Iterator.from(0) take 20
+ var n = 0
+ it1 foreach { n += _ }
+ assertEquals(190, n)
+ }
+ // ticket #429
+ @Test def fromArray(): Unit = {
+ val a = List(1, 2, 3, 4).toArray
+ var xs0 = a.iterator.toList;
+ var xs1 = a.slice(0, 1).iterator
+ var xs2 = a.slice(0, 2).iterator
+ var xs3 = a.slice(0, 3).iterator
+ var xs4 = a.slice(0, 4).iterator
+ assertEquals(14, xs0.size + xs1.size + xs2.size + xs3.size + xs4.size)
+ }
+ @Test def toSeq(): Unit = {
+ assertEquals("1x2x3x4x5", List(1, 2, 3, 4, 5).iterator.mkString("x"))
+ }
+ @Test def indexOf(): Unit = {
+ assertEquals(3, List(1, 2, 3, 4, 5).iterator.indexOf(4))
+ assertEquals(-1, List(1, 2, 3, 4, 5).iterator.indexOf(16))
+ }
+ @Test def indexWhere(): Unit = {
+ assertEquals(3, List(1, 2, 3, 4, 5).iterator.indexWhere { x: Int => x >= 4 })
+ assertEquals(-1, List(1, 2, 3, 4, 5).iterator.indexWhere { x: Int => x >= 16 })
+ }
+ // iterator-iterate-lazy.scala
+ // was java.lang.UnsupportedOperationException: tail of empty list
+ @Test def iterateIsSufficientlyLazy(): Unit = {
+ //Iterator.iterate((1 to 5).toList)(_.tail).takeWhile(_.nonEmpty).toList // suffices
+ Iterator.iterate((1 to 5).toList)(_.tail).takeWhile(_.nonEmpty).map(_.head).toList
+ }
+ // SI-3516
+ @Test def toStreamIsSufficientlyLazy(): Unit = {
+ val results = collection.mutable.ListBuffer.empty[Int]
+ def mkIterator = (1 to 5).iterator map (x => { results += x ; x })
+ def mkInfinite = Iterator continually { results += 1 ; 1 }
+
+ // Stream is strict in its head so we should see 1 from each of them.
+ val s1 = mkIterator.toStream
+ val s2 = mkInfinite.toStream
+ // back and forth without slipping into nontermination.
+ results += (Stream from 1).toIterator.drop(10).toStream.drop(10).toIterator.next()
+ assertSameElements(List(1,1,21), results)
+ }
}
diff --git a/test/junit/scala/collection/immutable/ListTest.scala b/test/junit/scala/collection/immutable/ListTest.scala
new file mode 100644
index 0000000000..1006801029
--- /dev/null
+++ b/test/junit/scala/collection/immutable/ListTest.scala
@@ -0,0 +1,49 @@
+package scala.collection.immutable
+
+import org.junit.{Assert, Test}
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.ref.WeakReference
+
+@RunWith(classOf[JUnit4])
+class ListTest {
+ /**
+ * Test that empty iterator does not hold reference
+ * to complete List
+ */
+ @Test
+ def testIteratorGC(): Unit = {
+ var num = 0
+ var emptyIterators = Seq.empty[(Iterator[Int], WeakReference[List[Int]])]
+
+ do {
+ val list = List.fill(10000)(num)
+ val ref = WeakReference(list)
+
+ val i = list.iterator
+
+ while (i.hasNext) i.next()
+
+ emptyIterators = (i, ref) +: emptyIterators
+
+ num+=1
+ } while (emptyIterators.forall(_._2.get.isDefined) && num<1000)
+
+ // check something on each iterator so the references are not optimized away by the JIT
+ for ((i, _) <- emptyIterators) {
+ Assert.assertTrue(i.isEmpty)
+ }
+
+ // await gc up to ~5 seconds
+ var forceLoops = 50
+ while (emptyIterators.forall(_._2.get.isDefined) && forceLoops>0) {
+ System.gc()
+ Thread.sleep(100)
+ forceLoops -= 1
+ }
+
+ // real assertion
+ Assert.assertTrue(emptyIterators.exists(_._2.get.isEmpty))
+ }
+}
diff --git a/test/junit/scala/collection/immutable/PagedSeqTest.scala b/test/junit/scala/collection/immutable/PagedSeqTest.scala
index 5f83cf6f31..2b576a3655 100644
--- a/test/junit/scala/collection/immutable/PagedSeqTest.scala
+++ b/test/junit/scala/collection/immutable/PagedSeqTest.scala
@@ -5,12 +5,24 @@ import org.junit.runners.JUnit4
import org.junit.Test
import org.junit.Assert._
-/* Test for SI-6615 */
@RunWith(classOf[JUnit4])
class PagedSeqTest {
+ // should not NPE, and should equal the given Seq
@Test
- def rovingDoesNotNPE(): Unit = {
- // should not NPE, and should equal the given Seq
+ def test_SI6615(): Unit = {
assertEquals(Seq('a'), PagedSeq.fromStrings(List.fill(5000)("a")).slice(4096, 4097))
}
+
+ // Slices shouldn't read outside where they belong
+ @Test
+ def test_SI6519 {
+ var readAttempt = 0
+ val sideEffectingIterator = new Iterator[Int] {
+ def hasNext = readAttempt < 65536
+ def next = { readAttempt += 1; readAttempt }
+ }
+ val s = PagedSeq.fromIterator(sideEffectingIterator).slice(0,2).mkString
+ assertEquals(s, "12")
+ assert(readAttempt <= 4096)
+ }
}
diff --git a/test/junit/scala/collection/immutable/TreeMapTest.scala b/test/junit/scala/collection/immutable/TreeMapTest.scala
new file mode 100644
index 0000000000..4c21b94b24
--- /dev/null
+++ b/test/junit/scala/collection/immutable/TreeMapTest.scala
@@ -0,0 +1,20 @@
+package scala.collection.immutable
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class TreeMapTest {
+
+ @Test
+ def hasCorrectDropAndTakeMethods() {
+ val tree = TreeMap(1 -> "a", 2 -> "b", 3 -> "c")
+
+ assertEquals(TreeMap.empty[Int, String], tree take Int.MinValue)
+ assertEquals(TreeMap.empty[Int, String], tree takeRight Int.MinValue)
+ assertEquals(tree, tree drop Int.MinValue)
+ assertEquals(tree, tree dropRight Int.MinValue)
+ }
+}
diff --git a/test/junit/scala/collection/immutable/TreeSetTest.scala b/test/junit/scala/collection/immutable/TreeSetTest.scala
new file mode 100644
index 0000000000..8efe1bfeb8
--- /dev/null
+++ b/test/junit/scala/collection/immutable/TreeSetTest.scala
@@ -0,0 +1,20 @@
+package scala.collection.immutable
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class TreeSetTest {
+
+ @Test
+ def hasCorrectDropAndTakeMethods() {
+ val set = TreeSet(1, 2, 3)
+
+ assertEquals(TreeSet.empty[Int], set take Int.MinValue)
+ assertEquals(TreeSet.empty[Int], set takeRight Int.MinValue)
+ assertEquals(set, set drop Int.MinValue)
+ assertEquals(set, set dropRight Int.MinValue)
+ }
+}
diff --git a/test/junit/scala/math/BigDecimalTest.scala b/test/junit/scala/math/BigDecimalTest.scala
index d1ba96fcc8..c7a63da890 100644
--- a/test/junit/scala/math/BigDecimalTest.scala
+++ b/test/junit/scala/math/BigDecimalTest.scala
@@ -222,4 +222,10 @@ class BigDecimalTest {
for (a <- different; b <- different if (a ne b))
assert(a != b, "BigDecimal representations of Double mistakenly conflated")
}
+
+ // Make sure hash code agrees with decimal representation of Double
+ @Test
+ def test_SI8970() {
+ assert((0.1).## == BigDecimal(0.1).##)
+ }
}
diff --git a/test/junit/scala/tools/testing/AssertUtil.scala b/test/junit/scala/tools/testing/AssertUtil.scala
index 9b4833d46b..83a637783f 100644
--- a/test/junit/scala/tools/testing/AssertUtil.scala
+++ b/test/junit/scala/tools/testing/AssertUtil.scala
@@ -1,6 +1,11 @@
package scala.tools
package testing
+import org.junit.Assert
+import Assert.fail
+import scala.runtime.ScalaRunTime.stringOf
+import scala.collection.{ GenIterable, IterableLike }
+
/** This module contains additional higher-level assert statements
* that are ultimately based on junit.Assert primitives.
*/
@@ -21,6 +26,19 @@ object AssertUtil {
throw e
else return
}
- throw new AssertionError("Expression did not throw!")
+ fail("Expression did not throw!")
}
+
+ /** JUnit-style assertion for `IterableLike.sameElements`.
+ */
+ def assertSameElements[A, B >: A](expected: IterableLike[A, _], actual: GenIterable[B], message: String = ""): Unit =
+ if (!(expected sameElements actual))
+ fail(
+ f"${ if (message.nonEmpty) s"$message " else "" }expected:<${ stringOf(expected) }> but was:<${ stringOf(actual) }>"
+ )
+
+ /** Convenient for testing iterators.
+ */
+ def assertSameElements[A, B >: A](expected: IterableLike[A, _], actual: Iterator[B]): Unit =
+ assertSameElements(expected, actual.toList, "")
}
diff --git a/tools/binary-repo-lib.sh b/tools/binary-repo-lib.sh
index 654ba21547..437c0a0c08 100755
--- a/tools/binary-repo-lib.sh
+++ b/tools/binary-repo-lib.sh
@@ -2,15 +2,16 @@
#
# Library to push and pull binary artifacts from a remote repository using CURL.
-
remote_urlget="http://repo.typesafe.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
remote_urlpush="http://private-repo.typesafe.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
libraryJar="$(pwd)/lib/scala-library.jar"
desired_ext=".desired.sha1"
push_jar="$(pwd)/tools/push.jar"
+
if [[ "$OSTYPE" == *Cygwin* || "$OSTYPE" == *cygwin* ]]; then push_jar="$(cygpath -m "$push_jar")"; fi
# Cache dir has .sbt in it to line up with SBT build.
-cache_dir="${HOME}/.sbt/cache/scala"
+SCALA_BUILD_REPOS_HOME=${SCALA_BUILD_REPOS_HOME:=$HOME}
+cache_dir="${SCALA_BUILD_REPOS_HOME}/.sbt/cache/scala"
# Checks whether or not curl is installed and issues a warning on failure.
checkCurl() {