-rw-r--r--  bincompat-backward.whitelist.conf | 5
-rwxr-xr-x  build.xml | 3
-rw-r--r--  spec/01-lexical-syntax.md | 4
-rw-r--r--  spec/13-syntax-summary.md | 2
-rwxr-xr-x  src/compiler/scala/tools/ant/templates/tool-unix.tmpl | 12
-rw-r--r--  src/compiler/scala/tools/ant/templates/tool-windows.tmpl | 2
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 147
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/backend/JavaPlatform.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Checkable.scala | 48
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/util/ClassPath.scala | 19
-rw-r--r--  src/library/scala/Enumeration.scala | 1
-rwxr-xr-x  src/library/scala/collection/IndexedSeqOptimized.scala | 4
-rw-r--r--  src/library/scala/collection/IterableViewLike.scala | 4
-rw-r--r--  src/library/scala/collection/Iterator.scala | 4
-rw-r--r--  src/library/scala/collection/concurrent/Map.scala | 2
-rw-r--r--  src/library/scala/collection/generic/GenericTraversableTemplate.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/TreeMap.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/TreeSet.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/ArrayBuffer.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/MapLike.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/MutableList.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/StringBuilder.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/ParIterable.scala | 3
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 3
-rw-r--r--  src/library/scala/collection/parallel/ParMapLike.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/ParSetLike.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParMapLike.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParSet.scala | 3
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParSetLike.scala | 2
-rw-r--r--  src/library/scala/concurrent/ExecutionContext.scala | 18
-rw-r--r--  src/library/scala/runtime/Tuple2Zipped.scala | 4
-rw-r--r--  src/library/scala/runtime/Tuple3Zipped.scala | 7
-rw-r--r--  src/library/scala/sys/SystemProperties.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/pickling/UnPickler.scala | 2
-rw-r--r--  src/reflect/scala/reflect/io/AbstractFile.scala | 16
-rw-r--r--  src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala | 8
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ILoop.scala | 53
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/IMain.scala | 26
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/DocParser.scala | 3
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala | 2
-rw-r--r--  test/files/neg/t2866.check | 17
-rw-r--r--  test/files/neg/t2866.scala | 59
-rw-r--r--  test/files/neg/t5639b.check | 4
-rw-r--r--  test/files/neg/t5639b/A_1.scala | 17
-rw-r--r--  test/files/neg/t5639b/A_2.scala | 11
-rw-r--r--  test/files/neg/t8534.check | 4
-rw-r--r--  test/files/neg/t8534.scala | 7
-rw-r--r--  test/files/neg/t8534b.check | 4
-rw-r--r--  test/files/neg/t8534b.scala | 4
-rw-r--r--  test/files/neg/t8597.check | 21
-rw-r--r--  test/files/neg/t8597.flags | 1
-rw-r--r--  test/files/neg/t8597.scala | 27
-rw-r--r--  test/files/neg/t8597b.check | 6
-rw-r--r--  test/files/neg/t8597b.flags | 1
-rw-r--r--  test/files/neg/t8597b.scala | 21
-rw-r--r--  test/files/neg/t963.check | 8
-rw-r--r--  test/files/neg/unchecked-abstract.check | 14
-rw-r--r--  test/files/pos/t5639.flags | 1
-rw-r--r--  test/files/pos/t5639/A_1.scala | 17
-rw-r--r--  test/files/pos/t5639/A_2.scala | 11
-rw-r--r--  test/files/pos/t5639/Bar.scala | 7
-rw-r--r--  test/files/pos/t5639/Foo.scala | 7
-rw-r--r--  test/files/pos/t7596/A_1.scala | 10
-rw-r--r--  test/files/pos/t7596/B_2.scala | 19
-rw-r--r--  test/files/pos/t7596b/A.scala | 10
-rw-r--r--  test/files/pos/t7596b/B.scala | 6
-rw-r--r--  test/files/pos/t7596c/A_1.scala | 11
-rw-r--r--  test/files/pos/t7596c/B_2.scala | 9
-rw-r--r--  test/files/run/iterator-concat.check | 4
-rw-r--r--  test/files/run/iterator-concat.scala | 15
-rw-r--r--  test/files/run/iterator-iterate-lazy.scala | 5
-rw-r--r--  test/files/run/iterators.check | 14
-rw-r--r--  test/files/run/iterators.scala | 168
-rw-r--r--  test/files/run/t2866.check | 3
-rw-r--r--  test/files/run/t2866.scala | 44
-rw-r--r--  test/files/run/t3516.check | 3
-rw-r--r--  test/files/run/t3516.scala | 13
-rw-r--r--  test/files/run/t5938.scala | 35
-rw-r--r--  test/files/run/t6502.check | 8
-rw-r--r--  test/files/run/t6502.scala | 101
-rw-r--r--  test/files/run/t7407.flags | 2
-rw-r--r--  test/files/run/t8253.check | 40
-rw-r--r--  test/files/run/t8253.scala | 14
-rw-r--r--  test/files/run/t8925.check | 2
-rw-r--r--  test/files/run/t8925.flags | 1
-rw-r--r--  test/files/run/t8925.scala | 31
-rw-r--r--  test/files/run/t8946.scala | 29
-rw-r--r--  test/files/t8449/Client.scala | 3
-rw-r--r--  test/files/t8449/Test.java | 10
-rw-r--r--  test/junit/scala/collection/IndexedSeqOptimizedTest.scala | 13
-rw-r--r--  test/junit/scala/collection/IterableViewLikeTest.scala | 20
-rw-r--r--  test/junit/scala/collection/IteratorTest.scala | 133
-rw-r--r--  test/junit/scala/collection/immutable/TreeMapTest.scala | 20
-rw-r--r--  test/junit/scala/collection/immutable/TreeSetTest.scala | 20
-rw-r--r--  test/junit/scala/tools/testing/AssertUtil.scala | 20
101 files changed, 1248 insertions, 324 deletions
diff --git a/bincompat-backward.whitelist.conf b/bincompat-backward.whitelist.conf
index 076b9bb9aa..56d5b0135c 100644
--- a/bincompat-backward.whitelist.conf
+++ b/bincompat-backward.whitelist.conf
@@ -198,6 +198,11 @@ filter {
{
matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$4"
problemName=MissingMethodProblem
+ },
+ // SI-8946
+ {
+ matchName="scala.reflect.runtime.ThreadLocalStorage#MyThreadLocalStorage.values"
+ problemName=MissingMethodProblem
}
]
}
diff --git a/build.xml b/build.xml
index 94e3132501..02b98e66d8 100755
--- a/build.xml
+++ b/build.xml
@@ -848,8 +848,7 @@ TODO:
-->
<path id="pack.reflect.files"> <fileset dir="${build-quick.dir}/classes/reflect"/> </path>
- <path id="pack.scalap.files"> <fileset dir="${build-quick.dir}/classes/scalap"/>
- <fileset file="${src.dir}/scalap/decoder.properties"/> </path>
+ <path id="pack.scalap.files"> <fileset dir="${build-quick.dir}/classes/scalap"/> </path>
<path id="pack.partest-extras.files"> <fileset dir="${build-quick.dir}/classes/partest-extras"/> </path>
<path id="pack.partest-javaagent.files"> <fileset dir="${build-quick.dir}/classes/partest-javaagent"/> </path>
diff --git a/spec/01-lexical-syntax.md b/spec/01-lexical-syntax.md
index d5752bbdf0..c37d8f2a0b 100644
--- a/spec/01-lexical-syntax.md
+++ b/spec/01-lexical-syntax.md
@@ -323,14 +323,12 @@ Literal ::= [‘-’] integerLiteral
### Integer Literals
```ebnf
-integerLiteral ::= (decimalNumeral | hexNumeral | octalNumeral)
+integerLiteral ::= (decimalNumeral | hexNumeral)
[‘L’ | ‘l’]
decimalNumeral ::= ‘0’ | nonZeroDigit {digit}
hexNumeral ::= ‘0’ ‘x’ hexDigit {hexDigit}
-octalNumeral ::= ‘0’ octalDigit {octalDigit}
digit ::= ‘0’ | nonZeroDigit
nonZeroDigit ::= ‘1’ | … | ‘9’
-octalDigit ::= ‘0’ | … | ‘7’
```
Integer literals are usually of type `Int`, or of type
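For quick reference, a minimal sketch (mine, not part of the patch) of the literal forms that remain valid under the revised grammar above; the values are illustrative:

    // Decimal and hexadecimal integer literals, optionally suffixed with `L`/`l` for Long.
    val dec: Int  = 42           // decimalNumeral
    val hex: Int  = 0x2A         // hexNumeral (same value as 42)
    val big: Long = 4294967296L  // `L` suffix selects type Long
    // The removed octal form (a leading-zero literal such as 0755) is no longer part of the grammar.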
diff --git a/spec/13-syntax-summary.md b/spec/13-syntax-summary.md
index 86efcf70a8..ae941f189e 100644
--- a/spec/13-syntax-summary.md
+++ b/spec/13-syntax-summary.md
@@ -210,7 +210,7 @@ grammar.
ClassParams ::= ClassParam {‘,’ ClassParam}
ClassParam ::= {Annotation} {Modifier} [(`val' | `var')]
id ‘:’ ParamType [‘=’ Expr]
- Bindings ::= ‘(’ Binding {‘,’ Binding ‘)’
+ Bindings ::= ‘(’ Binding {‘,’ Binding} ‘)’
Binding ::= (id | ‘_’) [‘:’ Type]
Modifier ::= LocalModifier
diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
index f58223a39e..7acb3632d2 100755
--- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
@@ -86,10 +86,14 @@ fi
TOOL_CLASSPATH="@classpath@"
if [[ -z "$TOOL_CLASSPATH" ]]; then
for ext in "$SCALA_HOME"/lib/* ; do
- if [[ -z "$TOOL_CLASSPATH" ]]; then
- TOOL_CLASSPATH="$ext"
- else
- TOOL_CLASSPATH="${TOOL_CLASSPATH}${SEP}${ext}"
+ file_extension="${ext##*.}"
+ # SI-8967 Only consider directories and files named '*.jar'
+ if [[ -d "$ext" || $file_extension == "jar" ]]; then
+ if [[ -z "$TOOL_CLASSPATH" ]]; then
+ TOOL_CLASSPATH="$ext"
+ else
+ TOOL_CLASSPATH="${TOOL_CLASSPATH}${SEP}${ext}"
+ fi
fi
done
fi
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index cf0e003f10..50e44fb669 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -128,7 +128,7 @@ if defined _JAVA_PARAMS set _JAVA_OPTS=%_JAVA_OPTS% %_JAVA_PARAMS%
set _TOOL_CLASSPATH=@classpath@
if "%_TOOL_CLASSPATH%"=="" (
- for %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
+ for %%f in ("!_SCALA_HOME!\lib\*.jar") do call :add_cpath "%%f"
for /d %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
)
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 9cc9712b44..430424d0f8 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -8,12 +8,13 @@ package tools
package nsc
import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException }
+import java.net.URL
import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException }
import scala.compat.Platform.currentTime
import scala.collection.{ mutable, immutable }
import io.{ SourceReader, AbstractFile, Path }
import reporters.{ Reporter, ConsoleReporter }
-import util.{ ClassPath, StatisticsInfo, returning, stackTraceString }
+import util.{ ClassPath, MergedClassPath, StatisticsInfo, returning, stackTraceString }
import scala.reflect.ClassTag
import scala.reflect.internal.util.{ OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
@@ -841,6 +842,150 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
} reverse
}
+ // ------------ REPL utilities ---------------------------------
+
+ /** Extend classpath of `platform` and rescan updated packages. */
+ def extendCompilerClassPath(urls: URL*): Unit = {
+ val newClassPath = platform.classPath.mergeUrlsIntoClassPath(urls: _*)
+ platform.currentClassPath = Some(newClassPath)
+ // Reload all specified jars into this compiler instance
+ invalidateClassPathEntries(urls.map(_.getPath): _*)
+ }
+
+ // ------------ Invalidations ---------------------------------
+
+ /** Is given package class a system package class that cannot be invalidated?
+ */
+ private def isSystemPackageClass(pkg: Symbol) =
+ pkg == RootClass || (pkg.hasTransOwner(definitions.ScalaPackageClass) && !pkg.hasTransOwner(this.rootMirror.staticPackage("scala.tools").moduleClass.asClass))
+
+ /** Invalidates packages that contain classes defined in a classpath entry, and
+ * rescans that entry.
+ *
+ * First, the classpath entry referred to by one of the `paths` is rescanned,
+ * so that any new files or changes in subpackages are picked up.
+ * Second, any package for which one of the following conditions is met is invalidated:
+ * - the classpath entry contained during the last compilation run now contains classfiles
+ * that represent a member in the package;
+ * - the classpath entry now contains classfiles that represent a member in the package;
+ * - the set of subpackages has changed.
+ *
+ * The invalidated packages are reset in their entirety; all member classes and member packages
+ * are re-accessed using the new classpath.
+ *
+ * System packages that the compiler needs to access as part of standard definitions
+ * are not invalidated. A system package is:
+ * Any package rooted in "scala", with the exception of packages rooted in "scala.tools".
+ *
+ * @param paths Fully-qualified names that refer to directories or jar files that are
+ * entries on the classpath.
+ */
+ def invalidateClassPathEntries(paths: String*): Unit = {
+ implicit object ClassPathOrdering extends Ordering[PlatformClassPath] {
+ def compare(a:PlatformClassPath, b:PlatformClassPath) = a.asClasspathString compare b.asClasspathString
+ }
+ val invalidated, failed = new mutable.ListBuffer[ClassSymbol]
+ classPath match {
+ case cp: MergedClassPath[_] =>
+ def assoc(path: String): List[(PlatformClassPath, PlatformClassPath)] = {
+ val dir = AbstractFile.getDirectory(path)
+ val canonical = dir.canonicalPath
+ def matchesCanonical(e: ClassPath[_]) = e.origin match {
+ case Some(opath) =>
+ AbstractFile.getDirectory(opath).canonicalPath == canonical
+ case None =>
+ false
+ }
+ cp.entries find matchesCanonical match {
+ case Some(oldEntry) =>
+ List(oldEntry -> cp.context.newClassPath(dir))
+ case None =>
+ error(s"Error adding entry to classpath. During invalidation, no entry named $path in classpath $classPath")
+ List()
+ }
+ }
+ val subst = immutable.TreeMap(paths flatMap assoc: _*)
+ if (subst.nonEmpty) {
+ platform updateClassPath subst
+ informProgress(s"classpath updated on entries [${subst.keys mkString ","}]")
+ def mkClassPath(elems: Iterable[PlatformClassPath]): PlatformClassPath =
+ if (elems.size == 1) elems.head
+ else new MergedClassPath(elems, classPath.context)
+ val oldEntries = mkClassPath(subst.keys)
+ val newEntries = mkClassPath(subst.values)
+ mergeNewEntries(newEntries, RootClass, Some(classPath), Some(oldEntries), invalidated, failed)
+ }
+ }
+ def show(msg: String, syms: scala.collection.Traversable[Symbol]) =
+ if (syms.nonEmpty)
+ informProgress(s"$msg: ${syms map (_.fullName) mkString ","}")
+ show("invalidated packages", invalidated)
+ show("could not invalidate system packages", failed)
+ }
+
+ /** Merges new classpath entries into the symbol table
+ *
+ * @param newEntries The new classpath entries
+ * @param root The root symbol to be resynced (a package class)
+ * @param allEntries Optionally, the corresponding package in the complete current classpath
+ * @param oldEntries Optionally, the corresponding package in the old classpath entries
+ * @param invalidated A listbuffer collecting the invalidated package classes
+ * @param failed A listbuffer collecting system package classes which could not be invalidated
+ *
+ * The merging strategy is determined by the absence or presence of classes and packages.
+ *
+ * If either oldEntries or newEntries contains classes, root is invalidated provided that a corresponding package
+ * exists in allEntries. Otherwise it is removed.
+ * Otherwise, the action is determined by the following matrix, with columns:
+ *
+ * old sym action
+ * + + recurse into all child packages of newEntries
+ * - + invalidate root
+ * - - create and enter root
+ *
+ * Here, old means classpath, and sym means symbol table. + is presence of an entry in its column, - is absence.
+ */
+ private def mergeNewEntries(newEntries: PlatformClassPath, root: ClassSymbol,
+ allEntries: OptClassPath, oldEntries: OptClassPath,
+ invalidated: mutable.ListBuffer[ClassSymbol], failed: mutable.ListBuffer[ClassSymbol]) {
+ ifDebug(informProgress(s"syncing $root, $oldEntries -> $newEntries"))
+
+ val getName: ClassPath[AbstractFile] => String = (_.name)
+ def hasClasses(cp: OptClassPath) = cp.isDefined && cp.get.classes.nonEmpty
+ def invalidateOrRemove(root: ClassSymbol) = {
+ allEntries match {
+ case Some(cp) => root setInfo new loaders.PackageLoader(cp)
+ case None => root.owner.info.decls unlink root.sourceModule
+ }
+ invalidated += root
+ }
+ def subPackage(cp: PlatformClassPath, name: String): OptClassPath =
+ cp.packages find (cp1 => getName(cp1) == name)
+
+ val classesFound = hasClasses(oldEntries) || newEntries.classes.nonEmpty
+ if (classesFound && !isSystemPackageClass(root)) {
+ invalidateOrRemove(root)
+ } else {
+ if (classesFound) {
+ if (root.isRoot) invalidateOrRemove(EmptyPackageClass)
+ else failed += root
+ }
+ if (!oldEntries.isDefined) invalidateOrRemove(root)
+ else
+ for (pstr <- newEntries.packages.map(getName)) {
+ val pname = newTermName(pstr)
+ val pkg = (root.info decl pname) orElse {
+ // package does not exist in symbol table, create symbol to track it
+ assert(!subPackage(oldEntries.get, pstr).isDefined)
+ loaders.enterPackage(root, pstr, new loaders.PackageLoader(allEntries.get))
+ }
+ mergeNewEntries(subPackage(newEntries, pstr).get, pkg.moduleClass.asClass,
+ subPackage(allEntries.get, pstr), subPackage(oldEntries.get, pstr),
+ invalidated, failed)
+ }
+ }
+ }
+
// ----------- Runs ---------------------------------------
private var curRun: Run = null
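A hedged usage sketch, not from the patch, of the new `extendCompilerClassPath` entry point; `global` and the jar path are assumed purely for illustration:

    // Assumes `global` is an initialized scala.tools.nsc.Global and the jar exists on disk.
    val extraJar: java.net.URL = new java.io.File("/tmp/extra-lib.jar").toURI.toURL
    global.extendCompilerClassPath(extraJar)
    // extendCompilerClassPath merges the URL into the platform classpath and then calls
    // invalidateClassPathEntries for the affected packages, as described in the comments above.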
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index 1abc0c860c..8cd915bf22 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -184,7 +184,8 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
)
val uri1 = attrMap(z) match {
- case Apply(_, List(uri @ Literal(Constant(_)))) => mkAssign(uri)
+ case Apply(Select(New(Select(Select(Select(Ident(nme.ROOTPKG), nme.scala_), nme.xml), tpnme.Text)), nme.CONSTRUCTOR), List(uri @ Literal(Constant(_)))) =>
+ mkAssign(uri)
case Select(_, nme.Nil) => mkAssign(const(null)) // allow for xmlns="" -- bug #1626
case x => mkAssign(x)
}
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index 7236bf70d5..4877bd9b80 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -16,7 +16,7 @@ trait JavaPlatform extends Platform {
import global._
import definitions._
- private var currentClassPath: Option[MergedClassPath[AbstractFile]] = None
+ private[nsc] var currentClassPath: Option[MergedClassPath[AbstractFile]] = None
def classPath: ClassPath[AbstractFile] = {
if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result)
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 351eb23c4c..aa18b26d93 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -26,7 +26,7 @@ import scala.reflect.internal.util.NoSourceFile
* where `p` is defined in a library L, and is accessed from a library C (for Client),
* where C was compiled against L', an optimized version of L where the inliner made `p` public at the bytecode level.
* The only such members are fields, either synthetic or isParamAccessor, and thus having a dollar sign in their name
- * (the accesibility of methods and constructors isn't touched by the inliner).
+ * (the accessibility of methods and constructors isn't touched by the inliner).
*
* Thus we add one more goal to our list:
* (c) Compile C (either optimized or not) against any of L or L',
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
index 3a77cab919..fc632e0d0d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -11,12 +11,28 @@ import scala.language.postfixOps
/** On pattern matcher checkability:
*
+ * The spec says that case _: List[Int] should always issue
+ * an unchecked warning:
+ *
+ * > Types which are not of one of the forms described above are
+ * > also accepted as type patterns. However, such type patterns
+ * > will be translated to their erasure (§3.7). The Scala compiler
+ * > will issue an “unchecked” warning for these patterns to flag
+ * > the possible loss of type-safety.
+ *
+ * But the implementation goes a little further to omit warnings
+ * based on the static type of the scrutinee. As a trivial example:
+ *
+ * def foo(s: Seq[Int]) = s match { case _: List[Int] => }
+ *
+ * need not issue this warning.
+ *
* Consider a pattern match of this form: (x: X) match { case _: P => }
*
* There are four possibilities to consider:
* [P1] X will always conform to P
* [P2] x will never conform to P
- * [P3] X <: P if some runtime test is true
+ * [P3] X will conform to P if some runtime test is true
* [P4] X cannot be checked against P
*
* The first two cases correspond to those when there is enough
@@ -28,6 +44,11 @@ import scala.language.postfixOps
* which is essentially the intersection of X and |P|, where |P| is
* the erasure of P. If XR <: P, then no warning is emitted.
*
+ * We evaluate "X will conform to P" by checking `X <: P_wild`, where
+ * P_wild is the result of substituting wildcard types in place of
+ * pattern type variables. This is intentionally stricter than
+ * (X matchesPattern P), see SI-8597 for motivating test cases.
+ *
* Examples of how this info is put to use:
* sealed trait A[T] ; class B[T] extends A[T]
* def f(x: B[Int]) = x match { case _: A[Int] if true => }
@@ -100,7 +121,7 @@ trait Checkable {
private def typeArgsInTopLevelType(tp: Type): List[Type] = {
val tps = tp match {
case RefinedType(parents, _) => parents flatMap typeArgsInTopLevelType
- case TypeRef(_, ArrayClass, arg :: Nil) => typeArgsInTopLevelType(arg)
+ case TypeRef(_, ArrayClass, arg :: Nil) => if (arg.typeSymbol.isAbstractType) arg :: Nil else typeArgsInTopLevelType(arg)
case TypeRef(pre, sym, args) => typeArgsInTopLevelType(pre) ++ args
case ExistentialType(tparams, underlying) => tparams.map(_.tpe) ++ typeArgsInTopLevelType(underlying)
case _ => Nil
@@ -108,14 +129,31 @@ trait Checkable {
tps filterNot isUnwarnableTypeArg
}
+ private def scrutConformsToPatternType(scrut: Type, pattTp: Type): Boolean = {
+ def typeVarToWildcard(tp: Type) = {
+ // The need for typeSymbolDirect is demonstrated in neg/t8597b.scala
+ if (tp.typeSymbolDirect.isPatternTypeVariable) WildcardType else tp
+ }
+ val pattTpWild = pattTp.map(typeVarToWildcard)
+ scrut <:< pattTpWild
+ }
+
private class CheckabilityChecker(val X: Type, val P: Type) {
def Xsym = X.typeSymbol
def Psym = P.typeSymbol
- def XR = if (Xsym == AnyClass) classExistentialType(Psym) else propagateKnownTypes(X, Psym)
+ def PErased = {
+ P match {
+ case erasure.GenericArray(n, core) => existentialAbstraction(core.typeSymbol :: Nil, P)
+ case _ => existentialAbstraction(Psym.typeParams, Psym.tpe_*)
+ }
+ }
+ def XR = if (Xsym == AnyClass) PErased else propagateKnownTypes(X, Psym)
+
+
// sadly the spec says (new java.lang.Boolean(true)).isInstanceOf[scala.Boolean]
- def P1 = X matchesPattern P
+ def P1 = scrutConformsToPatternType(X, P)
def P2 = !Psym.isPrimitiveValueClass && isNeverSubType(X, P)
- def P3 = isNonRefinementClassType(P) && (XR matchesPattern P)
+ def P3 = isNonRefinementClassType(P) && scrutConformsToPatternType(XR, P)
def P4 = !(P1 || P2 || P3)
def summaryString = f"""
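A small illustration (mine, not from the patch) of the checkability cases the comment above describes:

    object CheckabilityExamples {
      // No unchecked warning: a runtime class test on List is enough; the Int argument
      // is implied by the scrutinee's static type Seq[Int] (the "need not issue" case above).
      def f1(s: Seq[Int]) = s match { case _: List[Int] => }

      // Unchecked warning: the String type argument is eliminated by erasure and the
      // static type Seq[Any] gives no information that could recover it.
      def f2(s: Seq[Any]) = s match { case _: List[String] => }
    }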
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index e278130437..b13f9e94cc 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -798,7 +798,7 @@ trait Contexts { self: Analyzer =>
isAccessible(sym, pre) &&
!(imported && {
val e = scope.lookupEntry(name)
- (e ne null) && (e.owner == scope)
+ (e ne null) && (e.owner == scope) && (!settings.isScala212 || e.sym.exists)
})
private def collectImplicits(syms: Scope, pre: Type, imported: Boolean = false): List[ImplicitInfo] =
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index a182a2e269..4d9a6a47ef 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -870,13 +870,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def adaptType(): Tree = {
// @M When not typing a type constructor (!context.inTypeConstructorAllowed)
- // or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *,
+ // or raw type, types must be of kind *,
// and thus parameterized types must be applied to their type arguments
// @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
def properTypeRequired = (
tree.hasSymbolField
&& !context.inTypeConstructorAllowed
- && !(tree.symbol.isJavaDefined && context.unit.isJava)
+ && !context.unit.isJava
)
// @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!!
// (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!)
@@ -5183,16 +5183,19 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
typed(tree.ref, MonoQualifierModes | mode.onlyTypePat, AnyRefTpe)
}
- if (!refTyped.isErrorTyped)
+ if (refTyped.isErrorTyped) {
+ setError(tree)
+ } else {
tree setType refTyped.tpe.resultType
-
- if (treeInfo.admitsTypeSelection(refTyped)) tree
- else UnstableTreeError(refTyped)
+ if (refTyped.isErrorTyped || treeInfo.admitsTypeSelection(refTyped)) tree
+ else UnstableTreeError(tree)
+ }
}
def typedSelectFromTypeTree(tree: SelectFromTypeTree) = {
val qual1 = typedType(tree.qualifier, mode)
- if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual1)
+ if (qual1.isErrorTyped) setError(treeCopy.SelectFromTypeTree(tree, qual1, tree.name))
+ else if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual1)
else typedSelect(tree, qual1, tree.name)
}
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index e89f08ec6b..e78dee5eee 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -197,6 +197,23 @@ abstract class ClassPath[T] {
def packages: IndexedSeq[ClassPath[T]]
def sourcepaths: IndexedSeq[AbstractFile]
+ /** The entries this classpath is composed of. In class `ClassPath` it's just the singleton list containing `this`.
+ * Subclasses such as `MergedClassPath` typically return lists with more elements.
+ */
+ def entries: IndexedSeq[ClassPath[T]] = IndexedSeq(this)
+
+ /** Merge classpath of `platform` and `urls` into merged classpath */
+ def mergeUrlsIntoClassPath(urls: URL*): MergedClassPath[T] = {
+ // Collect our new jars/directories and add them to the existing set of classpaths
+ val allEntries =
+ (entries ++
+ urls.map(url => context.newClassPath(io.AbstractFile.getURL(url)))
+ ).distinct
+
+ // Combine all of our classpaths (old and new) into one merged classpath
+ new MergedClassPath(allEntries, context)
+ }
+
/**
* Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader.
*/
@@ -322,7 +339,7 @@ extends MergedClassPath[T](original.entries map (e => subst getOrElse (e, e)), o
* A classpath unifying multiple class- and sourcepath entries.
*/
class MergedClassPath[T](
- val entries: IndexedSeq[ClassPath[T]],
+ override val entries: IndexedSeq[ClassPath[T]],
val context: ClassPathContext[T])
extends ClassPath[T] {
def this(entries: TraversableOnce[ClassPath[T]], context: ClassPathContext[T]) =
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index d4b9c17eab..e11d1b35d7 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -239,6 +239,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
*
* @param nnIds The set of ids of values (adjusted so that the lowest value does
* not fall below zero), organized as a `BitSet`.
+ * @define Coll `collection.immutable.SortedSet`
*/
class ValueSet private[ValueSet] (private[this] var nnIds: immutable.BitSet)
extends AbstractSet[Value]
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
index 42cb37aa24..a7e06b4d1a 100755
--- a/src/library/scala/collection/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -141,10 +141,10 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
def drop(n: Int): Repr = slice(n, length)
override /*IterableLike*/
- def takeRight(n: Int): Repr = slice(length - n, length)
+ def takeRight(n: Int): Repr = slice(length - math.max(n, 0), length)
override /*IterableLike*/
- def dropRight(n: Int): Repr = slice(0, length - n)
+ def dropRight(n: Int): Repr = slice(0, length - math.max(n, 0))
override /*TraversableLike*/
def splitAt(n: Int): (Repr, Repr) = (take(n), drop(n))
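The practical effect of the clamping above, sketched with a Vector (an IndexedSeq); the results follow directly from the patched formulas:

    val v = Vector(1, 2, 3)
    // With n < 0 the argument is now treated as 0:
    v.takeRight(-1)  // == Vector()         (slice(length, length))
    v.dropRight(-1)  // == Vector(1, 2, 3)  (slice(0, length))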
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index 668190f700..b84d90c51b 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -150,10 +150,10 @@ trait IterableViewLike[+A,
sliding(size, 1) // we could inherit this, but that implies knowledge of the way the super class is implemented.
override def dropRight(n: Int): This =
- take(thisSeq.length - n)
+ take(thisSeq.length - math.max(n, 0))
override def takeRight(n: Int): This =
- drop(thisSeq.length - n)
+ drop(thisSeq.length - math.max(n, 0))
override def stringPrefix = "IterableView"
}
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 0115cc154c..20712f918c 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -322,8 +322,8 @@ trait Iterator[+A] extends TraversableOnce[A] {
*/
def drop(n: Int): Iterator[A] = {
var j = 0
- while (j < n && this.hasNext) {
- this.next
+ while (j < n && hasNext) {
+ next()
j += 1
}
this
diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala
index 02e5dd01f5..2eea15b8dc 100644
--- a/src/library/scala/collection/concurrent/Map.scala
+++ b/src/library/scala/collection/concurrent/Map.scala
@@ -20,7 +20,7 @@ package collection.concurrent
* @tparam A the key type of the map
* @tparam B the value type of the map
*
- * @define Coll `ConcurrentMap`
+ * @define Coll `concurrent.Map`
* @define coll concurrent map
* @define concurrentmapinfo
* This is a base trait for all Scala concurrent map implementations. It
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index 64cf1cfb1e..54455c531a 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -25,7 +25,7 @@ import scala.language.higherKinds
* @author Martin Odersky
* @since 2.8
* @define coll collection
- * @define Coll CC
+ * @define Coll Traversable
*/
trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] {
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index 8cc99a53e6..662075cd93 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -101,8 +101,8 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
else new TreeMap(RB.slice(tree, from, until))
}
- override def dropRight(n: Int) = take(size - n)
- override def takeRight(n: Int) = drop(size - n)
+ override def dropRight(n: Int) = take(size - math.max(n, 0))
+ override def takeRight(n: Int) = drop(size - math.max(n, 0))
override def splitAt(n: Int) = (take(n), drop(n))
private[this] def countWhile(p: ((A, B)) => Boolean): Int = {
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index 681dbbd1a8..7378211db0 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -87,8 +87,8 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
else newSet(RB.slice(tree, from, until))
}
- override def dropRight(n: Int) = take(size - n)
- override def takeRight(n: Int) = drop(size - n)
+ override def dropRight(n: Int) = take(size - math.max(n, 0))
+ override def takeRight(n: Int) = drop(size - math.max(n, 0))
override def splitAt(n: Int) = (take(n), drop(n))
private[this] def countWhile(p: A => Boolean): Int = {
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index 8df606222f..8f77114746 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -30,8 +30,8 @@ import parallel.mutable.ParArray
*
* @tparam A the type of this arraybuffer's elements.
*
- * @define Coll `ArrayBuffer`
- * @define coll arraybuffer
+ * @define Coll `mutable.ArrayBuffer`
+ * @define coll array buffer
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `ArrayBuffer[B]` because an implicit of type `CanBuildFrom[ArrayBuffer, B, ArrayBuffer[B]]`
* is defined in object `ArrayBuffer`.
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index 6230fc23aa..471cd1cdde 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -18,6 +18,8 @@ import scala.collection.parallel.mutable.ParMap
/** A template trait for mutable maps.
* $mapNote
* $mapTags
+ * @define Coll `mutable.Map`
+ * @define coll mutable map
* @since 2.8
*
* @define mapNote
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index a0d3ee0ef0..b852a4747b 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -22,6 +22,8 @@ import immutable.{List, Nil}
* @author Martin Odersky
* @version 2.8
* @since 1
+ * @define Coll `mutable.MutableList`
+ * @define coll mutable list
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable_lists "Scala's Collection Library overview"]]
* section on `Mutable Lists` for more information.
*/
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index 498e9e461e..c56d40786e 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -22,6 +22,8 @@ import immutable.StringLike
* @author Martin Odersky
* @version 2.8
* @since 2.7
+ * @define Coll `mutable.IndexedSeq`
+ * @define coll string builder
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html# "Scala's Collection Library overview"]]
* section on `StringBuilders` for more information.
*/
diff --git a/src/library/scala/collection/parallel/ParIterable.scala b/src/library/scala/collection/parallel/ParIterable.scala
index 2ceeb18eef..a5ba8c49ad 100644
--- a/src/library/scala/collection/parallel/ParIterable.scala
+++ b/src/library/scala/collection/parallel/ParIterable.scala
@@ -23,9 +23,6 @@ import scala.collection.parallel.mutable.ParArrayCombiner
*
* @author Aleksandar Prokopec
* @since 2.9
- *
- * @define Coll `ParIterable`
- * @define coll parallel iterable
*/
trait ParIterable[+T]
extends GenIterable[T]
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 445edd23cb..2b54e05841 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -150,7 +150,8 @@ import scala.collection.parallel.ParallelCollectionImplicits._
* @define indexsignalling
* This method will use `indexFlag` signalling capabilities. This means
* that splitters may set and read the `indexFlag` state.
- *
+ * @define Coll `ParIterable`
+ * @define coll parallel iterable
*/
trait ParIterableLike[+T, +Repr <: ParIterable[T], +Sequential <: Iterable[T] with IterableLike[T, Sequential]]
extends GenIterableLike[T, Repr]
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index d2b15c727a..ee1334ba55 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -24,6 +24,8 @@ import scala.collection.generic.Signalling
*
* @tparam K the key type of the map
* @tparam V the value type of the map
+ * @define Coll `ParMap`
+ * @define coll parallel map
*
* @author Aleksandar Prokopec
* @since 2.9
diff --git a/src/library/scala/collection/parallel/ParSetLike.scala b/src/library/scala/collection/parallel/ParSetLike.scala
index 4e9a2e5751..4feda5ff07 100644
--- a/src/library/scala/collection/parallel/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/ParSetLike.scala
@@ -20,6 +20,8 @@ import scala.collection.Set
* $sideeffects
*
* @tparam T the element type of the set
+ * @define Coll `ParSet`
+ * @define coll parallel set
*
* @author Aleksandar Prokopec
* @since 2.9
diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
index 42027f5bac..5d99394a50 100644
--- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
@@ -22,6 +22,8 @@ import scala.collection.generic.Shrinkable
*
* @tparam K the key type of the map
* @tparam V the value type of the map
+ * @define Coll `ParMap`
+ * @define coll parallel map
*
* @author Aleksandar Prokopec
* @since 2.9
diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala
index 9367f1424d..4e2d3e0e4c 100644
--- a/src/library/scala/collection/parallel/mutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSet.scala
@@ -14,9 +14,6 @@ import scala.collection.parallel.Combiner
/** A mutable variant of `ParSet`.
*
- * @define Coll `mutable.ParSet`
- * @define coll mutable parallel set
- *
* @author Aleksandar Prokopec
*/
trait ParSet[T]
diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
index 13af5ed649..08aa3b024b 100644
--- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
@@ -21,6 +21,8 @@ import scala.collection.generic.Shrinkable
* $sideeffects
*
* @tparam T the element type of the set
+ * @define Coll `mutable.ParSet`
+ * @define coll mutable parallel set
*
* @author Aleksandar Prokopec
* @since 2.9
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index 11d3bb8b02..e380c55880 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -110,8 +110,9 @@ object ExecutionContext {
* The explicit global `ExecutionContext`. Invoke `global` when you want to provide the global
* `ExecutionContext` explicitly.
*
- * The default `ExecutionContext` implementation is backed by a port of
- * [[http://gee.cs.oswego.edu/dl/jsr166/dist/jsr166-4jdk7docs/java/util/concurrent/ForkJoinPool.html java.util.concurrent.ForkJoinPool]].
+ * The default `ExecutionContext` implementation is backed by a work-stealing thread pool. By default,
+ * the thread pool uses a target number of worker threads equal to the number of
+ * [[https://docs.oracle.com/javase/8/docs/api/java/lang/Runtime.html#availableProcessors-- available processors]].
*
* @return the global `ExecutionContext`
*/
@@ -122,15 +123,16 @@ object ExecutionContext {
* The implicit global `ExecutionContext`. Import `global` when you want to provide the global
* `ExecutionContext` implicitly.
*
- * The default `ExecutionContext` implementation is backed by a port of
- * [[http://gee.cs.oswego.edu/dl/jsr166/dist/jsr166-4jdk7docs/java/util/concurrent/ForkJoinPool.html java.util.concurrent.ForkJoinPool]].
+ * The default `ExecutionContext` implementation is backed by a work-stealing thread pool. By default,
+ * the thread pool uses a target number of worker threads equal to the number of
+ * [[https://docs.oracle.com/javase/8/docs/api/java/lang/Runtime.html#availableProcessors-- available processors]].
*/
implicit lazy val global: ExecutionContextExecutor = impl.ExecutionContextImpl.fromExecutor(null: Executor)
}
/** Creates an `ExecutionContext` from the given `ExecutorService`.
*
- * @param e the `ExecutorService` to use
+ * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]].
* @param reporter a function for error reporting
* @return the `ExecutionContext` using the given `ExecutorService`
*/
@@ -147,14 +149,14 @@ object ExecutionContext {
* val ec = ExecutionContext.fromExecutorService(Executors.newSingleThreadExecutor())
* }}}
*
- * @param e the `ExecutorService` to use
+ * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]].
* @return the `ExecutionContext` using the given `ExecutorService`
*/
def fromExecutorService(e: ExecutorService): ExecutionContextExecutorService = fromExecutorService(e, defaultReporter)
/** Creates an `ExecutionContext` from the given `Executor`.
*
- * @param e the `Executor` to use
+ * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]].
* @param reporter a function for error reporting
* @return the `ExecutionContext` using the given `Executor`
*/
@@ -163,7 +165,7 @@ object ExecutionContext {
/** Creates an `ExecutionContext` from the given `Executor` with the [[scala.concurrent.ExecutionContext$.defaultReporter default reporter]].
*
- * @param e the `Executor` to use
+ * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]].
* @return the `ExecutionContext` using the given `Executor`
*/
def fromExecutor(e: Executor): ExecutionContextExecutor = fromExecutor(e, defaultReporter)
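A minimal sketch of what the clarified documentation above promises for a `null` argument; the example is mine, not part of the change:

    import java.util.concurrent.Executor
    import scala.concurrent.ExecutionContext
    // Passing null falls back to an executor created with the default (global-style) configuration.
    val ec: ExecutionContext = ExecutionContext.fromExecutor(null: Executor)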
diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala
index b28f6d4269..512c4fbc27 100644
--- a/src/library/scala/runtime/Tuple2Zipped.scala
+++ b/src/library/scala/runtime/Tuple2Zipped.scala
@@ -17,6 +17,10 @@ import scala.language.{ higherKinds, implicitConversions }
/** This interface is intended as a minimal interface, not complicated
* by the requirement to resolve type constructors, for implicit search (which only
* needs to find an implicit conversion to Traversable for our purposes.)
+ * @define Coll `ZippedTraversable2`
+ * @define coll collection
+ * @define collectExample
+ * @define willNotTerminateInf
*/
trait ZippedTraversable2[+El1, +El2] extends Any {
def foreach[U](f: (El1, El2) => U): Unit
diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala
index 7c501380a3..ffd44acf81 100644
--- a/src/library/scala/runtime/Tuple3Zipped.scala
+++ b/src/library/scala/runtime/Tuple3Zipped.scala
@@ -14,7 +14,12 @@ import scala.collection.{ TraversableLike, IterableLike }
import scala.collection.generic.{ CanBuildFrom => CBF }
import scala.language.{ higherKinds, implicitConversions }
-/** See comment on ZippedTraversable2. */
+/** See comment on ZippedTraversable2
+ * @define Coll `ZippedTraversable3`
+ * @define coll collection
+ * @define collectExample
+ * @define willNotTerminateInf
+ */
trait ZippedTraversable3[+El1, +El2, +El3] extends Any {
def foreach[U](f: (El1, El2, El3) => U): Unit
}
diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala
index 39f66f5030..d2ebf8c044 100644
--- a/src/library/scala/sys/SystemProperties.scala
+++ b/src/library/scala/sys/SystemProperties.scala
@@ -21,6 +21,8 @@ import scala.language.implicitConversions
* System properties. If a security manager is in place which prevents
* the properties from being read or written, the AccessControlException
* will be caught and discarded.
+ * @define Coll `collection.mutable.Map`
+ * @define coll mutable map
*
* @author Paul Phillips
* @version 2.9
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index f2517fff54..667ff7c4b4 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -248,6 +248,7 @@ trait StdNames {
final val Unliftable: NameType = "Unliftable"
final val Name: NameType = "Name"
final val Tree: NameType = "Tree"
+ final val Text: NameType = "Text"
final val TermName: NameType = "TermName"
final val Type : NameType = "Type"
final val TypeName: NameType = "TypeName"
@@ -778,6 +779,7 @@ trait StdNames {
val values : NameType = "values"
val wait_ : NameType = "wait"
val withFilter: NameType = "withFilter"
+ val xml: NameType = "xml"
val zero: NameType = "zero"
// quasiquote interpolators:
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 51f06b1d6d..b2f176e894 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -792,6 +792,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isDefinedInPackage = effectiveOwner.isPackageClass
final def needsFlatClasses = phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass
+ // TODO introduce a flag for these?
+ final def isPatternTypeVariable: Boolean =
+ isAbstractType && !isExistential && !isTypeParameterOrSkolem && isLocalToBlock
+
/** change name by appending $$<fully-qualified-name-of-class `base`>
* Do the same for any accessed symbols or setters/getters.
* Implementation in TermSymbol.
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index 5433bfad60..a35620a994 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -396,7 +396,7 @@ abstract class UnPickler {
case NOtpe => NoType
case NOPREFIXtpe => NoPrefix
case THIStpe => ThisType(readSymbolRef())
- case SINGLEtpe => SingleType(readTypeRef(), readSymbolRef())
+ case SINGLEtpe => SingleType(readTypeRef(), readSymbolRef().filter(_.isStable)) // SI-7596 account for overloading
case SUPERtpe => SuperType(readTypeRef(), readTypeRef())
case CONSTANTtpe => ConstantType(readConstantRef())
case TYPEREFtpe => TypeRef(readTypeRef(), readSymbolRef(), readTypes())
diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala
index ac1159b2ac..bcefcc471f 100644
--- a/src/reflect/scala/reflect/io/AbstractFile.scala
+++ b/src/reflect/scala/reflect/io/AbstractFile.scala
@@ -48,14 +48,16 @@ object AbstractFile {
else null
/**
- * If the specified URL exists and is a readable zip or jar archive,
- * returns an abstract directory backed by it. Otherwise, returns
- * `null`.
+ * If the specified URL exists and is a regular file or a directory, returns an
+ * abstract regular file or an abstract directory, respectively, backed by it.
+ * Otherwise, returns `null`.
*/
- def getURL(url: URL): AbstractFile = {
- if (url == null || !Path.isExtensionJarOrZip(url.getPath)) null
- else ZipArchive fromURL url
- }
+ def getURL(url: URL): AbstractFile =
+ if (url.getProtocol == "file") {
+ val f = new java.io.File(url.getPath)
+ if (f.isDirectory) getDirectory(f)
+ else getFile(f)
+ } else null
def getResources(url: URL): AbstractFile = ZipArchive fromManifestURL url
}
diff --git a/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala
index 5edc051461..586b8a5257 100644
--- a/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala
+++ b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala
@@ -11,12 +11,16 @@ private[reflect] trait ThreadLocalStorage {
trait ThreadLocalStorage[T] { def get: T; def set(newValue: T): Unit }
private class MyThreadLocalStorage[T](initialValue: => T) extends ThreadLocalStorage[T] {
// TODO: how do we use org.cliffc.high_scale_lib.NonBlockingHashMap here?
- val values = new java.util.concurrent.ConcurrentHashMap[Thread, T]()
+ // (we would need a version that uses weak keys)
+ private val values = java.util.Collections.synchronizedMap(new java.util.WeakHashMap[Thread, T]())
def get: T = {
if (values containsKey currentThread) values.get(currentThread)
else {
val value = initialValue
- values.putIfAbsent(currentThread, value)
+ // since the key is currentThread, and `values` is private, it
+ // would be impossible for a value to have been set after the
+ // above containsKey check. `putIfAbsent` is not necessary.
+ values.put(currentThread, value)
value
}
}
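A hedged usage sketch of the storage trait above; `mkThreadLocalStorage` is the factory I assume the surrounding file exposes, named here only for illustration:

    // Per-thread, lazily initialized state. Because the map keys are weak references to
    // Thread objects, entries for dead threads become collectable (the leak SI-8946 addresses).
    val perThreadCounter = mkThreadLocalStorage(0)
    perThreadCounter.set(perThreadCounter.get + 1)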
diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
index 1c751c8a9f..8bef424e2b 100644
--- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
@@ -19,6 +19,7 @@ import scala.reflect.internal.util.{ BatchSourceFile, ScalaClassLoader }
import ScalaClassLoader._
import scala.reflect.io.{ File, Directory }
import scala.tools.util._
+import io.AbstractFile
import scala.collection.generic.Clearable
import scala.concurrent.{ ExecutionContext, Await, Future, future }
import ExecutionContext.Implicits._
@@ -221,7 +222,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
nullary("power", "enable power user mode", powerCmd),
nullary("quit", "exit the interpreter", () => Result(keepRunning = false, None)),
cmd("replay", "[options]", "reset the repl and replay all previous commands", replayCommand),
- //cmd("require", "<path>", "add a jar or directory to the classpath", require), // TODO
+ cmd("require", "<path>", "add a jar to the classpath", require),
cmd("reset", "[options]", "reset the repl to its initial state, forgetting all session entries", resetCommand),
cmd("save", "<path>", "save replayable session to a file", saveCommand),
shCommand,
@@ -620,13 +621,57 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
val f = File(arg).normalize
if (f.exists) {
addedClasspath = ClassPath.join(addedClasspath, f.path)
- val totalClasspath = ClassPath.join(settings.classpath.value, addedClasspath)
- echo("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, totalClasspath))
- replay()
+ intp.addUrlsToClassPath(f.toURI.toURL)
+ echo("Added '%s' to classpath.".format(f.path, intp.global.classPath.asClasspathString))
+ repldbg("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, intp.global.classPath.asClasspathString))
}
else echo("The path '" + f + "' doesn't seem to exist.")
}
+ /** Adds jar file to the current classpath. Jar will only be added if it
+ * does not contain classes that already exist on the current classpath.
+ *
+ * Importantly, `require` adds jars to the classpath ''without'' resetting
+ * the state of the interpreter. This is in contrast to `replay` which can
+ * be used to add jars to the classpath and which creates a new instance of
+ * the interpreter and replays all interpreter expressions.
+ */
+ def require(arg: String): Unit = {
+ class InfoClassLoader extends java.lang.ClassLoader {
+ def classOf(arr: Array[Byte]): Class[_] =
+ super.defineClass(null, arr, 0, arr.length)
+ }
+
+ val f = File(arg).normalize
+
+ if (f.isDirectory) {
+ echo("Adding directories to the classpath is not supported. Add a jar instead.")
+ return
+ }
+
+ val jarFile = AbstractFile.getDirectory(new java.io.File(arg))
+
+ def flatten(f: AbstractFile): Iterator[AbstractFile] =
+ if (f.isClassContainer) f.iterator.flatMap(flatten)
+ else Iterator(f)
+
+ val entries = flatten(jarFile)
+ val cloader = new InfoClassLoader
+
+ def classNameOf(classFile: AbstractFile): String = cloader.classOf(classFile.toByteArray).getName
+ def alreadyDefined(clsName: String) = intp.classLoader.tryToLoadClass(clsName).isDefined
+ val exists = entries.filter(_.hasExtension("class")).map(classNameOf).exists(alreadyDefined)
+
+ if (!f.exists) echo(s"The path '$f' doesn't seem to exist.")
+ else if (exists) echo(s"The path '$f' cannot be loaded, because existing classpath entries conflict.") // TODO tell me which one
+ else {
+ addedClasspath = ClassPath.join(addedClasspath, f.path)
+ intp.addUrlsToClassPath(f.toURI.toURL)
+ echo("Added '%s' to classpath.".format(f.path, intp.global.classPath.asClasspathString))
+ repldbg("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, intp.global.classPath.asClasspathString))
+ }
+ }
+
def powerCmd(): Result = {
if (isReplPower) "Already in power mode."
else enablePowerMode(isDuringInit = false)
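An illustrative session, not taken from the patch, showing how the re-enabled `:require` command behaves (the jar path is made up; the messages mirror the `echo` calls above):

    scala> :require /tmp/extra-lib.jar
    Added '/tmp/extra-lib.jar' to classpath.

    scala> :require /tmp
    Adding directories to the classpath is not supported. Add a jar instead.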
diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index 3f4922a602..b990e401ec 100644
--- a/src/repl/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -18,9 +18,13 @@ import scala.reflect.internal.util.{ BatchSourceFile, SourceFile }
import scala.tools.util.PathResolver
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings }
-import scala.tools.nsc.util.{ ScalaClassLoader, stringFromReader, stringFromWriter, StackTraceOps }
+import scala.tools.nsc.util.{ ScalaClassLoader, stringFromReader, stringFromWriter, StackTraceOps, ClassPath, MergedClassPath }
+import ScalaClassLoader.URLClassLoader
import scala.tools.nsc.util.Exceptional.unwrap
+import scala.tools.nsc.backend.JavaPlatform
import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException, CompiledScript, Compilable}
+import java.net.URL
+import java.io.File
/** An interpreter for Scala code.
*
@@ -82,6 +86,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
private var _classLoader: util.AbstractFileClassLoader = null // active classloader
private val _compiler: ReplGlobal = newCompiler(settings, reporter) // our private compiler
+ private var _runtimeClassLoader: URLClassLoader = null // wrapper exposing addURL
+
def compilerClasspath: Seq[java.net.URL] = (
if (isInitializeComplete) global.classPath.asURLs
else new PathResolver(settings).result.asURLs // the compiler's classpath
@@ -237,6 +243,18 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
new Global(settings, reporter) with ReplGlobal { override def toString: String = "<global>" }
}
+ /**
+ * Adds all specified jars to the compile and runtime classpaths.
+ *
+ * @note Currently only supports jars, not directories.
+ * @param urls The list of items to add to the compile and runtime classpaths.
+ */
+ def addUrlsToClassPath(urls: URL*): Unit = {
+ new Run // force some initialization
+ urls.foreach(_runtimeClassLoader.addURL) // Add jars to runtime classloader
+ global.extendCompilerClassPath(urls: _*) // Add jars to compile-time classpath
+ }
+
/** Parent classloader. Overridable. */
protected def parentClassLoader: ClassLoader =
settings.explicitParentLoader.getOrElse( this.getClass.getClassLoader() )
@@ -329,9 +347,9 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
}
private def makeClassLoader(): util.AbstractFileClassLoader =
- new TranslatingClassLoader(parentClassLoader match {
- case null => ScalaClassLoader fromURLs compilerClasspath
- case p => new ScalaClassLoader.URLClassLoader(compilerClasspath, p)
+ new TranslatingClassLoader({
+ _runtimeClassLoader = new URLClassLoader(compilerClasspath, parentClassLoader)
+ _runtimeClassLoader
})
// Set the current Java "context" class loader to this interpreter's class loader
diff --git a/src/scaladoc/scala/tools/nsc/doc/DocParser.scala b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
index 6dc3e5a62b..f03b848af6 100644
--- a/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
@@ -15,13 +15,14 @@ import DocParser.Parsed
* right after parsing so it can read `DocDefs` from source code which would
* otherwise cause the compiler to go haywire.
*/
-class DocParser(settings: nsc.Settings, reporter: Reporter) extends Global(settings, reporter) {
+class DocParser(settings: nsc.Settings, reporter: Reporter) extends Global(settings, reporter) with ScaladocGlobalTrait {
def this(settings: Settings) = this(settings, new ConsoleReporter(settings))
def this() = this(new Settings(Console println _))
// the usual global initialization
locally { new Run() }
+ override def forScaladoc = true
override protected def computeInternalPhases() {
phasesSet += syntaxAnalyzer
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
index 7a67055ffa..7289edc137 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -313,7 +313,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
/* Subclass cache */
private lazy val subClassesCache = (
- if (sym == AnyRefClass) null
+ if (sym == AnyRefClass || sym == AnyClass) null
else mutable.ListBuffer[DocTemplateEntity]()
)
def registerSubClass(sc: DocTemplateEntity): Unit = {
diff --git a/test/files/neg/t2866.check b/test/files/neg/t2866.check
new file mode 100644
index 0000000000..340fb8da22
--- /dev/null
+++ b/test/files/neg/t2866.check
@@ -0,0 +1,17 @@
+t2866.scala:30: warning: imported `one' is permanently hidden by definition of value one
+ import A.one // warning: imported `one' is permanently hidden by definition of value one.
+ ^
+t2866.scala:42: error: ambiguous implicit values:
+ both value two of type Int
+ and value one in object A of type => Int
+ match expected type Int
+ assert(implicitly[Int] == 2) // !!! Not ambiguous in 2.8.0. Ambiguous in 2.7.6
+ ^
+t2866.scala:50: error: ambiguous implicit values:
+ both value two of type Int
+ and value one in object A of type => Int
+ match expected type Int
+ assert(implicitly[Int] == 2) // !!! Not ambiguous in 2.8.0. Ambiguous in 2.7.6
+ ^
+one warning found
+two errors found
diff --git a/test/files/neg/t2866.scala b/test/files/neg/t2866.scala
new file mode 100644
index 0000000000..55ebff9710
--- /dev/null
+++ b/test/files/neg/t2866.scala
@@ -0,0 +1,59 @@
+// for 2.7.x compatibility
+
+object A {
+ implicit val one = 1
+}
+
+object Test {
+
+ locally {
+ import A._
+ locally {
+ // assert(implicitly[Int] == 1) // error: could not find implicit value for parameter e: Int.
+ // !!! Why one A.one?
+ // (I assume you mean: why _not_ A.one? A.one is shadowed by local one.
+ // but the local one cannot be used yet because it does not have an explicit type.
+ implicit val one = 2
+ assert(implicitly[Int] == 2)
+ assert(one == 2)
+ }
+ }
+
+ locally {
+ import A._
+ implicit val one: Int = 2
+ assert(implicitly[Int] == 2)
+ assert(one == 2)
+ }
+
+ locally {
+ import A.one // warning: imported `one' is permanently hidden by definition of value one.
+ // !!! Really?
+ //assert(implicitly[Int] == 1)
+ implicit val one = 2
+ assert(implicitly[Int] == 2) // !!! why not 2?
+ assert(one == 2)
+ }
+
+ locally {
+ import A.one
+ assert(implicitly[Int] == 1)
+ implicit val two = 2
+ assert(implicitly[Int] == 2) // !!! Not ambiguous in 2.8.0. Ambiguous in 2.7.6
+ }
+
+ locally {
+ import A._
+ assert(implicitly[Int] == 1)
+ implicit val two = 2
+ import A.{one => _}
+ assert(implicitly[Int] == 2) // !!! Not ambiguous in 2.8.0. Ambiguous in 2.7.6
+ }
+
+ locally {
+ import A.{one => _, _}
+ implicit val two = 2
+ assert(implicitly[Int] == 2) // not ambiguous in 2.8.0 nor ambiguous in 2.7.6
+ }
+
+}
diff --git a/test/files/neg/t5639b.check b/test/files/neg/t5639b.check
new file mode 100644
index 0000000000..faa1766660
--- /dev/null
+++ b/test/files/neg/t5639b.check
@@ -0,0 +1,4 @@
+A_2.scala:6: error: could not find implicit value for parameter e: Int
+ implicitly[Int]
+ ^
+one error found
diff --git a/test/files/neg/t5639b/A_1.scala b/test/files/neg/t5639b/A_1.scala
new file mode 100644
index 0000000000..c5da10eae4
--- /dev/null
+++ b/test/files/neg/t5639b/A_1.scala
@@ -0,0 +1,17 @@
+import Implicits._
+
+class Baz
+
+object Test {
+ implicitly[Int]
+}
+
+object Implicits {
+ implicit val Baz: Int = 0
+ // This implicit was being ignored by `isQualifyingImplicit`
+ // if the classpath contained a class file for `class Baz`.
+ // This is because the package scope contains a speculative
+ // symbol for `object Baz` which is entered by `SymbolLoaders`
+ // before looking inside the class file. (A Java-originated
+ // classfile results in the class/module symbol pair.)
+}
diff --git a/test/files/neg/t5639b/A_2.scala b/test/files/neg/t5639b/A_2.scala
new file mode 100644
index 0000000000..2bb36273e0
--- /dev/null
+++ b/test/files/neg/t5639b/A_2.scala
@@ -0,0 +1,11 @@
+import Implicits._
+
+class Baz
+
+object Test {
+ implicitly[Int]
+}
+
+object Implicits {
+ implicit val Baz: Int = 0
+}
diff --git a/test/files/neg/t8534.check b/test/files/neg/t8534.check
new file mode 100644
index 0000000000..297e7c1beb
--- /dev/null
+++ b/test/files/neg/t8534.check
@@ -0,0 +1,4 @@
+t8534.scala:6: error: MyTrait is not an enclosing class
+ class BugTest {def isTheBugHere(in: MyTrait.this.type#SomeData) = false}
+ ^
+one error found
diff --git a/test/files/neg/t8534.scala b/test/files/neg/t8534.scala
new file mode 100644
index 0000000000..f118d22b82
--- /dev/null
+++ b/test/files/neg/t8534.scala
@@ -0,0 +1,7 @@
+object line1 {
+ trait MyTrait
+}
+object line2 {
+ import line2._
+ class BugTest {def isTheBugHere(in: MyTrait.this.type#SomeData) = false}
+}
diff --git a/test/files/neg/t8534b.check b/test/files/neg/t8534b.check
new file mode 100644
index 0000000000..39ffa41194
--- /dev/null
+++ b/test/files/neg/t8534b.check
@@ -0,0 +1,4 @@
+t8534b.scala:3: error: stable identifier required, but foo.type found.
+ type T = foo.type#Foo
+ ^
+one error found
diff --git a/test/files/neg/t8534b.scala b/test/files/neg/t8534b.scala
new file mode 100644
index 0000000000..73b6703a9c
--- /dev/null
+++ b/test/files/neg/t8534b.scala
@@ -0,0 +1,4 @@
+object Test {
+ def foo = ""
+ type T = foo.type#Foo
+}
diff --git a/test/files/neg/t8597.check b/test/files/neg/t8597.check
new file mode 100644
index 0000000000..bc945f9191
--- /dev/null
+++ b/test/files/neg/t8597.check
@@ -0,0 +1,21 @@
+t8597.scala:2: warning: abstract type T in type pattern Some[T] is unchecked since it is eliminated by erasure
+ def nowarn[T] = (null: Any) match { case _: Some[T] => } // warn (did not warn due to SI-8597)
+ ^
+t8597.scala:5: warning: abstract type pattern T is unchecked since it is eliminated by erasure
+ def warn1[T] = (null: Any) match { case _: T => } // warn
+ ^
+t8597.scala:6: warning: non-variable type argument String in type pattern Some[String] is unchecked since it is eliminated by erasure
+ def warn2 = (null: Any) match { case _: Some[String] => } // warn
+ ^
+t8597.scala:7: warning: non-variable type argument Unchecked.this.C in type pattern Some[Unchecked.this.C] is unchecked since it is eliminated by erasure
+ (null: Any) match { case _: Some[C] => } // warn
+ ^
+t8597.scala:18: warning: abstract type T in type pattern Array[T] is unchecked since it is eliminated by erasure
+ def warnArray[T] = (null: Any) match { case _: Array[T] => } // warn (did not warn due to SI-8597)
+ ^
+t8597.scala:26: warning: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure
+ def warnArrayErasure2 = (null: Any) match {case Some(_: Array[Array[List[String]]]) => } // warn
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+6 warnings found
+one error found
diff --git a/test/files/neg/t8597.flags b/test/files/neg/t8597.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/t8597.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/t8597.scala b/test/files/neg/t8597.scala
new file mode 100644
index 0000000000..068e87d91a
--- /dev/null
+++ b/test/files/neg/t8597.scala
@@ -0,0 +1,27 @@
+class Unchecked[C] {
+ def nowarn[T] = (null: Any) match { case _: Some[T] => } // warn (did not warn due to SI-8597)
+
+ // These warned before.
+ def warn1[T] = (null: Any) match { case _: T => } // warn
+ def warn2 = (null: Any) match { case _: Some[String] => } // warn
+ (null: Any) match { case _: Some[C] => } // warn
+
+ // These must remain without warnings. These are excerpts from
+ // related tests that are more exhaustive.
+ class C; class D extends C
+ def okay = (List(new D) : Seq[D]) match { case _: List[C] => case _ => } // nowarn
+ class B2[A, B]
+ class A2[X] extends B2[X, String]
+ def okay2(x: A2[Int]) = x match { case _: B2[Int, _] => true } // nowarn
+ def okay3(x: A2[Int]) = x match { case _: B2[Int, typeVar] => true } // nowarn
+
+ def warnArray[T] = (null: Any) match { case _: Array[T] => } // warn (did not warn due to SI-8597)
+ def nowarnArrayC = (null: Any) match { case _: Array[C] => } // nowarn
+
+ def nowarnArrayTypeVar[T] = (null: Any) match { case _: Array[t] => } // nowarn
+
+ def noWarnArrayErasure1 = (null: Any) match {case Some(_: Array[String]) => } // nowarn
+ def noWarnArrayErasure2 = (null: Any) match {case Some(_: Array[List[_]]) => } // nowarn
+ def noWarnArrayErasure3 = (null: Any) match {case Some(_: Array[Array[List[_]]]) => } // nowarn
+ def warnArrayErasure2 = (null: Any) match {case Some(_: Array[Array[List[String]]]) => } // warn
+}
diff --git a/test/files/neg/t8597b.check b/test/files/neg/t8597b.check
new file mode 100644
index 0000000000..3c45a31337
--- /dev/null
+++ b/test/files/neg/t8597b.check
@@ -0,0 +1,6 @@
+t8597b.scala:18: warning: non-variable type argument T in type pattern Some[T] is unchecked since it is eliminated by erasure
+ case _: Some[T] => // warn
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t8597b.flags b/test/files/neg/t8597b.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/t8597b.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/t8597b.scala b/test/files/neg/t8597b.scala
new file mode 100644
index 0000000000..b29d591cb1
--- /dev/null
+++ b/test/files/neg/t8597b.scala
@@ -0,0 +1,21 @@
+object Unchecked {
+ (null: Any) match {
+ case _: Some[t] =>
+
+ // t is a fresh pattern type variable, despite our attempts to
+ // backtick our way to the enclosing `t`. Under this interpretation,
+ // the absence of an unchecked warning is expected.
+ (null: Any) match {
+ case _: Some[t] => // no warn
+ }
+ (null: Any) match {
+ case _: Some[`t`] => // no warn
+ }
+
+ // here we correctly issue an unchecked warning
+ type T = t
+ (null: Any) match {
+ case _: Some[T] => // warn
+ }
+ }
+}
diff --git a/test/files/neg/t963.check b/test/files/neg/t963.check
index 4dc202c7bd..483e53c77d 100644
--- a/test/files/neg/t963.check
+++ b/test/files/neg/t963.check
@@ -1,9 +1,9 @@
-t963.scala:14: error: stable identifier required, but Test.this.y3.x found.
+t963.scala:14: error: stable identifier required, but y3.x.type found.
val w3 : y3.x.type = y3.x
- ^
-t963.scala:17: error: stable identifier required, but Test.this.y4.x found.
+ ^
+t963.scala:17: error: stable identifier required, but y4.x.type found.
val w4 : y4.x.type = y4.x
- ^
+ ^
t963.scala:10: error: type mismatch;
found : AnyRef{def x: Integer}
required: AnyRef{val x: Integer}
diff --git a/test/files/neg/unchecked-abstract.check b/test/files/neg/unchecked-abstract.check
index 72019082ac..703929dca8 100644
--- a/test/files/neg/unchecked-abstract.check
+++ b/test/files/neg/unchecked-abstract.check
@@ -4,6 +4,9 @@ unchecked-abstract.scala:16: warning: abstract type H in type Contravariant[M.th
unchecked-abstract.scala:21: warning: abstract type H in type Contravariant[M.this.H] is unchecked since it is eliminated by erasure
/* warn */ println(x.isInstanceOf[Contravariant[H]])
^
+unchecked-abstract.scala:22: warning: abstract type T in type Contravariant[M.this.T] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Contravariant[T]])
+ ^
unchecked-abstract.scala:27: warning: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure
/* warn */ println(x.isInstanceOf[Invariant[T]])
^
@@ -22,6 +25,15 @@ unchecked-abstract.scala:36: warning: abstract type H in type Invariant[M.this.H
unchecked-abstract.scala:37: warning: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure
/* warn */ println(x.isInstanceOf[Invariant[T]])
^
+unchecked-abstract.scala:42: warning: abstract type T in type Covariant[M.this.T] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Covariant[T]])
+ ^
+unchecked-abstract.scala:43: warning: abstract type L in type Covariant[M.this.L] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Covariant[L]])
+ ^
+unchecked-abstract.scala:48: warning: abstract type L in type Covariant[M.this.L] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Covariant[L]])
+ ^
error: No warnings can be incurred under -Xfatal-warnings.
-8 warnings found
+12 warnings found
one error found
diff --git a/test/files/pos/t5639.flags b/test/files/pos/t5639.flags
new file mode 100644
index 0000000000..0acce1e7ce
--- /dev/null
+++ b/test/files/pos/t5639.flags
@@ -0,0 +1 @@
+-Xsource:2.12
diff --git a/test/files/pos/t5639/A_1.scala b/test/files/pos/t5639/A_1.scala
new file mode 100644
index 0000000000..c5da10eae4
--- /dev/null
+++ b/test/files/pos/t5639/A_1.scala
@@ -0,0 +1,17 @@
+import Implicits._
+
+class Baz
+
+object Test {
+ implicitly[Int]
+}
+
+object Implicits {
+ implicit val Baz: Int = 0
+ // This implicit was being ignored by `isQualifyingImplicit`
+ // if the classpath contained a class file for `class Baz`.
+ // This is because the package scope contains a speculative
+ // symbol for `object Baz` which is entered by `SymbolLoaders`
+ // before looking inside the class file. (A Java-originated
+ // classfile results in the class/module symbol pair.)
+}
diff --git a/test/files/pos/t5639/A_2.scala b/test/files/pos/t5639/A_2.scala
new file mode 100644
index 0000000000..2bb36273e0
--- /dev/null
+++ b/test/files/pos/t5639/A_2.scala
@@ -0,0 +1,11 @@
+import Implicits._
+
+class Baz
+
+object Test {
+ implicitly[Int]
+}
+
+object Implicits {
+ implicit val Baz: Int = 0
+}
diff --git a/test/files/pos/t5639/Bar.scala b/test/files/pos/t5639/Bar.scala
deleted file mode 100644
index f577500acd..0000000000
--- a/test/files/pos/t5639/Bar.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package pack.age
-
-import pack.age.Implicits._
-
-object Quux {
- def baz : Baz = 1
-}
diff --git a/test/files/pos/t5639/Foo.scala b/test/files/pos/t5639/Foo.scala
deleted file mode 100644
index 1a07734a8e..0000000000
--- a/test/files/pos/t5639/Foo.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package pack.age
-
-class Baz
-
-object Implicits {
- implicit def Baz(n: Int): Baz = new Baz
-}
diff --git a/test/files/pos/t7596/A_1.scala b/test/files/pos/t7596/A_1.scala
new file mode 100644
index 0000000000..6303c6d132
--- /dev/null
+++ b/test/files/pos/t7596/A_1.scala
@@ -0,0 +1,10 @@
+trait Driver {
+ abstract class Table
+}
+
+object Config {
+ val driver : Driver = ???
+ def driver(a: Any) = ???
+}
+
+object Sites extends Config.driver.Table
diff --git a/test/files/pos/t7596/B_2.scala b/test/files/pos/t7596/B_2.scala
new file mode 100644
index 0000000000..977e5c8bd1
--- /dev/null
+++ b/test/files/pos/t7596/B_2.scala
@@ -0,0 +1,19 @@
+object Test {
+ locally {
+ Sites: Config.driver.Table
+ }
+}
+
+// Under separate compilation, the pickler is foiled by the
+// overloaded term `Config.driver`, and results in:
+
+// qbin/scalac test/files/pos/t7596/A_1.scala && qbin/scalac -explaintypes test/files/pos/t7596/B_2.scala
+// test/files/pos/t7596/B_2.scala:3: error: type mismatch;
+// found : Sites.type
+// required: Config.driver.Table
+// Sites: Config.driver.Table
+// ^
+// Sites.type <: Config.driver.Table?
+// Driver.this.type = Config.driver.type?
+// false
+// false \ No newline at end of file
diff --git a/test/files/pos/t7596b/A.scala b/test/files/pos/t7596b/A.scala
new file mode 100644
index 0000000000..65c1bc56ef
--- /dev/null
+++ b/test/files/pos/t7596b/A.scala
@@ -0,0 +1,10 @@
+trait H2Driver{
+ abstract class Table[T]
+}
+
+object Config {
+ val driver : H2Driver = ???
+ def driver(app: Any): H2Driver = ???
+}
+
+class Sites extends Config.driver.Table[String]
diff --git a/test/files/pos/t7596b/B.scala b/test/files/pos/t7596b/B.scala
new file mode 100644
index 0000000000..cbcf149c23
--- /dev/null
+++ b/test/files/pos/t7596b/B.scala
@@ -0,0 +1,6 @@
+class DAOBase[E]{
+ type TableType <: Config.driver.Table[E]
+}
+class SitesDAO extends DAOBase[String]{
+ type TableType = Sites
+}
diff --git a/test/files/pos/t7596c/A_1.scala b/test/files/pos/t7596c/A_1.scala
new file mode 100644
index 0000000000..3e366df477
--- /dev/null
+++ b/test/files/pos/t7596c/A_1.scala
@@ -0,0 +1,11 @@
+trait Driver {
+ abstract class Table
+}
+
+object Config {
+ val driver : Driver = ???
+ val driverUniqueName: driver.type = driver
+ def driver(a: Any) = ???
+}
+
+object Sites extends Config.driver.Table
diff --git a/test/files/pos/t7596c/B_2.scala b/test/files/pos/t7596c/B_2.scala
new file mode 100644
index 0000000000..33da68c1ff
--- /dev/null
+++ b/test/files/pos/t7596c/B_2.scala
@@ -0,0 +1,9 @@
+object Test {
+ locally {
+ Sites: Config.driver.Table
+ }
+}
+
+// This variation worked by avoiding referring to the
+// overloaded term `Config.driver` in the parent type of
+// Sites \ No newline at end of file
diff --git a/test/files/run/iterator-concat.check b/test/files/run/iterator-concat.check
deleted file mode 100644
index 23835b07ae..0000000000
--- a/test/files/run/iterator-concat.check
+++ /dev/null
@@ -1,4 +0,0 @@
-100
-1000
-10000
-100000
diff --git a/test/files/run/iterator-concat.scala b/test/files/run/iterator-concat.scala
deleted file mode 100644
index f11363410f..0000000000
--- a/test/files/run/iterator-concat.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-object Test {
- // Create `size` Function0s, each of which evaluates to an Iterator
- // which produces 1. Then fold them over ++ to get a single iterator,
- // which should sum to "size".
- def mk(size: Int): Iterator[Int] = {
- val closures = (1 to size).toList.map(x => (() => Iterator(1)))
- closures.foldLeft(Iterator.empty: Iterator[Int])((res, f) => res ++ f())
- }
- def main(args: Array[String]): Unit = {
- println(mk(100).sum)
- println(mk(1000).sum)
- println(mk(10000).sum)
- println(mk(100000).sum)
- }
-}
diff --git a/test/files/run/iterator-iterate-lazy.scala b/test/files/run/iterator-iterate-lazy.scala
deleted file mode 100644
index 92b170062e..0000000000
--- a/test/files/run/iterator-iterate-lazy.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object Test {
- def main(args: Array[String]): Unit = {
- Iterator.iterate((1 to 5).toList)(_.tail).takeWhile(_.nonEmpty).map(_.head).toList
- }
-}
diff --git a/test/files/run/iterators.check b/test/files/run/iterators.check
deleted file mode 100644
index abaf80ff38..0000000000
--- a/test/files/run/iterators.check
+++ /dev/null
@@ -1,14 +0,0 @@
-test check_from was successful
-test check_range was successful
-test check_range2 was successful
-test check_range3 was successful
-test check_take was successful
-test check_drop was successful
-test check_slice was successful
-test check_foreach was successful
-test check_forall was successful
-test check_fromArray was successful
-test check_toSeq was successful
-test check_indexOf was successful
-test check_findIndexOf was successful
-
diff --git a/test/files/run/iterators.scala b/test/files/run/iterators.scala
deleted file mode 100644
index d54da3d3ba..0000000000
--- a/test/files/run/iterators.scala
+++ /dev/null
@@ -1,168 +0,0 @@
-//############################################################################
-// Iterators
-//############################################################################
-
-//############################################################################
-
-import scala.language.postfixOps
-
-object Test {
-
- def check_from: Int = {
- val it1 = Iterator.from(-1)
- val it2 = Iterator.from(0, -1)
- it1.next + it2.next
- }
-
- def check_range: Int = {
- val xs1 = Iterator.range(0, 10, 2) toList;
- val xs2 = Iterator.range(0, 10, -2) toList;
- val xs3 = Iterator.range(10, 0, -2) toList;
- val xs4 = Iterator.range(10, 0, 2) toList;
- val xs5 = Iterator.range(0, 10, 11) toList;
- xs1.length + xs2.length + xs3.length + xs4.length + xs5.length
- }
-
- def check_range2: Int = {
- val r1start = 0
- val r1end = 10
- val r1step = 1
- val r1 = Iterator.range(r1start, r1end, r1step) toList;
- val r2 = Iterator.range(r1start, r1end, r1step + 1) toList;
- val r3 = Iterator.range(r1end, r1start, -r1step) toList;
- val r4 = Iterator.range(0, 10, 11) toList;
- // 10 + 5 + 10 + 1
- r1.length + r2.length + r3.length + r4.length
- }
-
- def check_range3: Int = {
- def trues(xs: List[Boolean]) = xs.foldLeft(0)((a, b) => if (b) a+1 else a)
- val r1 = Iterator.range(0, 10)
- val xs1 = List(r1 contains 5, r1 contains 6)
- val r2a = Iterator.range(0, 10, 2)
- val r2b = Iterator.range(0, 10, 2)
- val xs2 = List(r2a contains 5, r2b contains 6)
- val r3 = Iterator.range(0, 10, 11)
- val xs3 = List(r3 contains 5, r3 contains 6)
- // 2 + 1 + 0
- trues(xs1) + trues(xs2) + trues(xs3)
- }
-
- def check_take: Int = {
- val it1 = Iterator.from(0)
- val xs1 = it1 take 10 toList;
- xs1.length
- }
-
- def check_drop: Int = {
- val tests = Array(
- Iterator.from(0).take(5).drop(1).toSeq sameElements Seq(1, 2, 3, 4),
- Iterator.from(0).take(5).drop(3).toSeq sameElements Seq(3, 4),
-
- Iterator.from(0).take(5).drop(5).toSeq sameElements Seq(),
- Iterator.from(0).take(5).drop(10).toSeq sameElements Seq(),
-
- Iterator.from(0).take(5).drop(0).toSeq sameElements Seq(0, 1, 2, 3, 4),
- Iterator.from(0).take(5).drop(-1).toSeq sameElements Seq(0, 1, 2, 3, 4),
-
- Iterator.from(0).take(5).drop(1).map(2 * _).toSeq sameElements Seq(2, 4, 6, 8),
- Iterator.from(0).take(5).map(2 * _).drop(1).toSeq sameElements Seq(2, 4, 6, 8),
-
- Iterator.from(0).take(5).drop(1).drop(2).toSeq sameElements Seq(3, 4),
- Iterator.from(0).take(5).drop(2).drop(1).toSeq sameElements Seq(3, 4)
- )
- tests.count(result => result)
- }
-
- def check_slice: Int = {
- val tests = Array(
- Iterator.from(0).slice(3, 7).toSeq sameElements Seq(3, 4, 5, 6),
- Iterator.from(0).slice(3, 3).toSeq sameElements Seq(),
- Iterator.from(0).slice(-1, 3).toSeq sameElements Seq(0, 1, 2),
- Iterator.from(0).slice(3, -1).toSeq sameElements Seq(),
-
- Iterator.from(0).slice(3, 7).map(2 * _).toSeq sameElements Seq(6, 8, 10, 12),
- Iterator.from(0).map(2 * _).slice(3, 7).toSeq sameElements Seq(6, 8, 10, 12),
-
- Iterator.from(0).slice(3, 7).drop(1).toSeq sameElements Seq(4, 5, 6),
- Iterator.from(0).drop(1).slice(3, 7).toSeq sameElements Seq(4, 5, 6, 7),
-
- Iterator.from(0).slice(3, 7).slice(1, 3).toSeq sameElements Seq(4, 5),
- Iterator.from(0).slice(3, 7).slice(1, 10).toSeq sameElements Seq(4, 5, 6)
- )
- tests.count(result => result)
- }
-
- def check_foreach: Int = {
- val it1 = Iterator.from(0) take 20
- var n = 0
- it1 foreach { n += _ }
- n
- }
-
- def check_forall: Int = {
- val it1 = Iterator.from(0)
- val it2 = Iterator.from(1)
- 0
- }
-
- def check_fromArray: Int = { // ticket #429
- val a = List(1, 2, 3, 4).toArray
- var xs0 = a.iterator.toList;
- var xs1 = a.slice(0, 1).iterator.toList;
- var xs2 = a.slice(0, 2).iterator.toList;
- var xs3 = a.slice(0, 3).iterator.toList;
- var xs4 = a.slice(0, 4).iterator.toList;
- xs0.length + xs1.length + xs2.length + xs3.length + xs4.length
- }
-
- def check_toSeq: String =
- List(1, 2, 3, 4, 5).iterator.toSeq.mkString("x")
-
- def check_indexOf: String = {
- val i = List(1, 2, 3, 4, 5).indexOf(4)
- val j = List(1, 2, 3, 4, 5).indexOf(16)
- "" + i + "x" + j
- }
-
- def check_findIndexOf: String = {
- val i = List(1, 2, 3, 4, 5).indexWhere { x: Int => x >= 4 }
- val j = List(1, 2, 3, 4, 5).indexWhere { x: Int => x >= 16 }
- "" + i + "x" + j
- }
-
- def check_success[A](name: String, closure: => A, expected: A) {
- print("test " + name)
- try {
- val actual: A = closure
- if (actual == expected)
- print(" was successful")
- else
- print(" failed: expected "+ expected +", found "+ actual)
- }
- catch {
- case exception: Throwable =>
- print(" raised exception " + exception)
- }
- println()
- }
-
- def main(args: Array[String]) {
- check_success("check_from", check_from, -1)
- check_success("check_range", check_range, 11)
- check_success("check_range2", check_range2, 26)
- check_success("check_range3", check_range3, 3)
- check_success("check_take", check_take, 10)
- check_success("check_drop", check_drop, 10)
- check_success("check_slice", check_slice, 10)
- check_success("check_foreach", check_foreach, 190)
- check_success("check_forall", check_forall, 0)
- check_success("check_fromArray",check_fromArray, 14)
- check_success("check_toSeq", check_toSeq, "1x2x3x4x5")
- check_success("check_indexOf", check_indexOf, "3x-1")
- check_success("check_findIndexOf", check_findIndexOf, "3x-1")
- println()
- }
-}
-
-//############################################################################
diff --git a/test/files/run/t2866.check b/test/files/run/t2866.check
new file mode 100644
index 0000000000..7f52da85fb
--- /dev/null
+++ b/test/files/run/t2866.check
@@ -0,0 +1,3 @@
+t2866.scala:30: warning: imported `one' is permanently hidden by definition of value one
+ import A.one // warning: imported `one' is permanently hidden by definition of value one.
+ ^
diff --git a/test/files/run/t2866.scala b/test/files/run/t2866.scala
new file mode 100644
index 0000000000..8059107583
--- /dev/null
+++ b/test/files/run/t2866.scala
@@ -0,0 +1,44 @@
+// for 2.7.x compatibility
+
+object A {
+ implicit val one = 1
+}
+
+object Test extends App {
+
+ locally {
+ import A._
+ locally {
+ // assert(implicitly[Int] == 1) // error: could not find implicit value for parameter e: Int.
+ // !!! Why one A.one?
+ // (I assume you mean: why _not_ A.one? A.one is shadowed by local one.
+ // but the local one cannot be used yet because it does not have an explicit type.
+ implicit val one = 2
+ assert(implicitly[Int] == 2)
+ assert(one == 2)
+ }
+ }
+
+ locally {
+ import A._
+ implicit val one: Int = 2
+ assert(implicitly[Int] == 2)
+ assert(one == 2)
+ }
+
+ locally {
+ import A.one // warning: imported `one' is permanently hidden by definition of value one.
+ // !!! Really?
+ //assert(implicitly[Int] == 1)
+ implicit val one = 2
+ assert(implicitly[Int] == 2) // !!! why not 2?
+ assert(one == 2)
+ }
+
+ locally {
+ import A.{one => _, _}
+ implicit val two = 2
+ assert(implicitly[Int] == 2) // not ambiguous in 2.8.0 nor ambiguous in 2.7.6
+ }
+
+}
diff --git a/test/files/run/t3516.check b/test/files/run/t3516.check
deleted file mode 100644
index d0d10d82fa..0000000000
--- a/test/files/run/t3516.check
+++ /dev/null
@@ -1,3 +0,0 @@
-1
-1
-21
diff --git a/test/files/run/t3516.scala b/test/files/run/t3516.scala
deleted file mode 100644
index aa302ce85a..0000000000
--- a/test/files/run/t3516.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-object Test {
- def mkIterator = (1 to 5).iterator map (x => { println(x) ; x })
- def mkInfinite = Iterator continually { println(1) ; 1 }
-
- def main(args: Array[String]): Unit = {
- // Stream is strict in its head so we should see 1 from each of them.
- val s1 = mkIterator.toStream
- val s2 = mkInfinite.toStream
- // back and forth without slipping into nontermination.
- println((Stream from 1).toIterator.drop(10).toStream.drop(10).toIterator.next)
- ()
- }
-}
diff --git a/test/files/run/t5938.scala b/test/files/run/t5938.scala
new file mode 100644
index 0000000000..59a95ac37f
--- /dev/null
+++ b/test/files/run/t5938.scala
@@ -0,0 +1,35 @@
+import scala.tools.partest.DirectTest
+
+object Test extends DirectTest {
+
+ override def extraSettings: String =
+ s"-usejavacp -d ${testOutput.path}"
+
+ override def code = """
+object O extends C {
+ def main(args: Array[String]): Unit = {
+ }
+ // Static forwarder for foo and setter_foo_= added more than once in a multi-run compile.
+}
+ """.trim
+
+ override def show(): Unit = {
+ val global = newCompiler()
+ Console.withErr(System.out) {
+ compileString(global)(code)
+ compileString(global)(code)
+ loadClass // was "duplicate name and signature in class X"
+ }
+ }
+
+ def loadClass: Class[_] = {
+ val cl = new java.net.URLClassLoader(Array(testOutput.toFile.toURL));
+ cl.loadClass("O")
+ }
+}
+
+trait T {
+ val foo: String = ""
+}
+class C extends T
+
diff --git a/test/files/run/t6502.check b/test/files/run/t6502.check
new file mode 100644
index 0000000000..95d36ee221
--- /dev/null
+++ b/test/files/run/t6502.check
@@ -0,0 +1,8 @@
+test1 res1: true
+test1 res2: true
+test2 res1: true
+test2 res2: true
+test3 res1: true
+test3 res2: true
+test4 res1: true
+test4 res2: true
diff --git a/test/files/run/t6502.scala b/test/files/run/t6502.scala
new file mode 100644
index 0000000000..ced1b5812d
--- /dev/null
+++ b/test/files/run/t6502.scala
@@ -0,0 +1,101 @@
+import scala.tools.partest._
+import java.io.File
+import scala.tools.nsc.interpreter.ILoop
+
+object Test extends StoreReporterDirectTest {
+ def code = ???
+
+ def compileCode(code: String, jarFileName: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", s"${testOutput.path}/$jarFileName"))(code)
+ }
+
+ def app1 = """
+ package test
+
+ object Test extends App {
+ def test(): Unit = {
+ println("testing...")
+ }
+ }"""
+
+ def app2 = """
+ package test
+
+ object Test extends App {
+ def test(): Unit = {
+ println("testing differently...")
+ }
+ }"""
+
+ def app3 = """
+ package test
+
+ object Test3 extends App {
+ def test(): Unit = {
+ println("new object in existing package")
+ }
+ }"""
+
+ def test1(): Unit = {
+ val jar = "test1.jar"
+ compileCode(app1, jar)
+
+ val output = ILoop.run(List(s":require ${testOutput.path}/$jar", "test.Test.test()"))
+ val lines = output.split("\n")
+ val res1 = lines(4).contains("Added") && lines(4).contains("test1.jar")
+ val res2 = lines(lines.length-3).contains("testing...")
+
+ println(s"test1 res1: $res1")
+ println(s"test1 res2: $res2")
+ }
+
+ def test2(): Unit = {
+ // should reject jars with conflicting entries
+ val jar1 = "test1.jar"
+ val jar2 = "test2.jar"
+ compileCode(app2, jar2)
+
+ val output = ILoop.run(List(s":require ${testOutput.path}/$jar1", s":require ${testOutput.path}/$jar2"))
+ val lines = output.split("\n")
+ val res1 = lines(4).contains("Added") && lines(4).contains("test1.jar")
+ val res2 = lines(lines.length-3).contains("test2.jar") && lines(lines.length-3).contains("existing classpath entries conflict")
+
+ println(s"test2 res1: $res1")
+ println(s"test2 res2: $res2")
+ }
+
+ def test3(): Unit = {
+ // should accept jars with overlapping packages, but no conflicts
+ val jar1 = "test1.jar"
+ val jar3 = "test3.jar"
+ compileCode(app3, jar3)
+
+ val output = ILoop.run(List(s":require ${testOutput.path}/$jar1", s":require ${testOutput.path}/$jar3", "test.Test3.test()"))
+ val lines = output.split("\n")
+ val res1 = lines(4).contains("Added") && lines(4).contains("test1.jar")
+ val res2 = lines(lines.length-3).contains("new object in existing package")
+
+ println(s"test3 res1: $res1")
+ println(s"test3 res2: $res2")
+ }
+
+ def test4(): Unit = {
+ // twice the same jar should be rejected
+ val jar1 = "test1.jar"
+ val output = ILoop.run(List(s":require ${testOutput.path}/$jar1", s":require ${testOutput.path}/$jar1"))
+ val lines = output.split("\n")
+ val res1 = lines(4).contains("Added") && lines(4).contains("test1.jar")
+ val res2 = lines(lines.length-3).contains("test1.jar") && lines(lines.length-3).contains("existing classpath entries conflict")
+
+ println(s"test4 res1: $res1")
+ println(s"test4 res2: $res2")
+ }
+
+ def show(): Unit = {
+ test1()
+ test2()
+ test3()
+ test4()
+ }
+}
diff --git a/test/files/run/t7407.flags b/test/files/run/t7407.flags
index c8547a27dc..be4ef0798a 100644
--- a/test/files/run/t7407.flags
+++ b/test/files/run/t7407.flags
@@ -1 +1 @@
--Ynooptimise -Ybackend:GenBCode
+-Ynooptimise -Yopt:l:none -Ybackend:GenBCode
diff --git a/test/files/run/t8253.check b/test/files/run/t8253.check
new file mode 100644
index 0000000000..0b4cb2d1f7
--- /dev/null
+++ b/test/files/run/t8253.check
@@ -0,0 +1,40 @@
+
+<sample xmlns='ns1'/>
+{
+ var $tmpscope: _root_.scala.xml.NamespaceBinding = $scope;
+ $tmpscope = new _root_.scala.xml.NamespaceBinding(null, "ns1", $tmpscope);
+ {
+ val $scope: _root_.scala.xml.NamespaceBinding = $tmpscope;
+ new _root_.scala.xml.Elem(null, "sample", _root_.scala.xml.Null, $scope, true)
+ }
+}
+
+<sample xmlns={identity(ns1)}/>
+{
+ var $tmpscope: _root_.scala.xml.NamespaceBinding = $scope;
+ $tmpscope = new _root_.scala.xml.NamespaceBinding(null, ns1, $tmpscope);
+ {
+ val $scope: _root_.scala.xml.NamespaceBinding = $tmpscope;
+ new _root_.scala.xml.Elem(null, "sample", _root_.scala.xml.Null, $scope, true)
+ }
+}
+
+<sample xmlns:foo='ns1'/>
+{
+ var $tmpscope: _root_.scala.xml.NamespaceBinding = $scope;
+ $tmpscope = new _root_.scala.xml.NamespaceBinding("foo", "ns1", $tmpscope);
+ {
+ val $scope: _root_.scala.xml.NamespaceBinding = $tmpscope;
+ new _root_.scala.xml.Elem(null, "sample", _root_.scala.xml.Null, $scope, true)
+ }
+}
+
+<sample xmlns:foo={identity(ns1)}/>
+{
+ var $tmpscope: _root_.scala.xml.NamespaceBinding = $scope;
+ $tmpscope = new _root_.scala.xml.NamespaceBinding("foo", ns1, $tmpscope);
+ {
+ val $scope: _root_.scala.xml.NamespaceBinding = $tmpscope;
+ new _root_.scala.xml.Elem(null, "sample", _root_.scala.xml.Null, $scope, true)
+ }
+}
diff --git a/test/files/run/t8253.scala b/test/files/run/t8253.scala
new file mode 100644
index 0000000000..c4800b4491
--- /dev/null
+++ b/test/files/run/t8253.scala
@@ -0,0 +1,14 @@
+object Test extends App {
+ import reflect.runtime.universe._ // not using the XML library in compiler tests
+
+ def show(code: String, t: Tree) = println(s"\n$code\n$t")
+
+ val ns1 = "ns1"
+ show("<sample xmlns='ns1'/>", q"<sample xmlns='ns1'/>")
+ show("<sample xmlns={identity(ns1)}/>", q"<sample xmlns={ns1}/>")
+ show("<sample xmlns:foo='ns1'/>", q"<sample xmlns:foo='ns1'/>")
+ show("<sample xmlns:foo={identity(ns1)}/>", q"<sample xmlns:foo={ns1}/>")
+
+ // `identity(foo)` used to match the overly permissive match in SymbolXMLBuilder
+ // which was intended to more specifically match
+}
diff --git a/test/files/run/t8925.check b/test/files/run/t8925.check
new file mode 100644
index 0000000000..112e7005df
--- /dev/null
+++ b/test/files/run/t8925.check
@@ -0,0 +1,2 @@
+bar
+abcd
diff --git a/test/files/run/t8925.flags b/test/files/run/t8925.flags
new file mode 100644
index 0000000000..be4ef0798a
--- /dev/null
+++ b/test/files/run/t8925.flags
@@ -0,0 +1 @@
+-Ynooptimise -Yopt:l:none -Ybackend:GenBCode
diff --git a/test/files/run/t8925.scala b/test/files/run/t8925.scala
new file mode 100644
index 0000000000..33f4505f03
--- /dev/null
+++ b/test/files/run/t8925.scala
@@ -0,0 +1,31 @@
+object Ex {
+ def unapply(t: Throwable): Option[Throwable] = Some(t)
+}
+
+class A {
+ var x = ""
+
+ def bar =
+ try {
+ "bar"
+ } finally {
+ try {
+ x += "a"
+ } finally {
+ x += "b"
+ try {
+ x += "c"
+ throw null
+ } catch {
+ case Ex(_) =>
+ x += "d"
+ }
+ }
+ }
+}
+
+object Test extends App {
+ val a = new A
+ println(a.bar)
+ println(a.x)
+}
diff --git a/test/files/run/t8946.scala b/test/files/run/t8946.scala
new file mode 100644
index 0000000000..a248a20501
--- /dev/null
+++ b/test/files/run/t8946.scala
@@ -0,0 +1,29 @@
+// Tests to assert that references to threads are not strongly held when scala-reflection is used inside of them.
+object Test {
+ import scala.ref.WeakReference
+
+ def forceGc() = {
+ var obj = new Object
+ val ref = new WeakReference(obj)
+ obj = null;
+ while(ref.get.nonEmpty)
+ Array.ofDim[Byte](16 * 1024 * 1024)
+ }
+
+ def main(args: Array[String]): Unit = {
+ val threads = for (i <- (1 to 16)) yield {
+ val t = new Thread {
+ override def run(): Unit = {
+ import reflect.runtime.universe._
+ typeOf[List[String]] <:< typeOf[Seq[_]]
+ }
+ }
+ t.start()
+ t.join()
+ WeakReference(t)
+ }
+ forceGc()
+ val nonGCdThreads = threads.filter(_.get.nonEmpty).length
+ assert(nonGCdThreads == 0, s"${nonGCdThreads} threads were retained; expected 0.")
+ }
+}
diff --git a/test/files/t8449/Client.scala b/test/files/t8449/Client.scala
new file mode 100644
index 0000000000..5d273f06b2
--- /dev/null
+++ b/test/files/t8449/Client.scala
@@ -0,0 +1,3 @@
+object Client {
+ def foo: Any = new Test().foo
+}
diff --git a/test/files/t8449/Test.java b/test/files/t8449/Test.java
new file mode 100644
index 0000000000..ecb1711b24
--- /dev/null
+++ b/test/files/t8449/Test.java
@@ -0,0 +1,10 @@
+public class Test {
+ // Raw type over a Scala type constructor
+ public scala.Function1 foo() { return null; }
+ // scalac reported:
+ // % scalac-hash v2.11.2 -d /tmp sandbox/{Test.java,Client.scala}
+ // sandbox/Test.java:2: error: trait Function1 takes type parameters
+ // public scala.Function1 foo() { return null; }
+ // ^
+ // one error found
+}
diff --git a/test/junit/scala/collection/IndexedSeqOptimizedTest.scala b/test/junit/scala/collection/IndexedSeqOptimizedTest.scala
index e5382907af..419e1454cb 100644
--- a/test/junit/scala/collection/IndexedSeqOptimizedTest.scala
+++ b/test/junit/scala/collection/IndexedSeqOptimizedTest.scala
@@ -13,4 +13,17 @@ class IndexedSeqOptimizedTest {
assertEquals(0, (Array(2): collection.mutable.WrappedArray[Int]).lastIndexWhere(_ => true, 1))
assertEquals(2, "abc123".lastIndexWhere(_.isLetter, 6))
}
+
+ @Test
+ def hasCorrectDropAndTakeMethods() {
+ assertEquals("", "abc" take Int.MinValue)
+ assertEquals("", "abc" takeRight Int.MinValue)
+ assertEquals("abc", "abc" drop Int.MinValue)
+ assertEquals("abc", "abc" dropRight Int.MinValue)
+
+ assertArrayEquals(Array.empty[Int], Array(1, 2, 3) take Int.MinValue)
+ assertArrayEquals(Array.empty[Int], Array(1, 2, 3) takeRight Int.MinValue)
+ assertArrayEquals(Array(1, 2, 3), Array(1, 2, 3) drop Int.MinValue)
+ assertArrayEquals(Array(1, 2, 3), Array(1, 2, 3) dropRight Int.MinValue)
+ }
}
diff --git a/test/junit/scala/collection/IterableViewLikeTest.scala b/test/junit/scala/collection/IterableViewLikeTest.scala
new file mode 100644
index 0000000000..55da02744b
--- /dev/null
+++ b/test/junit/scala/collection/IterableViewLikeTest.scala
@@ -0,0 +1,20 @@
+package scala.collection
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class IterableViewLikeTest {
+
+ @Test
+ def hasCorrectDropAndTakeMethods() {
+ val iter = Iterable(1, 2, 3)
+
+ assertEquals(Iterable.empty[Int], iter.view take Int.MinValue force)
+ assertEquals(Iterable.empty[Int], iter.view takeRight Int.MinValue force)
+ assertEquals(iter, iter.view drop Int.MinValue force)
+ assertEquals(iter, iter.view dropRight Int.MinValue force)
+ }
+}
diff --git a/test/junit/scala/collection/IteratorTest.scala b/test/junit/scala/collection/IteratorTest.scala
index b7a9805c9f..d5389afd0c 100644
--- a/test/junit/scala/collection/IteratorTest.scala
+++ b/test/junit/scala/collection/IteratorTest.scala
@@ -6,11 +6,14 @@ import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
+import scala.tools.testing.AssertUtil._
+
+import Seq.empty
+
@RunWith(classOf[JUnit4])
class IteratorTest {
- @Test
- def groupedIteratorShouldNotAskForUnneededElement(): Unit = {
+ @Test def groupedIteratorShouldNotAskForUnneededElement(): Unit = {
var counter = 0
val it = new Iterator[Int] { var i = 0 ; def hasNext = { counter = i; true } ; def next = { i += 1; i } }
val slidingIt = it sliding 2
@@ -25,4 +28,130 @@ class IteratorTest {
slidingIt.next
assertEquals("Counter should be one, that means we didn't look further than needed", 1, counter)
}
+
+ @Test def dropDoesNotGrowStack(): Unit = {
+ def it = new Iterator[Throwable] { def hasNext = true ; def next = new Throwable }
+
+ assertEquals(it.drop(1).next.getStackTrace.length, it.drop(1).drop(1).next.getStackTrace.length)
+ }
+
+ @Test def dropIsChainable(): Unit = {
+ assertSameElements(1 to 4, Iterator from 0 take 5 drop 1)
+ assertSameElements(3 to 4, Iterator from 0 take 5 drop 3)
+ assertSameElements(empty, Iterator from 0 take 5 drop 5)
+ assertSameElements(empty, Iterator from 0 take 5 drop 10)
+ assertSameElements(0 to 4, Iterator from 0 take 5 drop 0)
+ assertSameElements(0 to 4, Iterator from 0 take 5 drop -1)
+ assertSameElements(2 to 8 by 2, Iterator from 0 take 5 drop 1 map (2 * _))
+ assertSameElements(2 to 8 by 2, Iterator from 0 take 5 map (2 * _) drop 1)
+ assertSameElements(3 to 4, Iterator from 0 take 5 drop 1 drop 2)
+ assertSameElements(3 to 4, Iterator from 0 take 5 drop 2 drop 1)
+ }
+
+ @Test def sliceIsChainable(): Unit = {
+ assertSameElements(3 to 6, Iterator from 0 slice (3, 7))
+ assertSameElements(empty, Iterator from 0 slice (3, 3))
+ assertSameElements(0 to 2, Iterator from 0 slice (-1, 3))
+ assertSameElements(empty, Iterator from 0 slice (3, -1))
+ assertSameElements(6 to 12 by 2, Iterator from 0 slice (3, 7) map (2 * _))
+ assertSameElements(6 to 12 by 2, Iterator from 0 map (2 * _) slice (3, 7))
+ assertSameElements(4 to 6, Iterator from 0 slice (3, 7) drop 1)
+ assertSameElements(4 to 7, Iterator from 0 drop 1 slice (3, 7))
+ assertSameElements(4 to 5, Iterator from 0 slice (3, 7) slice (1, 3))
+ assertSameElements(4 to 6, Iterator from 0 slice (3, 7) slice (1, 10))
+ }
+
+ // test/files/run/iterator-concat.scala
+ @Test def concatIsStackFriendly(): Unit = {
+ // Create `size` Function0s, each of which evaluates to an Iterator
+ // which produces 1. Then fold them over ++ to get a single iterator,
+ // which should sum to "size".
+ def mk(size: Int): Iterator[Int] = {
+ //val closures = (1 to size).toList.map(x => (() => Iterator(1)))
+ //closures.foldLeft(Iterator.empty: Iterator[Int])((res, f) => res ++ f())
+ List.fill(size)(() => Iterator(1)).foldLeft(Iterator.empty: Iterator[Int])((res, f) => res ++ f())
+ }
+ assertEquals(100, mk(100).sum)
+ assertEquals(1000, mk(1000).sum)
+ assertEquals(10000, mk(10000).sum)
+ assertEquals(100000, mk(100000).sum)
+ }
+
+ @Test def from(): Unit = {
+ val it1 = Iterator.from(-1)
+ val it2 = Iterator.from(0, -1)
+ assertEquals(-1, it1.next())
+ assertEquals(0, it2.next())
+ }
+ @Test def range(): Unit = {
+ assertEquals(5, Iterator.range(0, 10, 2).size)
+ assertEquals(0, Iterator.range(0, 10, -2).size)
+ assertEquals(5, Iterator.range(10, 0, -2).size)
+ assertEquals(0, Iterator.range(10, 0, 2).size)
+ assertEquals(1, Iterator.range(0, 10, 11).size)
+ assertEquals(10, Iterator.range(0, 10, 1).size)
+ assertEquals(10, Iterator.range(10, 0, -1).size)
+ }
+ @Test def range3(): Unit = {
+ val r1 = Iterator.range(0, 10)
+ assertTrue(r1 contains 5)
+ assertTrue(r1 contains 6)
+ assertFalse(r1 contains 4)
+ val r2a = Iterator.range(0, 10, 2)
+ assertFalse(r2a contains 5)
+ val r2b = Iterator.range(0, 10, 2)
+ assertTrue(r2b contains 6)
+ val r3 = Iterator.range(0, 10, 11)
+ assertFalse(r3 contains 5)
+ assertTrue(r3.isEmpty)
+ }
+ @Test def take(): Unit = {
+ assertEquals(10, (Iterator from 0 take 10).size)
+ }
+ @Test def foreach(): Unit = {
+ val it1 = Iterator.from(0) take 20
+ var n = 0
+ it1 foreach { n += _ }
+ assertEquals(190, n)
+ }
+ // ticket #429
+ @Test def fromArray(): Unit = {
+ val a = List(1, 2, 3, 4).toArray
+ var xs0 = a.iterator.toList;
+ var xs1 = a.slice(0, 1).iterator
+ var xs2 = a.slice(0, 2).iterator
+ var xs3 = a.slice(0, 3).iterator
+ var xs4 = a.slice(0, 4).iterator
+ assertEquals(14, xs0.size + xs1.size + xs2.size + xs3.size + xs4.size)
+ }
+ @Test def toSeq(): Unit = {
+ assertEquals("1x2x3x4x5", List(1, 2, 3, 4, 5).iterator.mkString("x"))
+ }
+ @Test def indexOf(): Unit = {
+ assertEquals(3, List(1, 2, 3, 4, 5).iterator.indexOf(4))
+ assertEquals(-1, List(1, 2, 3, 4, 5).iterator.indexOf(16))
+ }
+ @Test def indexWhere(): Unit = {
+ assertEquals(3, List(1, 2, 3, 4, 5).iterator.indexWhere { x: Int => x >= 4 })
+ assertEquals(-1, List(1, 2, 3, 4, 5).iterator.indexWhere { x: Int => x >= 16 })
+ }
+ // iterator-iterate-lazy.scala
+ // was java.lang.UnsupportedOperationException: tail of empty list
+ @Test def iterateIsSufficientlyLazy(): Unit = {
+ //Iterator.iterate((1 to 5).toList)(_.tail).takeWhile(_.nonEmpty).toList // suffices
+ Iterator.iterate((1 to 5).toList)(_.tail).takeWhile(_.nonEmpty).map(_.head).toList
+ }
+ // SI-3516
+ @Test def toStreamIsSufficientlyLazy(): Unit = {
+ val results = collection.mutable.ListBuffer.empty[Int]
+ def mkIterator = (1 to 5).iterator map (x => { results += x ; x })
+ def mkInfinite = Iterator continually { results += 1 ; 1 }
+
+ // Stream is strict in its head so we should see 1 from each of them.
+ val s1 = mkIterator.toStream
+ val s2 = mkInfinite.toStream
+ // back and forth without slipping into nontermination.
+ results += (Stream from 1).toIterator.drop(10).toStream.drop(10).toIterator.next()
+ assertSameElements(List(1,1,21), results)
+ }
}
diff --git a/test/junit/scala/collection/immutable/TreeMapTest.scala b/test/junit/scala/collection/immutable/TreeMapTest.scala
new file mode 100644
index 0000000000..4c21b94b24
--- /dev/null
+++ b/test/junit/scala/collection/immutable/TreeMapTest.scala
@@ -0,0 +1,20 @@
+package scala.collection.immutable
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class TreeMapTest {
+
+ @Test
+ def hasCorrectDropAndTakeMethods() {
+ val tree = TreeMap(1 -> "a", 2 -> "b", 3 -> "c")
+
+ assertEquals(TreeMap.empty[Int, String], tree take Int.MinValue)
+ assertEquals(TreeMap.empty[Int, String], tree takeRight Int.MinValue)
+ assertEquals(tree, tree drop Int.MinValue)
+ assertEquals(tree, tree dropRight Int.MinValue)
+ }
+}
diff --git a/test/junit/scala/collection/immutable/TreeSetTest.scala b/test/junit/scala/collection/immutable/TreeSetTest.scala
new file mode 100644
index 0000000000..8efe1bfeb8
--- /dev/null
+++ b/test/junit/scala/collection/immutable/TreeSetTest.scala
@@ -0,0 +1,20 @@
+package scala.collection.immutable
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class TreeSetTest {
+
+ @Test
+ def hasCorrectDropAndTakeMethods() {
+ val set = TreeSet(1, 2, 3)
+
+ assertEquals(TreeSet.empty[Int], set take Int.MinValue)
+ assertEquals(TreeSet.empty[Int], set takeRight Int.MinValue)
+ assertEquals(set, set drop Int.MinValue)
+ assertEquals(set, set dropRight Int.MinValue)
+ }
+}
diff --git a/test/junit/scala/tools/testing/AssertUtil.scala b/test/junit/scala/tools/testing/AssertUtil.scala
index 9b4833d46b..83a637783f 100644
--- a/test/junit/scala/tools/testing/AssertUtil.scala
+++ b/test/junit/scala/tools/testing/AssertUtil.scala
@@ -1,6 +1,11 @@
package scala.tools
package testing
+import org.junit.Assert
+import Assert.fail
+import scala.runtime.ScalaRunTime.stringOf
+import scala.collection.{ GenIterable, IterableLike }
+
/** This module contains additional higher-level assert statements
* that are ultimately based on junit.Assert primitives.
*/
@@ -21,6 +26,19 @@ object AssertUtil {
throw e
else return
}
- throw new AssertionError("Expression did not throw!")
+ fail("Expression did not throw!")
}
+
+ /** JUnit-style assertion for `IterableLike.sameElements`.
+ */
+ def assertSameElements[A, B >: A](expected: IterableLike[A, _], actual: GenIterable[B], message: String = ""): Unit =
+ if (!(expected sameElements actual))
+ fail(
+ f"${ if (message.nonEmpty) s"$message " else "" }expected:<${ stringOf(expected) }> but was:<${ stringOf(actual) }>"
+ )
+
+ /** Convenient for testing iterators.
+ */
+ def assertSameElements[A, B >: A](expected: IterableLike[A, _], actual: Iterator[B]): Unit =
+ assertSameElements(expected, actual.toList, "")
}