Diffstat (limited to 'src')
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeGen.scala  |  5
-rw-r--r--  src/compiler/scala/tools/nsc/doc/Settings.scala  |  20
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js  |  2
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala  |  10
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/IMain.scala  |  2
-rw-r--r--  src/compiler/scala/tools/nsc/matching/Patterns.scala  |  2
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala  |  4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala  |  8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala  |  115
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala  |  34
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala  |  17
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala  |  487
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala  |  4
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java  |  2
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java  |  30
-rw-r--r--  src/library/scala/collection/TraversableOnce.scala  |  14
-rw-r--r--  src/library/scala/collection/generic/IsTraversableLike.scala  |  101
-rw-r--r--  src/library/scala/collection/immutable/Range.scala  |  1
-rw-r--r--  src/library/scala/util/Properties.scala  |  2
-rw-r--r--  src/manual/scala/man1/scaladoc.scala  |  4
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala  |  60
-rw-r--r--  src/reflect/scala/reflect/internal/settings/MutableSettings.scala  |  2
-rw-r--r--  src/reflect/scala/reflect/runtime/Settings.scala  |  1
-rw-r--r--  src/swing/scala/swing/SwingActor.scala  |  22
-rw-r--r--  src/swing/scala/swing/SwingWorker.scala  |  4
25 files changed, 523 insertions(+), 430 deletions(-)
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 5cb43575b8..96146b7343 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -253,6 +253,11 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
}
}
+ // drop annotations generated by CPS plugin etc, since its annotationchecker rejects T @cps[U] <: Any
+ // let's assume for now annotations don't affect casts, drop them there, and bring them back using the outer Typed tree
+ def mkCastPreservingAnnotations(tree: Tree, pt: Type) =
+ Typed(mkCast(tree, pt.withoutAnnotations.dealias), TypeTree(pt))
+
/** Generate a cast for tree Tree representing Array with
* elem type elemtp to expected type pt.
*/
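The added mkCastPreservingAnnotations drops annotations (and aliases) for the cast itself and re-attaches the annotated type through an outer Typed node. A user-level analogue of that shape, assuming a hypothetical cpsLike annotation (this is not the compiler's Tree-level code):

    import scala.annotation.StaticAnnotation

    class cpsLike extends StaticAnnotation  // hypothetical stand-in for a CPS-style type annotation

    // cast to the bare type first, then re-attach the annotated type via an ascription
    def castKeepingAnnotation(x: Any): String @cpsLike =
      (x.asInstanceOf[String]: String @cpsLike)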
diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala
index 10a0d8d879..02630a99b2 100644
--- a/src/compiler/scala/tools/nsc/doc/Settings.scala
+++ b/src/compiler/scala/tools/nsc/doc/Settings.scala
@@ -7,8 +7,6 @@ package scala.tools.nsc
package doc
import java.io.File
-import java.net.URI
-import java.lang.System
import scala.language.postfixOps
/** An extended version of compiler settings, with additional Scaladoc-specific options.
@@ -72,10 +70,10 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
""
)
- val docExternalUris = MultiStringSetting (
- "-doc-external-uris",
+ val docExternalDoc = MultiStringSetting (
+ "-doc-external-doc",
"external-doc",
- "comma-separated list of file://classpath_entry_path#doc_URL URIs for external dependencies"
+ "comma-separated list of classpath_entry_path#doc_URL pairs describing external dependencies."
)
val useStupidTypes = BooleanSetting (
@@ -265,9 +263,15 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
map ++ (pkgs map (_ -> url))
}
- lazy val extUrlMapping: Map[String, String] = docExternalUris.value map { s =>
- val uri = new URI(s)
- uri.getSchemeSpecificPart -> appendIndex(uri.getFragment)
+ lazy val extUrlMapping: Map[String, String] = docExternalDoc.value flatMap { s =>
+ val idx = s.indexOf("#")
+ if (idx > 0) {
+ val (first, last) = s.splitAt(idx)
+ Some(new File(first).getAbsolutePath -> appendIndex(last.substring(1)))
+ } else {
+ error(s"Illegal -doc-external-doc option; expected a pair with '#' separator, found: '$s'")
+ None
+ }
} toMap
/**
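The new extUrlMapping splits each -doc-external-doc argument at its first '#', resolving the classpath entry to an absolute path and appending the index fragment to the URL. A standalone sketch of that parsing (hypothetical helper name, not the compiler's code):

    def parseExternalDoc(s: String): Option[(String, String)] = {
      val idx = s.indexOf('#')
      if (idx > 0) {
        val (entry, url) = s.splitAt(idx)
        // resolve the classpath entry to an absolute path; drop the leading '#' from the URL part
        Some(new java.io.File(entry).getAbsolutePath -> url.substring(1))
      } else None  // the real setting reports an error here instead of silently skipping
    }

    // parseExternalDoc("lib/scala-library.jar#http://www.scala-lang.org/api/current/")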
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
index ff73745972..1323a06c01 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
@@ -339,7 +339,7 @@ function configureTextFilter() {
printAlphabet();
var input = $("#textfilter input");
resizeFilterBlock();
- input.bind("keydown", function(event) {
+ input.bind('keyup', function(event) {
if (event.keyCode == 27) { // escape
input.attr("value", "");
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
index 13880bb8af..d14b5c79e0 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
@@ -37,8 +37,8 @@ trait CompletionOutput {
val pkg = method.ownerChain find (_.isPackageClass) map (_.fullName) getOrElse ""
def relativize(str: String): String = quietString(str stripPrefix (pkg + "."))
- def relativize(tp: Type): String = relativize(tp.normalize.toString)
- def relativize(sym: Symbol): String = relativize(sym.info)
+ def relativize(tp: Type): String = relativize(tp.dealiasWiden.toString)
+ def relativize(sym: Symbol): String = relativize(sym.info)
def braceList(tparams: List[String]) = if (tparams.isEmpty) "" else (tparams map relativize).mkString("[", ", ", "]")
def parenList(params: List[Any]) = params.mkString("(", ", ", ")")
@@ -56,8 +56,8 @@ trait CompletionOutput {
}
)
- def tupleString(tp: Type) = parenList(tp.normalize.typeArgs map relativize)
- def functionString(tp: Type) = tp.normalize.typeArgs match {
+ def tupleString(tp: Type) = parenList(tp.dealiasWiden.typeArgs map relativize)
+ def functionString(tp: Type) = tp.dealiasWiden.typeArgs match {
case List(t, r) => t + " => " + r
case xs => parenList(xs.init) + " => " + xs.last
}
@@ -65,7 +65,7 @@ trait CompletionOutput {
def tparamsString(tparams: List[Symbol]) = braceList(tparams map (_.defString))
def paramsString(params: List[Symbol]) = {
def paramNameString(sym: Symbol) = if (sym.isSynthetic) "" else sym.nameString + ": "
- def paramString(sym: Symbol) = paramNameString(sym) + typeToString(sym.info.normalize)
+ def paramString(sym: Symbol) = paramNameString(sym) + typeToString(sym.info.dealiasWiden)
val isImplicit = params.nonEmpty && params.head.isImplicit
val strs = (params map paramString) match {
diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
index 2b97f81024..b46d28dec3 100644
--- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
@@ -550,7 +550,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
// normalize non-public types so we don't see protected aliases like Self
def normalizeNonPublic(tp: Type) = tp match {
- case TypeRef(_, sym, _) if sym.isAliasType && !sym.isPublic => tp.normalize
+ case TypeRef(_, sym, _) if sym.isAliasType && !sym.isPublic => tp.dealias
case _ => tp
}
diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala
index 99b72fa26e..f116a7c4c7 100644
--- a/src/compiler/scala/tools/nsc/matching/Patterns.scala
+++ b/src/compiler/scala/tools/nsc/matching/Patterns.scala
@@ -402,7 +402,7 @@ trait Patterns extends ast.TreeDSL {
case _ => toPats(args)
}
- def resTypes = analyzer.unapplyTypeList(unfn.symbol, unfn.tpe, args.length)
+ def resTypes = analyzer.unapplyTypeList(unfn.pos, unfn.symbol, unfn.tpe, args.length)
def resTypesString = resTypes match {
case Nil => "Boolean"
case xs => xs.mkString(", ")
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index cfc7f14210..b820d10ddc 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -85,7 +85,8 @@ trait ScalaSettings extends AbsScalaSettings
val logFreeTerms = BooleanSetting ("-Xlog-free-terms", "Print a message when reification creates a free term.")
val logFreeTypes = BooleanSetting ("-Xlog-free-types", "Print a message when reification resorts to generating a free type.")
val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None)
- val Xmigration28 = BooleanSetting ("-Xmigration", "Warn about constructs whose behavior may have changed between 2.7 and 2.8.")
+ val Xmigration28 = BooleanSetting ("-Xmigration", "Warn about constructs whose behavior may have changed between 2.7 and 2.8.").
+ withDeprecationMessage("This setting is no longer useful and will be removed. Please remove it from your build.")
val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.")
val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
@@ -174,6 +175,7 @@ trait ScalaSettings extends AbsScalaSettings
val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.")
val Yinvalidate = StringSetting ("-Yinvalidate", "classpath-entry", "Invalidate classpath entry before run", "")
val noSelfCheck = BooleanSetting ("-Yno-self-type-checks", "Suppress check for self-type conformance among inherited members.")
+ val companionsInPkgObjs = BooleanSetting("-Ycompanions-in-pkg-objs", "Allow companion objects and case classes in package objects. See issue SI-5954.")
val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes")
val exposeEmptyPackage = BooleanSetting("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly()
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 838ea7d5a0..9908bd689e 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -613,7 +613,7 @@ abstract class UnCurry extends InfoTransform
val fn1 = withInPattern(false)(transform(fn))
val args1 = transformTrees(fn.symbol.name match {
case nme.unapply => args
- case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeList(fn.symbol, fn.tpe, args.length))
+ case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeList(fn.pos, fn.symbol, fn.tpe, args.length))
case _ => sys.error("internal error: UnApply node has wrong symbol")
})
treeCopy.UnApply(tree, fn1, args1)
@@ -621,11 +621,13 @@ abstract class UnCurry extends InfoTransform
case Apply(fn, args) =>
if (fn.symbol == Object_synchronized && shouldBeLiftedAnyway(args.head))
transform(treeCopy.Apply(tree, fn, List(liftTree(args.head))))
- else
- withNeedLift(true) {
+ else {
+ val needLift = needTryLift || !fn.symbol.isLabel // SI-6749, no need to lift in args to label jumps.
+ withNeedLift(needLift) {
val formals = fn.tpe.paramTypes
treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals)))
}
+ }
case Assign(Select(_, _), _) =>
withNeedLift(true) { super.transform(tree) }
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 70a736c91f..ec3a0a0ef7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -443,8 +443,8 @@ trait Implicits {
val start = if (Statistics.canEnable) Statistics.startTimer(matchesPtNanos) else null
val result = normSubType(tp, pt) || isView && {
pt match {
- case TypeRef(_, Function1.Sym, args) =>
- matchesPtView(tp, args.head, args.tail.head, undet)
+ case TypeRef(_, Function1.Sym, arg1 :: arg2 :: Nil) =>
+ matchesPtView(tp, arg1, arg2, undet)
case _ =>
false
}
@@ -488,7 +488,7 @@ trait Implicits {
loop(restpe, pt)
else pt match {
case tr @ TypeRef(pre, sym, args) =>
- if (sym.isAliasType) loop(tp, pt.normalize)
+ if (sym.isAliasType) loop(tp, pt.dealias)
else if (sym.isAbstractType) loop(tp, pt.bounds.lo)
else {
val len = args.length - 1
@@ -532,18 +532,15 @@ trait Implicits {
* to a final true or false.
*/
private def isPlausiblySubType(tp1: Type, tp2: Type) = !isImpossibleSubType(tp1, tp2)
- private def isImpossibleSubType(tp1: Type, tp2: Type) = tp1.normalize.widen match {
- case tr1 @ TypeRef(_, sym1, _) =>
- // We can only rule out a subtype relationship if the left hand
- // side is a class, else we may not know enough.
- sym1.isClass && (tp2.normalize.widen match {
- case TypeRef(_, sym2, _) =>
- sym2.isClass && !(sym1 isWeakSubClass sym2)
- case RefinedType(parents, decls) =>
- decls.nonEmpty &&
- tr1.member(decls.head.name) == NoSymbol
- case _ => false
- })
+ private def isImpossibleSubType(tp1: Type, tp2: Type) = tp1.dealiasWiden match {
+ // We can only rule out a subtype relationship if the left hand
+ // side is a class, else we may not know enough.
+ case tr1 @ TypeRef(_, sym1, _) if sym1.isClass =>
+ tp2.dealiasWiden match {
+ case TypeRef(_, sym2, _) => sym2.isClass && !(sym1 isWeakSubClass sym2)
+ case RefinedType(parents, decls) => decls.nonEmpty && tr1.member(decls.head.name) == NoSymbol
+ case _ => false
+ }
case _ => false
}
@@ -959,7 +956,7 @@ trait Implicits {
infoMap get sym match {
case Some(infos1) =>
if (infos1.nonEmpty && !(pre =:= infos1.head.pre.prefix)) {
- println("amb prefix: "+pre+"#"+sym+" "+infos1.head.pre.prefix+"#"+sym)
+ log(s"Ignoring implicit members of $pre#$sym as it is also visible via another prefix: ${infos1.head.pre.prefix}")
infoMap(sym) = List() // ambiguous prefix - ignore implicit members
}
case None =>
@@ -1018,7 +1015,7 @@ trait Implicits {
args foreach (getParts(_))
}
} else if (sym.isAliasType) {
- getParts(tp.normalize)
+ getParts(tp.dealias)
} else if (sym.isAbstractType) {
getParts(tp.bounds.hi)
}
@@ -1049,88 +1046,6 @@ trait Implicits {
infoMap
}
- /** The parts of a type is the smallest set of types that contains
- * - the type itself
- * - the parts of its immediate components (prefix and argument)
- * - the parts of its base types
- * - for alias types and abstract types, we take instead the parts
- * - of their upper bounds.
- * @return For those parts that refer to classes with companion objects that
- * can be accessed with unambiguous stable prefixes, the implicits infos
- * which are members of these companion objects.
-
- private def companionImplicits(tp: Type): Infoss = {
- val partMap = new LinkedHashMap[Symbol, Type]
- val seen = mutable.HashSet[Type]() // cycle detection
-
- /** Enter all parts of `tp` into `parts` set.
- * This method is performance critical: about 2-4% of all type checking is spent here
- */
- def getParts(tp: Type) {
- if (seen(tp))
- return
- seen += tp
- tp match {
- case TypeRef(pre, sym, args) =>
- if (sym.isClass) {
- if (!((sym.name == tpnme.REFINE_CLASS_NAME) ||
- (sym.name startsWith tpnme.ANON_CLASS_NAME) ||
- (sym.name == tpnme.ROOT)))
- partMap get sym match {
- case Some(pre1) =>
- if (!(pre =:= pre1)) partMap(sym) = NoType // ambiguous prefix - ignore implicit members
- case None =>
- if (pre.isStable) partMap(sym) = pre
- val bts = tp.baseTypeSeq
- var i = 1
- while (i < bts.length) {
- getParts(bts(i))
- i += 1
- }
- getParts(pre)
- args foreach getParts
- }
- } else if (sym.isAliasType) {
- getParts(tp.normalize)
- } else if (sym.isAbstractType) {
- getParts(tp.bounds.hi)
- }
- case ThisType(_) =>
- getParts(tp.widen)
- case _: SingletonType =>
- getParts(tp.widen)
- case RefinedType(ps, _) =>
- for (p <- ps) getParts(p)
- case AnnotatedType(_, t, _) =>
- getParts(t)
- case ExistentialType(_, t) =>
- getParts(t)
- case PolyType(_, t) =>
- getParts(t)
- case _ =>
- }
- }
-
- getParts(tp)
-
- val buf = new ListBuffer[Infos]
- for ((clazz, pre) <- partMap) {
- if (pre != NoType) {
- val companion = clazz.companionModule
- companion.moduleClass match {
- case mc: ModuleClassSymbol =>
- buf += (mc.implicitMembers map (im =>
- new ImplicitInfo(im.name, singleType(pre, companion), im)))
- case _ =>
- }
- }
- }
- //println("companion implicits of "+tp+" = "+buf.toList) // DEBUG
- buf.toList
- }
-
-*/
-
/** The implicits made available by type `pt`.
* These are all implicits found in companion objects of classes C
* such that some part of `tp` has C as one of its superclasses.
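The surviving comment describes implicit scope: implicits defined in companion objects of classes C that appear as parts of the type. A minimal, self-contained illustration with hypothetical classes:

    import scala.language.implicitConversions

    object CompanionScopeDemo {
      class Meter(val value: Double)
      object Meter {
        // in implicit scope whenever Meter is a part of the types involved, no import required
        implicit def meterToDouble(m: Meter): Double = m.value
      }
      val d: Double = new Meter(2.0)  // resolved via Meter.meterToDouble from the companion
    }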
@@ -1258,7 +1173,7 @@ trait Implicits {
implicit def wrapResult(tree: Tree): SearchResult =
if (tree == EmptyTree) SearchFailure else new SearchResult(tree, if (from.isEmpty) EmptyTreeTypeSubstituter else new TreeTypeSubstituter(from, to))
- val tp1 = tp0.normalize
+ val tp1 = tp0.dealias
tp1 match {
case ThisType(_) | SingleType(_, _) =>
// can't generate a reference to a value that's abstracted over by an existential
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 7ae8923e43..2c2aa03d24 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -44,7 +44,7 @@ trait Infer extends Checkable {
case formal => formal
} else formals
if (isVarArgTypes(formals1) && (removeRepeated || formals.length != nargs)) {
- val ft = formals1.last.normalize.typeArgs.head
+ val ft = formals1.last.dealiasWiden.typeArgs.head
formals1.init ::: (for (i <- List.range(formals1.length - 1, nargs)) yield ft)
} else formals1
}
@@ -73,7 +73,7 @@ trait Infer extends Checkable {
* n > 1 and unapply’s result type is Option[(T1, ..., Tn)], for some types T1, ..., Tn.
* the argument patterns p1, ..., pn are typed in turn with expected types T1, ..., Tn
*/
- def extractorFormalTypes(resTp: Type, nbSubPats: Int, unappSym: Symbol): (List[Type], List[Type]) = {
+ def extractorFormalTypes(pos: Position, resTp: Type, nbSubPats: Int, unappSym: Symbol): (List[Type], List[Type]) = {
val isUnapplySeq = unappSym.name == nme.unapplySeq
val booleanExtractor = resTp.typeSymbolDirect == BooleanClass
@@ -87,11 +87,18 @@ trait Infer extends Checkable {
if (nbSubPats == 0 && booleanExtractor && !isUnapplySeq) Nil
else resTp.baseType(OptionClass).typeArgs match {
case optionTArg :: Nil =>
- if (nbSubPats == 1)
+ def productArgs = getProductArgs(optionTArg)
+ if (nbSubPats == 1) {
if (isUnapplySeq) List(seqToRepeatedChecked(optionTArg))
- else List(optionTArg)
+ else {
+ val productArity = productArgs.size
+ if (productArity > 1 && settings.lint.value)
+ global.currentUnit.warning(pos, s"extractor pattern binds a single value to a Product${productArity} of type ${optionTArg}")
+ List(optionTArg)
+ }
+ }
// TODO: update spec to reflect we allow any ProductN, not just TupleN
- else getProductArgs(optionTArg) match {
+ else productArgs match {
case Nil if isUnapplySeq => List(seqToRepeatedChecked(optionTArg))
case tps if isUnapplySeq => tps.init :+ seqToRepeatedChecked(tps.last)
case tps => tps
@@ -1060,15 +1067,17 @@ trait Infer extends Checkable {
*/
def inferExprInstance(tree: Tree, tparams: List[Symbol], pt: Type = WildcardType, treeTp0: Type = null, keepNothings: Boolean = true, useWeaklyCompatible: Boolean = false): List[Symbol] = {
val treeTp = if(treeTp0 eq null) tree.tpe else treeTp0 // can't refer to tree in default for treeTp0
+ val (targs, tvars) = exprTypeArgs(tparams, treeTp, pt, useWeaklyCompatible)
printInference(
ptBlock("inferExprInstance",
"tree" -> tree,
"tree.tpe"-> tree.tpe,
"tparams" -> tparams,
- "pt" -> pt
+ "pt" -> pt,
+ "targs" -> targs,
+ "tvars" -> tvars
)
)
- val (targs, tvars) = exprTypeArgs(tparams, treeTp, pt, useWeaklyCompatible)
if (keepNothings || (targs eq null)) { //@M: adjustTypeArgs fails if targs==null, neg/t0226
substExpr(tree, tparams, targs, pt)
@@ -1422,9 +1431,9 @@ trait Infer extends Checkable {
}
object approximateAbstracts extends TypeMap {
- def apply(tp: Type): Type = tp.normalize match {
+ def apply(tp: Type): Type = tp.dealiasWiden match {
case TypeRef(pre, sym, _) if sym.isAbstractType => WildcardType
- case _ => mapOver(tp)
+ case _ => mapOver(tp)
}
}
@@ -1510,6 +1519,13 @@ trait Infer extends Checkable {
} else if (!competing.isEmpty) {
if (noAlternatives) NoBestExprAlternativeError(tree, pt, isSecondTry)
else if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing.head, pt, isSecondTry)
+ else {
+ // SI-6912 Don't give up and leave an OverloadedType on the tree.
+ // Originally I wrote this as `if (secondTry) ... `, but `tryTwice` won't attempt the second try
+ // unless an error is issued. We're not issuing an error, in the assumption that it would be
+ // spurious in light of the erroneous expected type
+ setError(tree)
+ }
} else {
// val applicable = alts1 filter (alt =>
// global.typer.infer.isWeaklyCompatible(pre.memberType(alt), pt))
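The new -Xlint warning in extractorFormalTypes fires when one sub-pattern absorbs an entire ProductN extractor result. A hedged example of the shape it flags (hypothetical extractor):

    object Dims {
      def unapply(s: String): Option[(Int, Int)] = Some((s.length, s.count(_ == ' ') + 1))
    }

    def describe(s: String): (Int, Int) = s match {
      case Dims(d) => d  // -Xlint: extractor pattern binds a single value to a Product2 of type (Int, Int)
      case _       => (0, 0)
    }
    // preferred: case Dims(len, words) => (len, words)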
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index fa8aff5cdd..f1c70f46d8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -1478,14 +1478,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
def and(a: Tree, b: Tree): Tree = a AND b
- // drop annotations generated by CPS plugin etc, since its annotationchecker rejects T @cps[U] <: Any
- // let's assume for now annotations don't affect casts, drop them there, and bring them back using the outer Typed tree
- private def mkCast(t: Tree, tp: Type) =
- Typed(gen.mkAsInstanceOf(t, tp.withoutAnnotations, true, false), TypeTree() setType tp)
-
// the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly)
- def _asInstanceOf(t: Tree, tp: Type): Tree = if (t.tpe != NoType && t.isTyped && typesConform(t.tpe, tp)) t else mkCast(t, tp)
- def _asInstanceOf(b: Symbol, tp: Type): Tree = if (typesConform(b.info, tp)) REF(b) else mkCast(REF(b), tp)
+ def _asInstanceOf(t: Tree, tp: Type): Tree = if (t.tpe != NoType && t.isTyped && typesConform(t.tpe, tp)) t else gen.mkCastPreservingAnnotations(t, tp)
+ def _asInstanceOf(b: Symbol, tp: Type): Tree = if (typesConform(b.info, tp)) REF(b) else gen.mkCastPreservingAnnotations(REF(b), tp)
def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, true, false)
// if (typesConform(b.info, tpX)) { patmatDebug("warning: emitted spurious isInstanceOf: "+(b, tp)); TRUE }
@@ -3499,8 +3494,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val alternativesSupported = true
val canJump = true
- object SwitchablePattern { def unapply(pat: Tree): Option[Tree] = pat match {
- case Literal(const@Constant((_: Byte ) | (_: Short) | (_: Int ) | (_: Char ))) =>
+ // Constant folding sets the type of a constant tree to `ConstantType(Constant(folded))`
+ // The tree itself can be a literal, an ident, a selection, ...
+ object SwitchablePattern { def unapply(pat: Tree): Option[Tree] = pat.tpe match {
+ case ConstantType(const) if const.isIntRange =>
Some(Literal(Constant(const.intValue))) // TODO: Java 7 allows strings in switches
case _ => None
}}
@@ -3527,7 +3524,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree], unchecked: Boolean): Option[Tree] = { import CODE._
val regularSwitchMaker = new RegularSwitchMaker(scrutSym, matchFailGenOverride, unchecked)
// TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result
- if (regularSwitchMaker.switchableTpe(scrutSym.tpe)) {
+ if (regularSwitchMaker.switchableTpe(scrutSym.tpe.dealias)) { // TODO: switch to dealiasWiden in 2.11
val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt)
if (caseDefsWithDefault isEmpty) None // not worth emitting a switch.
else {
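Matching on the pattern tree's ConstantType (rather than only on Literal nodes) lets constant-folded identifiers take part in switches too. A sketch with hypothetical constants:

    object Tags {
      final val Header = 1  // no type annotation, so its type is ConstantType(Constant(1))
    }

    def kind(tag: Int): String = tag match {
      case Tags.Header => "header"  // now switchable: the pattern's tpe is an int-range ConstantType
      case 2           => "body"
      case _           => "other"
    }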
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 8722aa3666..cbcddba487 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -224,7 +224,7 @@ trait Typers extends Modes with Adaptations with Tags {
case ExistentialType(tparams, tpe) =>
new SubstWildcardMap(tparams).apply(tp)
case TypeRef(_, sym, _) if sym.isAliasType =>
- val tp0 = tp.normalize
+ val tp0 = tp.dealias
val tp1 = dropExistential(tp0)
if (tp1 eq tp0) tp else tp1
case _ => tp
@@ -439,7 +439,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (!hiddenSymbols.isEmpty && hiddenSymbols.head == sym &&
sym.isAliasType && sameLength(sym.typeParams, args)) {
hiddenSymbols = hiddenSymbols.tail
- t.normalize
+ t.dealias
} else t
case SingleType(_, sym) =>
checkNoEscape(sym)
@@ -1004,6 +1004,10 @@ trait Typers extends Modes with Adaptations with Tags {
object variantToSkolem extends VariantTypeMap {
def apply(tp: Type) = mapOver(tp) match {
case TypeRef(NoPrefix, tpSym, Nil) if variance != 0 && tpSym.isTypeParameterOrSkolem && tpSym.owner.isTerm =>
+ // must initialize or tpSym.tpe might see random type params!!
+ // without this, we'll get very weird types inferred in test/scaladoc/run/SI-5933.scala
+ // TODO: why is that??
+ tpSym.initialize
val bounds = if (variance == 1) TypeBounds.upper(tpSym.tpe) else TypeBounds.lower(tpSym.tpe)
// origin must be the type param so we can deskolemize
val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?"+tpSym.name), tpSym, bounds)
@@ -1026,7 +1030,7 @@ trait Typers extends Modes with Adaptations with Tags {
newTyper(ctorContext).infer.inferConstructorInstance(tree1, clazz.typeParams, ptSafe)
// simplify types without losing safety,
- // so that error messages don't unnecessarily refer to skolems
+ // so that we get rid of unnecessary type slack, and so that error messages don't unnecessarily refer to skolems
val extrapolate = new ExistentialExtrapolation(freeVars) extrapolate (_: Type)
val extrapolated = tree1.tpe match {
case MethodType(ctorArgs, res) => // ctorArgs are actually in a covariant position, since this is the type of the subpatterns of the pattern represented by this Apply node
@@ -1083,9 +1087,9 @@ trait Typers extends Modes with Adaptations with Tags {
adapt(tree setType restpe, mode, pt, original)
case TypeRef(_, ByNameParamClass, List(arg)) if ((mode & EXPRmode) != 0) => // (2)
adapt(tree setType arg, mode, pt, original)
- case tr @ TypeRef(_, sym, _) if sym.isAliasType && tr.normalize.isInstanceOf[ExistentialType] &&
+ case tr @ TypeRef(_, sym, _) if sym.isAliasType && tr.dealias.isInstanceOf[ExistentialType] &&
((mode & (EXPRmode | LHSmode)) == EXPRmode) =>
- adapt(tree setType tr.normalize.skolemizeExistential(context.owner, tree), mode, pt, original)
+ adapt(tree setType tr.dealias.skolemizeExistential(context.owner, tree), mode, pt, original)
case et @ ExistentialType(_, _) if ((mode & (EXPRmode | LHSmode)) == EXPRmode) =>
adapt(tree setType et.skolemizeExistential(context.owner, tree), mode, pt, original)
case PolyType(tparams, restpe) if inNoModes(mode, TAPPmode | PATTERNmode | HKmode) => // (3)
@@ -1155,7 +1159,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (tree1.tpe <:< pt) adapt(tree1, mode, pt, original)
else {
if (inExprModeButNot(mode, FUNmode)) {
- pt.normalize match {
+ pt.dealias match {
case TypeRef(_, sym, _) =>
// note: was if (pt.typeSymbol == UnitClass) but this leads to a potentially
// infinite expansion if pt is constant type ()
@@ -1310,7 +1314,7 @@ trait Typers extends Modes with Adaptations with Tags {
def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean = true, saveErrors: Boolean = true): Tree = {
if (isAdaptableWithView(qual)) {
- qual.tpe.widen.normalize match {
+ qual.tpe.dealiasWiden match {
case et: ExistentialType =>
qual setType et.skolemizeExistential(context.owner, qual) // open the existential
case _ =>
@@ -1848,7 +1852,7 @@ trait Typers extends Modes with Adaptations with Tags {
_.typedTemplate(cdef.impl, parentTypes(cdef.impl))
}
val impl2 = finishMethodSynthesis(impl1, clazz, context)
- if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.normalize.typeSymbol == AnyClass)
+ if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass)
checkEphemeral(clazz, impl2.body)
if ((clazz != ClassfileAnnotationClass) &&
(clazz isNonBottomSubClass ClassfileAnnotationClass))
@@ -1899,7 +1903,33 @@ trait Typers extends Modes with Adaptations with Tags {
})
}
val impl2 = finishMethodSynthesis(impl1, clazz, context)
-
+
+ // SI-5954. On second compile of a companion class contained in a package object we end up
+ // with some confusion of names which leads to having two symbols with the same name in the
+ // same owner. Until that can be straightened out we can't allow companion objects in package
+ // objects. But this code also tries to be friendly by distinguishing between case classes and
+ // user written companion pairs
+ def restrictPackageObjectMembers(mdef : ModuleDef) = for (m <- mdef.symbol.info.members) {
+ // ignore synthetic objects, because the "companion" object to a case class is synthetic and
+ // we only want one error per case class
+ if (!m.isSynthetic) {
+ // can't handle case classes in package objects
+ if (m.isCaseClass) pkgObjectRestriction(m, mdef, "case")
+ // can't handle companion class/object pairs in package objects
+ else if ((m.isClass && m.companionModule != NoSymbol && !m.companionModule.isSynthetic) ||
+ (m.isModule && m.companionClass != NoSymbol && !m.companionClass.isSynthetic))
+ pkgObjectRestriction(m, mdef, "companion")
+ }
+
+ def pkgObjectRestriction(m : Symbol, mdef : ModuleDef, restricted : String) = {
+ val pkgName = mdef.symbol.ownerChain find (_.isPackage) map (_.decodedName) getOrElse mdef.symbol.toString
+ context.error(if (m.pos.isDefined) m.pos else mdef.pos, s"implementation restriction: package object ${pkgName} cannot contain ${restricted} ${m}. Instead, ${m} should be placed directly in package ${pkgName}.")
+ }
+ }
+
+ if (!settings.companionsInPkgObjs.value && mdef.symbol.isPackageObject)
+ restrictPackageObjectMembers(mdef)
+
treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType
}
/** In order to override this in the TreeCheckers Typer so synthetics aren't re-added
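A hedged sketch of what the new SI-5954 restriction rejects unless -Ycompanions-in-pkg-objs is set (hypothetical package and member names):

    package object pkg {
      case class Person(name: String)  // implementation restriction: case class in a package object
      class Helper
      object Helper                    // explicitly written companion class/object pair: also restricted
    }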
@@ -2078,57 +2108,60 @@ trait Typers extends Modes with Adaptations with Tags {
*/
def computeParamAliases(clazz: Symbol, vparamss: List[List[ValDef]], rhs: Tree) {
debuglog(s"computing param aliases for $clazz:${clazz.primaryConstructor.tpe}:$rhs")
+ val pending = ListBuffer[AbsTypeError]()
+
+ // !!! This method is redundant with other, less buggy ones.
def decompose(call: Tree): (Tree, List[Tree]) = call match {
case Apply(fn, args) =>
- val (superConstr, args1) = decompose(fn)
+ // an object cannot be allowed to pass a reference to itself to a superconstructor
+ // because of initialization issues; SI-473, SI-3913, SI-6928.
+ foreachSubTreeBoundTo(args, clazz) { tree =>
+ if (tree.symbol.isModule)
+ pending += SuperConstrReferenceError(tree)
+ tree match {
+ case This(qual) =>
+ pending += SuperConstrArgsThisReferenceError(tree)
+ case _ => ()
+ }
+ }
+ val (superConstr, preArgs) = decompose(fn)
val params = fn.tpe.params
- val args2 = if (params.isEmpty || !isRepeatedParamType(params.last.tpe)) args
- else args.take(params.length - 1) :+ EmptyTree
- assert(sameLength(args2, params) || call.isErrorTyped, "mismatch " + clazz + " " + (params map (_.tpe)) + " " + args2)//debug
- (superConstr, args1 ::: args2)
- case Block(stats, expr) if !stats.isEmpty =>
- decompose(stats.last)
+ // appending a dummy tree to represent Nil for an empty varargs (is this really necessary?)
+ val applyArgs = if (args.length < params.length) args :+ EmptyTree else args take params.length
+
+ assert(sameLength(applyArgs, params) || call.isErrorTyped,
+ s"arity mismatch but call is not error typed: $clazz (params=$params, args=$applyArgs)")
+
+ (superConstr, preArgs ::: applyArgs)
+ case Block(_ :+ superCall, _) =>
+ decompose(superCall)
case _ =>
- (call, List())
+ (call, Nil)
}
val (superConstr, superArgs) = decompose(rhs)
assert(superConstr.symbol ne null, superConstr)//debug
-
- val pending = ListBuffer[AbsTypeError]()
- // an object cannot be allowed to pass a reference to itself to a superconstructor
- // because of initialization issues; bug #473
- foreachSubTreeBoundTo(superArgs, clazz) { tree =>
- if (tree.symbol.isModule)
- pending += SuperConstrReferenceError(tree)
- tree match {
- case This(qual) =>
- pending += SuperConstrArgsThisReferenceError(tree)
- case _ => ()
- }
- }
-
- if (superConstr.symbol.isPrimaryConstructor) {
- val superClazz = superConstr.symbol.owner
- if (!superClazz.isJavaDefined) {
- val superParamAccessors = superClazz.constrParamAccessors
- if (sameLength(superParamAccessors, superArgs)) {
- for ((superAcc, superArg @ Ident(name)) <- superParamAccessors zip superArgs) {
- if (vparamss.exists(_.exists(_.symbol == superArg.symbol))) {
- var alias = superAcc.initialize.alias
- if (alias == NoSymbol)
- alias = superAcc.getter(superAcc.owner)
- if (alias != NoSymbol &&
- superClazz.info.nonPrivateMember(alias.name) != alias)
- alias = NoSymbol
- if (alias != NoSymbol) {
- var ownAcc = clazz.info.decl(name).suchThat(_.isParamAccessor)
- if ((ownAcc hasFlag ACCESSOR) && !ownAcc.isDeferred)
- ownAcc = ownAcc.accessed
- if (!ownAcc.isVariable && !alias.accessed.isVariable) {
- debuglog("" + ownAcc + " has alias "+alias.fullLocationString) //debug
- ownAcc.asInstanceOf[TermSymbol].setAlias(alias)
- }
- }
+ def superClazz = superConstr.symbol.owner
+ def superParamAccessors = superClazz.constrParamAccessors
+
+ // associate superclass paramaccessors with their aliases
+ if (superConstr.symbol.isPrimaryConstructor && !superClazz.isJavaDefined && sameLength(superParamAccessors, superArgs)) {
+ for ((superAcc, superArg @ Ident(name)) <- superParamAccessors zip superArgs) {
+ if (mexists(vparamss)(_.symbol == superArg.symbol)) {
+ val alias = (
+ superAcc.initialize.alias
+ orElse (superAcc getter superAcc.owner)
+ filter (alias => superClazz.info.nonPrivateMember(alias.name) != alias)
+ )
+ if (alias.exists && !alias.accessed.isVariable) {
+ val ownAcc = clazz.info decl name suchThat (_.isParamAccessor) match {
+ case acc if !acc.isDeferred && acc.hasAccessorFlag => acc.accessed
+ case acc => acc
+ }
+ ownAcc match {
+ case acc: TermSymbol if !acc.isVariable =>
+ debuglog(s"$acc has alias ${alias.fullLocationString}")
+ acc setAlias alias
+ case _ =>
}
}
}
@@ -2531,11 +2564,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
// body1 = checkNoEscaping.locals(context.scope, pt, body1)
- val treeWithSkolems = treeCopy.CaseDef(cdef, pat1, guard1, body1) setType body1.tpe
-
- new TypeMapTreeSubstituter(deskolemizeGADTSkolems).traverse(treeWithSkolems)
-
- treeWithSkolems // now without skolems, actually
+ treeCopy.CaseDef(cdef, pat1, guard1, body1) setType body1.tpe
}
// undo adaptConstrPattern's evil deeds, as they confuse the old pattern matcher
@@ -2570,7 +2599,10 @@ trait Typers extends Modes with Adaptations with Tags {
val casesAdapted = if (!needAdapt) casesTyped else casesTyped map (adaptCase(_, mode, resTp))
- treeCopy.Match(tree, selector1, casesAdapted) setType resTp
+ val matchTyped = treeCopy.Match(tree, selector1, casesAdapted) setType resTp
+ if (!newPatternMatching) // TODO: remove this in 2.11 -- only needed for old pattern matcher
+ new TypeMapTreeSubstituter(deskolemizeGADTSkolems).traverse(matchTyped)
+ matchTyped
}
// match has been typed -- virtualize it if we're feeling experimental
@@ -2593,174 +2625,177 @@ trait Typers extends Modes with Adaptations with Tags {
match_ // will be translated in phase `patmat`
}
- // synthesize and type check a PartialFunction implementation based on a match specified by `cases`
- // Match(EmptyTree, cases) ==> new PartialFunction { def apply<OrElse>(params) = `translateMatch('`(param1,...,paramN)` match { cases }')` }
- // for fresh params, the selector of the match we'll translated simply gathers those in a tuple
- // NOTE: restricted to PartialFunction -- leave Function trees if the expected type does not demand a partial function
- class MatchFunTyper(tree: Tree, cases: List[CaseDef], mode: Int, pt0: Type) {
- // TODO: remove FunctionN support -- this is currently designed so that it can emit FunctionN and PartialFunction subclasses
- // however, we should leave Function nodes until Uncurry so phases after typer can still detect normal Function trees
- // we need to synthesize PartialFunction impls, though, to avoid nastiness in Uncurry in transforming&duplicating generated pattern matcher trees
- // TODO: remove PartialFunction support from UnCurry
- private val pt = deskolemizeGADTSkolems(pt0)
- private val targs = pt.normalize.typeArgs
- private val arity = if (isFunctionType(pt)) targs.length - 1 else 1 // TODO pt should always be a (Partial)Function, right?
- private val ptRes = if (targs.isEmpty) WildcardType else targs.last // may not be fully defined
-
- private val isPartial = pt.typeSymbol == PartialFunctionClass
- assert(isPartial)
-
- private val anonClass = context.owner.newAnonymousFunctionClass(tree.pos)
- private val funThis = This(anonClass)
-
- anonClass addAnnotation AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List())
-
- def deriveFormals =
- if (targs.isEmpty) Nil
- else targs.init
-
- def mkParams(methodSym: Symbol, formals: List[Type] = deriveFormals) =
- if (formals.isEmpty || !formals.forall(isFullyDefined)) { MissingParameterTypeAnonMatchError(tree, pt); Nil }
- else methodSym newSyntheticValueParams formals
-
- def mkSel(params: List[Symbol]) =
- if (params.isEmpty) EmptyTree
- else {
- val ids = params map (p => Ident(p.name))
- atPos(tree.pos.focusStart) { if (arity == 1) ids.head else gen.mkTuple(ids) }
- }
-
- import CODE._
+ /** synthesize and type check a PartialFunction implementation based on the match in `tree`
+ *
+ * `param => sel match { cases }` becomes:
+ *
+ * new AbstractPartialFunction[$argTp, $matchResTp] {
+ * def applyOrElse[A1 <: $argTp, B1 >: $matchResTp]($param: A1, default: A1 => B1): B1 =
+ * $selector match { $cases }
+ * def isDefinedAt(x: $argTp): Boolean =
+ * $selector match { $casesTrue }
+ * }
+ *
+ * TODO: it would be nicer to generate the tree specified above at once and type it as a whole,
+ * there are two gotchas:
+ * - matchResTp may not be known until we've typed the match (can only use resTp when it's fully defined),
+ * - if we typed the match in isolation first, you'd know its result type, but would have to re-jig the owner structure
+ * - could we use a type variable for matchResTp and backpatch it?
+ * - occurrences of `this` in `cases` or `sel` must resolve to the this of the class originally enclosing the match,
+ * not of the anonymous partial function subclass
+ *
+ * an alternative TODO: add partial function AST node or equivalent and get rid of this synthesis --> do everything in uncurry (or later)
+ * however, note that pattern matching codegen is designed to run *before* uncurry
+ */
+ def synthesizePartialFunction(paramName: TermName, paramPos: Position, tree: Tree, mode: Int, pt0: Type): Tree = {
+ assert(pt0.typeSymbol == PartialFunctionClass, s"PartialFunction synthesis for match in $tree requires PartialFunction expected type, but got $pt0.")
- // need to duplicate the cases before typing them to generate the apply method, or the symbols will be all messed up
- val casesTrue = if (isPartial) cases map (c => deriveCaseDef(c)(x => atPos(x.pos.focus)(TRUE_typed)).duplicate.asInstanceOf[CaseDef]) else Nil
- // println("casesTrue "+ casesTrue)
- def parentsPartial(targs: List[Type]) = addSerializable(appliedType(AbstractPartialFunctionClass.typeConstructor, targs))
+ val pt = deskolemizeGADTSkolems(pt0)
+ val targs = pt.normalize.typeArgs
- def applyMethod = {
- // rig the show so we can get started typing the method body -- later we'll correct the infos...
- anonClass setInfo ClassInfoType(addSerializable(ObjectClass.tpe, pt), newScope, anonClass)
- val methodSym = anonClass.newMethod(nme.apply, tree.pos, if(isPartial) (FINAL | OVERRIDE) else FINAL)
- val paramSyms = mkParams(methodSym)
- val selector = mkSel(paramSyms)
+ // if targs.head isn't fully defined, we can translate --> error
+ targs match {
+ case argTp :: _ if isFullyDefined(argTp) => // ok
+ case _ => // uh-oh
+ MissingParameterTypeAnonMatchError(tree, pt)
+ return setError(tree)
+ }
- if (selector eq EmptyTree) EmptyTree
- else {
- methodSym setInfoAndEnter MethodType(paramSyms, AnyClass.tpe)
+ // NOTE: resTp still might not be fully defined
+ val argTp :: resTp :: Nil = targs
- val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
- paramSyms foreach (methodBodyTyper.context.scope enter _)
+ // targs must conform to Any for us to synthesize an applyOrElse (fallback to apply otherwise -- typically for @cps annotated targs)
+ val targsValidParams = targs forall (_ <:< AnyClass.tpe)
- val match_ = methodBodyTyper.typedMatch(gen.mkUnchecked(selector), cases, mode, ptRes)
- val resTp = match_.tpe
+ val anonClass = (context.owner
+ newAnonymousFunctionClass tree.pos
+ addAnnotation AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List()))
- val methFormals = paramSyms map (_.tpe)
- val parents = (
- if (isPartial) parentsPartial(List(methFormals.head, resTp))
- else addSerializable(abstractFunctionType(methFormals, resTp))
- )
- anonClass setInfo ClassInfoType(parents, newScope, anonClass)
- methodSym setInfoAndEnter MethodType(paramSyms, resTp)
+ import CODE._
- DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, resTp))
- }
- }
+ val Match(sel, cases) = tree
- // def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 =
+ // need to duplicate the cases before typing them to generate the apply method, or the symbols will be all messed up
+ val casesTrue = cases map (c => deriveCaseDef(c)(x => atPos(x.pos.focus)(TRUE_typed)).duplicate.asInstanceOf[CaseDef])
+
+ // must generate a new tree every time
+ def selector: Tree = gen.mkUnchecked(
+ if (sel != EmptyTree) sel.duplicate
+ else atPos(tree.pos.focusStart)(
+ // SI-6925: subsume type of the selector to `argTp`
+ // we don't want/need the match to see the `A1` type that we must use for variance reasons in the method signature
+ //
+ // this failed: replace `selector` by `Typed(selector, TypeTree(argTp))` -- as it's an upcast, this should never fail,
+ // `(x: A1): A` doesn't always type check, even though `A1 <: A`, due to singleton types (test/files/pos/t4269.scala)
+ // hence the cast, which will be erased in posterasure
+ // (the cast originally caused extremely weird types to show up
+ // in test/scaladoc/run/SI-5933.scala because `variantToSkolem` was missing `tpSym.initialize`)
+ gen.mkCastPreservingAnnotations(Ident(paramName), argTp)
+ ))
+
+ def mkParam(methodSym: Symbol, tp: Type = argTp) =
+ methodSym.newValueParameter(paramName, paramPos.focus, SYNTHETIC) setInfo tp
+
+ // `def applyOrElse[A1 <: $argTp, B1 >: $matchResTp](x: A1, default: A1 => B1): B1 =
+ // ${`$selector match { $cases }` updateAttachment DefaultOverrideMatchAttachment(REF(default) APPLY (REF(x)))}`
def applyOrElseMethodDef = {
- // rig the show so we can get started typing the method body -- later we'll correct the infos...
- // targs were type arguments for PartialFunction, so we know they will work for AbstractPartialFunction as well
- anonClass setInfo ClassInfoType(parentsPartial(targs), newScope, anonClass)
val methodSym = anonClass.newMethod(nme.applyOrElse, tree.pos, FINAL | OVERRIDE)
// create the parameter that corresponds to the function's parameter
- val List(argTp) = deriveFormals
- val A1 = methodSym newTypeParameter(newTypeName("A1")) setInfo TypeBounds.upper(argTp)
- val paramSyms@List(x) = mkParams(methodSym, List(A1.tpe))
- val selector = mkSel(paramSyms)
+ val A1 = methodSym newTypeParameter (newTypeName("A1")) setInfo TypeBounds.upper(argTp)
+ val x = mkParam(methodSym, A1.tpe)
- if (selector eq EmptyTree) EmptyTree
- else {
- // applyOrElse's default parameter:
- val B1 = methodSym newTypeParameter(newTypeName("B1")) setInfo TypeBounds.empty //lower(resTp)
- val default = methodSym newValueParameter(newTermName("default"), tree.pos.focus, SYNTHETIC) setInfo functionType(List(A1.tpe), B1.tpe)
+ // applyOrElse's default parameter:
+ val B1 = methodSym newTypeParameter (newTypeName("B1")) setInfo TypeBounds.empty //lower(resTp)
+ val default = methodSym newValueParameter (newTermName("default"), tree.pos.focus, SYNTHETIC) setInfo functionType(List(A1.tpe), B1.tpe)
- val paramSyms = List(x, default)
- methodSym setInfoAndEnter polyType(List(A1, B1), MethodType(paramSyms, B1.tpe))
+ val paramSyms = List(x, default)
+ methodSym setInfo polyType(List(A1, B1), MethodType(paramSyms, B1.tpe))
- val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
- paramSyms foreach (methodBodyTyper.context.scope enter _)
+ val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym))
+ // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
+ paramSyms foreach (methodBodyTyper.context.scope enter _)
- val match_ = methodBodyTyper.typedMatch(gen.mkUnchecked(selector), cases, mode, ptRes)
- val resTp = match_.tpe
+ val match_ = methodBodyTyper.typedMatch(selector, cases, mode, resTp)
- anonClass setInfo ClassInfoType(parentsPartial(List(argTp, resTp)), newScope, anonClass)
- B1 setInfo TypeBounds.lower(resTp)
- anonClass.info.decls enter methodSym // methodSym's info need not change (B1's bound has been updated instead)
+ val matchResTp = match_.tpe
+ B1 setInfo TypeBounds.lower(matchResTp) // patch info
- match_ setType B1.tpe
+ match_ setType B1.tpe
- // the default uses applyOrElse's first parameter since the scrut's type has been widened
- val body = methodBodyTyper.virtualizedMatch(match_ updateAttachment DefaultOverrideMatchAttachment(REF(default) APPLY (REF(x))), mode, B1.tpe)
-
- DefDef(methodSym, body)
- }
+ // the default uses applyOrElse's first parameter since the scrut's type has been widened
+ val matchWithDefault = match_ updateAttachment DefaultOverrideMatchAttachment(REF(default) APPLY (REF(x)))
+ (DefDef(methodSym, methodBodyTyper.virtualizedMatch(matchWithDefault, mode, B1.tpe)), matchResTp)
}
+ // `def isDefinedAt(x: $argTp): Boolean = ${`$selector match { $casesTrue ` updateAttachment DefaultOverrideMatchAttachment(FALSE_typed)}`
def isDefinedAtMethod = {
val methodSym = anonClass.newMethod(nme.isDefinedAt, tree.pos.makeTransparent, FINAL)
- val paramSyms = mkParams(methodSym)
- val selector = mkSel(paramSyms)
+ val paramSym = mkParam(methodSym)
- if (selector eq EmptyTree) EmptyTree
- else {
- val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
- paramSyms foreach (methodBodyTyper.context.scope enter _)
- methodSym setInfoAndEnter MethodType(paramSyms, BooleanClass.tpe)
+ val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
+ methodBodyTyper.context.scope enter paramSym
+ methodSym setInfo MethodType(List(paramSym), BooleanClass.tpe)
- val match_ = methodBodyTyper.typedMatch(gen.mkUnchecked(selector), casesTrue, mode, BooleanClass.tpe)
- val body = methodBodyTyper.virtualizedMatch(match_ updateAttachment DefaultOverrideMatchAttachment(FALSE_typed), mode, BooleanClass.tpe)
+ val match_ = methodBodyTyper.typedMatch(selector, casesTrue, mode, BooleanClass.tpe)
- DefDef(methodSym, body)
- }
+ val matchWithDefault = match_ updateAttachment DefaultOverrideMatchAttachment(FALSE_typed)
+ DefDef(methodSym, methodBodyTyper.virtualizedMatch(matchWithDefault, mode, BooleanClass.tpe))
}
- lazy val members = if (isPartial) {
- // somehow @cps annotations upset the typer when looking at applyOrElse's signature, but not apply's
- // TODO: figure out the details (T @cps[U] is not a subtype of Any, but then why does it work for the apply method?)
- if (targs forall (_ <:< AnyClass.tpe)) List(applyOrElseMethodDef, isDefinedAtMethod)
- else List(applyMethod, isDefinedAtMethod)
- } else List(applyMethod)
+ // only used for @cps annotated partial functions
+ // `def apply(x: $argTp): $matchResTp = $selector match { $cases }`
+ def applyMethod = {
+ val methodSym = anonClass.newMethod(nme.apply, tree.pos, FINAL | OVERRIDE)
+ val paramSym = mkParam(methodSym)
- def translated =
- if (members.head eq EmptyTree) setError(tree)
- else {
- val typedBlock = typedPos(tree.pos, mode, pt) {
- Block(ClassDef(anonClass, NoMods, ListOfNil, members, tree.pos.focus), atPos(tree.pos.focus)(New(anonClass.tpe)))
- }
- // Don't leak implementation details into the type, see SI-6575
- if (isPartial && !typedBlock.isErrorTyped)
- typedPos(tree.pos, mode, pt) {
- Typed(typedBlock, TypeTree(typedBlock.tpe baseType PartialFunctionClass))
- }
- else typedBlock
- }
- }
+ methodSym setInfo MethodType(List(paramSym), AnyClass.tpe)
+
+ val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym))
+ // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
+ methodBodyTyper.context.scope enter paramSym
- // Function(params, Match(sel, cases)) ==> new <Partial>Function { def apply<OrElse>(params) = `translateMatch('sel match { cases }')` }
- class MatchFunTyperBetaReduced(fun: Function, sel: Tree, cases: List[CaseDef], mode: Int, pt: Type) extends MatchFunTyper(fun, cases, mode, pt) {
- override def deriveFormals =
- fun.vparams map { p => if(p.tpt.tpe == null) typedType(p.tpt).tpe else p.tpt.tpe }
+ val match_ = methodBodyTyper.typedMatch(selector, cases, mode, resTp)
- // the only difference from the super class is that we must preserve the names of the parameters
- override def mkParams(methodSym: Symbol, formals: List[Type] = deriveFormals) =
- (fun.vparams, formals).zipped map { (p, tp) =>
- methodSym.newValueParameter(p.name, p.pos.focus, SYNTHETIC) setInfo tp
+ val matchResTp = match_.tpe
+ methodSym setInfo MethodType(List(paramSym), matchResTp) // patch info
+
+ (DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, matchResTp)), matchResTp)
+ }
+
+ def parents(resTp: Type) = addSerializable(appliedType(AbstractPartialFunctionClass.typeConstructor, List(argTp, resTp)))
+
+ val members = {
+ val (applyMeth, matchResTp) = {
+ // rig the show so we can get started typing the method body -- later we'll correct the infos...
+ // targs were type arguments for PartialFunction, so we know they will work for AbstractPartialFunction as well
+ anonClass setInfo ClassInfoType(parents(resTp), newScope, anonClass)
+
+ // somehow @cps annotations upset the typer when looking at applyOrElse's signature, but not apply's
+ // TODO: figure out the details (T @cps[U] is not a subtype of Any, but then why does it work for the apply method?)
+ if (targsValidParams) applyOrElseMethodDef
+ else applyMethod
}
- override def mkSel(params: List[Symbol]) = sel.duplicate
+ // patch info to the class's definitive info
+ anonClass setInfo ClassInfoType(parents(matchResTp), newScope, anonClass)
+ List(applyMeth, isDefinedAtMethod)
+ }
+
+ members foreach (m => anonClass.info.decls enter m.symbol)
+
+ val typedBlock = typedPos(tree.pos, mode, pt) {
+ Block(ClassDef(anonClass, NoMods, ListOfNil, members, tree.pos.focus), atPos(tree.pos.focus)(New(anonClass.tpe)))
+ }
+
+ if (typedBlock.isErrorTyped) typedBlock
+ else // Don't leak implementation details into the type, see SI-6575
+ typedPos(tree.pos, mode, pt) {
+ Typed(typedBlock, TypeTree(typedBlock.tpe baseType PartialFunctionClass))
+ }
}
+
/**
* @param fun ...
* @param mode ...
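A user-level sketch of the expansion synthesizePartialFunction performs for a partial-function literal; names and the default handling are approximate (the real synthesis routes the default through DefaultOverrideMatchAttachment rather than an explicit wildcard case):

    object PartialFunctionExpansionSketch {
      val pf: PartialFunction[Int, String] = { case 1 => "one" }

      // compiles to roughly:
      val pfExpanded: PartialFunction[Int, String] =
        new scala.runtime.AbstractPartialFunction[Int, String] {
          final override def applyOrElse[A1 <: Int, B1 >: String](x: A1, default: A1 => B1): B1 =
            x match { case 1 => "one"; case _ => default(x) }
          final def isDefinedAt(x: Int): Boolean =
            x match { case 1 => true; case _ => false }
        }
    }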
@@ -2812,14 +2847,17 @@ trait Typers extends Modes with Adaptations with Tags {
}
fun.body match {
- // later phase indicates scaladoc is calling (where shit is messed up, I tell you)
- // -- so fall back to old patmat, which is more forgiving
+ // translate `x => x match { <cases> }` : PartialFunction to
+ // `new PartialFunction { def applyOrElse(x, default) = x match { <cases> } def isDefinedAt(x) = ... }`
case Match(sel, cases) if (sel ne EmptyTree) && newPatternMatching && (pt.typeSymbol == PartialFunctionClass) =>
// go to outer context -- must discard the context that was created for the Function since we're discarding the function
// thus, its symbol, which serves as the current context.owner, is not the right owner
// you won't know you're using the wrong owner until lambda lift crashes (unless you know better than to use the wrong owner)
val outerTyper = newTyper(context.outer)
- (new outerTyper.MatchFunTyperBetaReduced(fun, sel, cases, mode, pt)).translated
+ val p = fun.vparams.head
+ if (p.tpt.tpe == null) p.tpt setType outerTyper.typedType(p.tpt).tpe
+
+ outerTyper.synthesizePartialFunction(p.name, p.pos, fun.body, mode, pt)
case _ =>
val vparamSyms = fun.vparams map { vparam =>
enterSym(context, vparam)
@@ -3470,7 +3508,7 @@ trait Typers extends Modes with Adaptations with Tags {
val resTp = fun1.tpe.finalResultType.normalize
val nbSubPats = args.length
- val (formals, formalsExpanded) = extractorFormalTypes(resTp, nbSubPats, fun1.symbol)
+ val (formals, formalsExpanded) = extractorFormalTypes(fun0.pos, resTp, nbSubPats, fun1.symbol)
if (formals == null) duplErrorTree(WrongNumberOfArgsError(tree, fun))
else {
val args1 = typedArgs(args, mode, formals, formalsExpanded)
@@ -3642,15 +3680,18 @@ trait Typers extends Modes with Adaptations with Tags {
} else if (argss.length > 1) {
reportAnnotationError(MultipleArgumentListForAnnotationError(ann))
} else {
- val args =
- if (argss.head.length == 1 && !isNamed(argss.head.head))
- List(new AssignOrNamedArg(Ident(nme.value), argss.head.head))
- else argss.head
val annScope = annType.decls
.filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined)
val names = new scala.collection.mutable.HashSet[Symbol]
+ def hasValue = names exists (_.name == nme.value)
names ++= (if (isJava) annScope.iterator
else typedFun.tpe.params.iterator)
+ val args = argss match {
+ case List(List(arg)) if !isNamed(arg) && hasValue =>
+ List(new AssignOrNamedArg(Ident(nme.value), arg))
+ case as :: _ => as
+ }
+
val nvPairs = args map {
case arg @ AssignOrNamedArg(Ident(name), rhs) =>
val sym = if (isJava) annScope.lookup(name)
@@ -3873,7 +3914,7 @@ trait Typers extends Modes with Adaptations with Tags {
val normalizeLocals = new TypeMap {
def apply(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) =>
- if (sym.isAliasType && containsLocal(tp)) apply(tp.normalize)
+ if (sym.isAliasType && containsLocal(tp)) apply(tp.dealias)
else {
if (pre.isVolatile)
InferTypeWithVolatileTypeSelectionError(tree, pre)
@@ -4352,7 +4393,8 @@ trait Typers extends Modes with Adaptations with Tags {
val selector = tree.selector
val cases = tree.cases
if (selector == EmptyTree) {
- if (newPatternMatching && (pt.typeSymbol == PartialFunctionClass)) (new MatchFunTyper(tree, cases, mode, pt)).translated
+ if (newPatternMatching && (pt.typeSymbol == PartialFunctionClass))
+ synthesizePartialFunction(newTermName(context.unit.fresh.newName("x")), tree.pos, tree, mode, pt)
else {
val arity = if (isFunctionType(pt)) pt.normalize.typeArgs.length - 1 else 1
val params = for (i <- List.range(0, arity)) yield
@@ -4401,7 +4443,13 @@ trait Typers extends Modes with Adaptations with Tags {
def typedNew(tree: New) = {
val tpt = tree.tpt
val tpt1 = {
- val tpt0 = typedTypeConstructor(tpt)
+ // This way typedNew always returns a dealiased type. This used to happen by accident
+ // for instantiations without type arguments due to ad hoc code in typedTypeConstructor,
+ // and annotations depended on it (to the extent that they worked, which they did
+ // not when given a parameterized type alias which dealiased to an annotation.)
+ // typedTypeConstructor dealiases nothing now, but it makes sense for a "new" to always be
+ // given a dealiased type.
+ val tpt0 = typedTypeConstructor(tpt) modifyType (_.dealias)
if (checkStablePrefixClassType(tpt0))
if (tpt0.hasSymbol && !tpt0.symbol.typeParams.isEmpty) {
context.undetparams = cloneSymbols(tpt0.symbol.typeParams)
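A minimal example of the kind of instantiation the comment above is concerned with: `new` applied through a type alias, where the dealiased class is what actually gets constructed. The names below (AliasDemo, Box, IntBox) are illustrative only.

object AliasDemo {
  class Box[T](val value: T)
  type IntBox = Box[Int]
  val b = new IntBox(1)   // typed as if written `new Box[Int](1)`: the alias is dealiased
}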
@@ -5310,7 +5358,7 @@ trait Typers extends Modes with Adaptations with Tags {
def typedUnApply(tree: UnApply) = {
val fun1 = typed(tree.fun)
- val tpes = formalTypes(unapplyTypeList(tree.fun.symbol, fun1.tpe, tree.args.length), tree.args.length)
+ val tpes = formalTypes(unapplyTypeList(tree.fun.pos, tree.fun.symbol, fun1.tpe, tree.args.length), tree.args.length)
val args1 = map2(tree.args, tpes)(typedPattern)
treeCopy.UnApply(tree, fun1, args1) setType pt
}
@@ -5741,29 +5789,18 @@ trait Typers extends Modes with Adaptations with Tags {
def typedTypeConstructor(tree: Tree, mode: Int): Tree = {
val result = typed(tree, forTypeMode(mode) | FUNmode, WildcardType)
- val restpe = result.tpe.normalize // normalize to get rid of type aliases for the following check (#1241)
- if (!phase.erasedTypes && restpe.isInstanceOf[TypeRef] && !restpe.prefix.isStable && !context.unit.isJava) {
- // The isJava exception if OK only because the only type constructors scalac gets
- // to see are those in the signatures. These do not need a unique object as a prefix.
- // The situation is different for new's and super's, but scalac does not look deep
- // enough to see those. See #3938
- ConstructorPrefixError(tree, restpe)
- } else {
- //@M fix for #2208
- // if there are no type arguments, normalization does not bypass any checks, so perform it to get rid of AnyRef
- if (result.tpe.typeArgs.isEmpty) {
- // minimal check: if(result.tpe.typeSymbolDirect eq AnyRefClass) {
- // must expand the fake AnyRef type alias, because bootstrapping (init in Definitions) is not
- // designed to deal with the cycles in the scala package (ScalaObject extends
- // AnyRef, but the AnyRef type alias is entered after the scala package is
- // loaded and completed, so that ScalaObject is unpickled while AnyRef is not
- // yet defined )
- // !!! TODO - revisit now that ScalaObject is gone.
- result setType(restpe)
- } else { // must not normalize: type application must be (bounds-)checked (during RefChecks), see #2208
+ // get rid of type aliases for the following check (#1241)
+ result.tpe.dealias match {
+ case restpe @ TypeRef(pre, _, _) if !phase.erasedTypes && !pre.isStable && !context.unit.isJava =>
+ // The isJava exception is OK only because the only type constructors scalac gets
+ // to see are those in the signatures. These do not need a unique object as a prefix.
+ // The situation is different for new's and super's, but scalac does not look deep
+ // enough to see those. See #3938
+ ConstructorPrefixError(tree, restpe)
+ case _ =>
+ // must not normalize: type application must be (bounds-)checked (during RefChecks), see #2208
// during uncurry (after refchecks), all types are normalized
result
- }
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index a34d7389bf..5782d7bbca 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -34,12 +34,12 @@ trait Unapplies extends ast.TreeDSL
/** returns type list for return type of the extraction
* @see extractorFormalTypes
*/
- def unapplyTypeList(ufn: Symbol, ufntpe: Type, nbSubPats: Int) = {
+ def unapplyTypeList(pos: Position, ufn: Symbol, ufntpe: Type, nbSubPats: Int) = {
assert(ufn.isMethod, ufn)
//Console.println("utl "+ufntpe+" "+ufntpe.typeSymbol)
ufn.name match {
case nme.unapply | nme.unapplySeq =>
- val (formals, _) = extractorFormalTypes(unapplyUnwrap(ufntpe), nbSubPats, ufn)
+ val (formals, _) = extractorFormalTypes(pos, unapplyUnwrap(ufntpe), nbSubPats, ufn)
if (formals == null) throw new TypeError(s"$ufn of type $ufntpe cannot extract $nbSubPats sub-patterns")
else formals
case _ => throw new TypeError(ufn+" is not an unapply or unapplySeq")
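For context, the "formal types of the extraction" are the types of the sub-patterns implied by the extractor's result type. A plain, hypothetical extractor (not from the patch) makes this concrete:

object ExtractorDemo {
  object Person {
    // result type Option[(String, Int)] yields two formal types: String and Int
    def unapply(s: String): Option[(String, Int)] = {
      val parts = s.split(",")
      if (parts.length == 2) Some((parts(0).trim, parts(1).trim.toInt)) else None
    }
  }

  val described = "Ann, 41" match {
    case Person(name, age) => s"$name is $age"   // two sub-patterns, matching the formals
  }
}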
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
index 65654be69b..8dbca6da4b 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
@@ -1372,7 +1372,7 @@ public class ForkJoinPool extends AbstractExecutorService {
}
if (ex != null) // rethrow
- U.throwException(ex);
+ ForkJoinTask.rethrow(ex);
}
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
index 15c60118b3..839fd26b39 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
@@ -595,6 +595,30 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
}
}
}
+
+ /**
+ * A version of "sneaky throw" to relay exceptions
+ */
+ static void rethrow(final Throwable ex) {
+ if (ex != null) {
+ if (ex instanceof Error)
+ throw (Error)ex;
+ if (ex instanceof RuntimeException)
+ throw (RuntimeException)ex;
+ ForkJoinTask.<RuntimeException>uncheckedThrow(ex);
+ }
+ }
+
+ /**
+ * The sneaky part of sneaky throw, relying on generics
+ * limitations to evade compiler complaints about rethrowing
+ * unchecked exceptions
+ */
+ @SuppressWarnings("unchecked") static <T extends Throwable>
+ void uncheckedThrow(Throwable t) throws T {
+ if (t != null)
+ throw (T)t; // rely on vacuous cast
+ }
/**
* Throws exception, if any, associated with the given status.
@@ -604,7 +628,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
(s == EXCEPTIONAL) ? getThrowableException() :
null);
if (ex != null)
- U.throwException(ex);
+ ForkJoinTask.rethrow(ex);
}
// public methods
@@ -742,7 +766,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
}
}
if (ex != null)
- U.throwException(ex);
+ ForkJoinTask.rethrow(ex);
}
/**
@@ -799,7 +823,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
}
}
if (ex != null)
- U.throwException(ex);
+ ForkJoinTask.rethrow(ex);
return tasks;
}
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index d53d000e90..a448ac2c09 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -148,6 +148,20 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
def foldRight[B](z: B)(op: (A, B) => B): B =
reversed.foldLeft(z)((x, y) => op(y, x))
+ /** Applies a binary operator to all elements of this $coll,
+ * going left to right.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going left to right:
+ * {{{
+ * op( op( ... op(x_1, x_2) ..., x_{n-1}), x_n)
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ * @throws `UnsupportedOperationException` if this $coll is empty. */
def reduceLeft[B >: A](op: (B, A) => B): B = {
if (isEmpty)
throw new UnsupportedOperationException("empty.reduceLeft")
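A quick usage example matching the left-associative grouping described in the new documentation above (values shown are simple arithmetic, easy to verify by hand):

object ReduceLeftDemo {
  val diff = List(1, 2, 3, 4).reduceLeft(_ - _)   // ((1 - 2) - 3) - 4 == -8
  // List.empty[Int].reduceLeft(_ - _)            // would throw UnsupportedOperationException
}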
diff --git a/src/library/scala/collection/generic/IsTraversableLike.scala b/src/library/scala/collection/generic/IsTraversableLike.scala
index b45279229b..c70772d8f9 100644
--- a/src/library/scala/collection/generic/IsTraversableLike.scala
+++ b/src/library/scala/collection/generic/IsTraversableLike.scala
@@ -9,26 +9,97 @@
package scala.collection
package generic
-/** Type class witnessing that a collection representation type `Repr` has
- * elements of type `A` and has a conversion to `GenTraversableLike[A, Repr]`.
+/** A trait which can be used to avoid code duplication when defining extension
+ * methods that should be applicable both to existing Scala collections (i.e.,
+ * types extending `GenTraversableLike`) and to other (potentially user-defined)
+ * types that can be converted to a Scala collection type. This trait
+ * makes it possible to treat Scala collections and types that can be implicitly
+ * converted to a collection type uniformly. For example, one can provide
+ * extension methods that work both on collection types and on `String`s (`String`s
+ * do not extend `GenTraversableLike`, but can be converted to `GenTraversableLike`).
*
- * This type enables simple enrichment of `GenTraversable`s with extension
- * methods which can make full use of the mechanics of the Scala collections
- * framework in their implementation.
+ * `IsTraversableLike` provides two members:
+ *
+ * 1. type member `A`, which represents the element type of the target `GenTraversableLike[A, Repr]`
+ * 1. value member `conversion`, which provides a way to convert between the type we wish to add extension methods to, `Repr`, and `GenTraversableLike[A, Repr]`.
+ *
+ * ===Usage===
+ *
+ * One must provide `IsTraversableLike` as an implicit parameter type of an implicit
+ * conversion. Its usage is shown below. Our objective in the following example
+ * is to provide a generic extension method `mapReduce` to any type that extends
+ * or can be converted to `GenTraversableLike`. In our example, this includes
+ * `String`.
*
- * Example usage,
* {{{
- * class FilterMapImpl[A, Repr](val r: GenTraversableLike[A, Repr]) {
- * final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That =
- * r.flatMap(f(_).toSeq)
+ * import scala.collection.GenTraversableLike
+ * import scala.collection.generic.IsTraversableLike
+ *
+ * class ExtensionMethods[A, Repr](coll: GenTraversableLike[A, Repr]) {
+ * def mapReduce[B](mapper: A => B)(reducer: (B, B) => B): B = {
+ * val iter = coll.toIterator
+ * var res = mapper(iter.next())
+ * while (iter.hasNext)
+ * res = reducer(res, mapper(iter.next()))
+ * res
+ * }
* }
- * implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableOnce[Repr]): FilterMapImpl[fr.A,Repr] =
- * new FilterMapImpl(fr.conversion(r))
*
- * val l = List(1, 2, 3, 4, 5)
- * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None)
- * // == List(2, 4)
- * }}}
+ * implicit def withExtensions[Repr](coll: Repr)(implicit traversable: IsTraversableLike[Repr]) =
+ * new ExtensionMethods(traversable.conversion(coll))
+ *
+ * // See it in action!
+ * List(1, 2, 3).mapReduce(_ * 2)(_ + _) // res0: Int = 12
+ * "Yeah, well, you know, that's just, like, your opinion, man.".mapReduce(x => 1)(_ + _) // res1: Int = 59
+ *}}}
+ *
+ * Here, we begin by creating a class `ExtensionMethods` which contains our
+ * `mapReduce` extension method. Note that `ExtensionMethods` takes a constructor
+ * argument `coll` of type `GenTraversableLike[A, Repr]`, where `A` represents the
+ * element type and `Repr` represents (typically) the collection type. The
+ * implementation of `mapReduce` itself is straightforward.
+ *
+ * The interesting bit is the implicit conversion `withExtensions`, which
+ * returns an instance of `ExtensionMethods`. This implicit conversion can
+ * only be applied if there is an implicit value `traversable` of type
+ * `IsTraversableLike[Repr]` in scope. Since `IsTraversableLike` provides
+ * value member `conversion`, which gives us a way to convert between whatever
+ * type we wish to add an extension method to (in this case, `Repr`) and
+ * `GenTraversableLike[A, Repr]`, we can now convert `coll` from type `Repr`
+ * to `GenTraversableLike[A, Repr]`. This allows us to create an instance of
+ * the `ExtensionMethods` class, which we pass our new
+ * `GenTraversableLike[A, Repr]` to.
+ *
+ * When the `mapReduce` method is called on some type of which it is not
+ * a member, implicit search is triggered. Because implicit conversion
+ * `withExtensions` is generic, it will be applied as long as an implicit
+ * value of type `IsTraversableLike[Repr]` can be found. Given that
+ * `IsTraversableLike` contains implicit members that return values of type
+ * `IsTraversableLike`, this requirement is typically satisfied, and the chain
+ * of interactions described in the previous paragraph is set into action.
+ * (See the `IsTraversableLike` companion object, which contains a precise
+ * specification of the available implicits.)
+ *
+ * ''Note'': Currently, it's not possible to combine the implicit conversion and
+ * the class with the extension methods into an implicit class due to
+ * limitations of type inference.
+ *
+ * ===Implementing `IsTraversableLike` for New Types===
+ *
+ * One must simply provide an implicit value of type `IsTraversableLike`
+ * specific to the new type, or an implicit conversion which returns an
+ * instance of `IsTraversableLike` specific to the new type.
+ *
+ * Below is an example of an implementation of the `IsTraversableLike` trait
+ * where the `Repr` type is `String`.
+ *
+ *{{{
+ * implicit val stringRepr: IsTraversableLike[String] { type A = Char } =
+ * new IsTraversableLike[String] {
+ * type A = Char
+ * val conversion = implicitly[String => GenTraversableLike[Char, String]]
+ * }
+ *}}}
*
* @author Miles Sabin
* @author J. Suereth
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 802e16605d..02c10700b1 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -77,6 +77,7 @@ extends scala.collection.AbstractSeq[Int]
final val terminalElement = start + numRangeElements * step
override def last = if (isEmpty) Nil.last else lastElement
+ override def head = if (isEmpty) Nil.head else start
override def min[A1 >: Int](implicit ord: Ordering[A1]): Int =
if (ord eq Ordering.Int) {
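The new override keeps `head` cheap for non-empty ranges and delegates the empty case to `Nil.head`, so the failure mode matches the rest of the collections. A small, hedged illustration (object name is made up):

object RangeHeadDemo {
  val first = (1 to 3).head   // 1, i.e. the range's start
  // (1 until 1).head         // empty range: fails exactly like Nil.head
}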
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index fd1364c2dc..cc145134c4 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -102,7 +102,7 @@ private[scala] trait PropertiesTrait {
* or "version (unknown)" if it cannot be determined.
*/
val versionString = "version " + scalaPropOrElse("version.number", "(unknown)")
- val copyrightString = scalaPropOrElse("copyright.string", "(c) 2002-2011 LAMP/EPFL")
+ val copyrightString = scalaPropOrElse("copyright.string", "Copyright 2002-2013, LAMP/EPFL")
/** This is the encoding to use reading in source files, overridden with -encoding
* Note that it uses "prop" i.e. looks in the scala jar, not the system properties.
diff --git a/src/manual/scala/man1/scaladoc.scala b/src/manual/scala/man1/scaladoc.scala
index 34c58b6b8e..1737c5efa0 100644
--- a/src/manual/scala/man1/scaladoc.scala
+++ b/src/manual/scala/man1/scaladoc.scala
@@ -77,8 +77,8 @@ object scaladoc extends Command {
CmdOption("doc-source-url", Argument("url")),
"Define a URL to be concatenated with source locations for link to source files."),
Definition(
- CmdOption("doc-external-uris", Argument("external-doc")),
- "Define comma-separated list of file://classpath_entry_path#doc_URL URIs for linking to external dependencies."))),
+ CmdOption("doc-external-doc", Argument("external-doc")),
+ "Define a comma-separated list of classpath_entry_path#doc_URL pairs describing external dependencies."))),
Section("Compiler Options",
DefinitionList(
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index bfd18f6a43..c2637e6967 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -593,6 +593,26 @@ trait Types extends api.Types { self: SymbolTable =>
/** Expands type aliases. */
def dealias = this
+ /** Repeatedly apply widen and dealias until they have no effect.
+ * This compensates for the fact that type aliases can hide beneath
+ * singleton types and singleton types can hide inside type aliases.
+ */
+ def dealiasWiden: Type = (
+ if (this ne widen) widen.dealiasWiden
+ else if (this ne dealias) dealias.dealiasWiden
+ else this
+ )
+
+ /** All the types encountered in the course of dealiasing/widening,
+ * including each intermediate beta reduction step (whereas calling
+ * dealias applies as many as possible.)
+ */
+ def dealiasWidenChain: List[Type] = this :: (
+ if (this ne widen) widen.dealiasWidenChain
+ else if (this ne betaReduce) betaReduce.dealiasWidenChain
+ else Nil
+ )
+
def etaExpand: Type = this
/** Performs a single step of beta-reduction on types.
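As a small, hedged illustration of why a combined fixpoint is needed (the alias below is purely illustrative): a stable reference has a singleton type, widening it exposes an alias, and only a further dealias reaches the underlying class.

object DealiasWidenDemo {
  type S = String
  val s: S = "abc"
  // the most specific type of the stable reference `s` is the singleton type s.type;
  // one widen step yields the alias S, and one dealias step yields String.
  // Type#dealiasWiden keeps applying both until neither changes the type.
}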
@@ -3173,23 +3193,20 @@ trait Types extends api.Types { self: SymbolTable =>
* Checks subtyping of higher-order type vars, and uses variances as defined in the
* type parameter we're trying to infer (the result will be sanity-checked later).
*/
- def unifyFull(tpe: Type) = {
- // The alias/widen variations are often no-ops.
- val tpes = (
- if (isLowerBound) List(tpe, tpe.widen, tpe.dealias, tpe.widen.dealias).distinct
- else List(tpe)
- )
- tpes exists { tp =>
- val lhs = if (isLowerBound) tp.typeArgs else typeArgs
- val rhs = if (isLowerBound) typeArgs else tp.typeArgs
-
- sameLength(lhs, rhs) && {
+ def unifyFull(tpe: Type): Boolean = {
+ def unifySpecific(tp: Type) = {
+ sameLength(typeArgs, tp.typeArgs) && {
+ val lhs = if (isLowerBound) tp.typeArgs else typeArgs
+ val rhs = if (isLowerBound) typeArgs else tp.typeArgs
// this is a higher-kinded type var with same arity as tp.
// side effect: adds the type constructor itself as a bound
addBound(tp.typeConstructor)
isSubArgs(lhs, rhs, params, AnyDepth)
}
}
+ // The type with which we can successfully unify can be hidden
+ // behind singleton types and type aliases.
+ tpe.dealiasWidenChain exists unifySpecific
}
// There's a <: test taking place right now, where tp is a concrete type and this is a typevar
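A hedged sketch of the situation the new `dealiasWidenChain` traversal is meant to handle: unifying a higher-kinded type variable against a type hidden behind an alias. The names below (UnifyThroughAliasDemo, IntList, duplicate) are illustrative only.

object UnifyThroughAliasDemo {
  type IntList = List[Int]
  def duplicate[CC[_], T](xs: CC[T]): (CC[T], CC[T]) = (xs, xs)

  val xs: IntList = List(1, 2, 3)
  // unifying CC[T] with IntList only succeeds once the alias is expanded to
  // List[Int], which is what walking the dealias/widen chain provides.
  val pair = duplicate(xs)
}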
@@ -3282,7 +3299,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (constr.instValid) constr.inst
// get here when checking higher-order subtyping of the typevar by itself
// TODO: check whether this ever happens?
- else if (isHigherKinded) typeFun(params, applyArgs(params map (_.typeConstructor)))
+ else if (isHigherKinded) logResult(s"Normalizing HK $this")(typeFun(params, applyArgs(params map (_.typeConstructor))))
else super.normalize
)
override def typeSymbol = origin.typeSymbol
@@ -3754,7 +3771,7 @@ trait Types extends api.Types { self: SymbolTable =>
def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type =
if (tparams.isEmpty) tpe0
else {
- val tpe = deAlias(tpe0)
+ val tpe = normalizeAliases(tpe0)
val tpe1 = new ExistentialExtrapolation(tparams) extrapolate tpe
var tparams0 = tparams
var tparams1 = tparams0 filter tpe1.contains
@@ -3768,13 +3785,16 @@ trait Types extends api.Types { self: SymbolTable =>
newExistentialType(tparams1, tpe1)
}
- /** Remove any occurrences of type aliases from this type */
- object deAlias extends TypeMap {
- def apply(tp: Type): Type = mapOver {
- tp match {
- case TypeRef(pre, sym, args) if sym.isAliasType => tp.normalize
- case _ => tp
- }
+ /** Normalize any type aliases within this type (@see Type#normalize).
+ * Note that this depends very much on the call to "normalize", not "dealias",
+ * so it no longer carries the too-stealthy name "deAlias".
+ */
+ object normalizeAliases extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(_, sym, _) if sym.isAliasType =>
+ def msg = if (tp.isHigherKinded) s"Normalizing type alias function $tp" else s"Dealiasing type alias $tp"
+ mapOver(logResult(msg)(tp.normalize))
+ case _ => mapOver(tp)
}
}
diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
index 81368df7a6..ec3501d5bc 100644
--- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
+++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
@@ -47,4 +47,6 @@ abstract class MutableSettings extends AbsSettings {
def XoldPatmat: BooleanSetting
def XnoPatmatAnalysis: BooleanSetting
def XfullLubs: BooleanSetting
+ def companionsInPkgObjs: BooleanSetting
+
}
diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala
index 0e0cf3fc40..2d5b76f094 100644
--- a/src/reflect/scala/reflect/runtime/Settings.scala
+++ b/src/reflect/scala/reflect/runtime/Settings.scala
@@ -43,6 +43,7 @@ private[reflect] class Settings extends MutableSettings {
val printtypes = new BooleanSetting(false)
val uniqid = new BooleanSetting(false)
val verbose = new BooleanSetting(false)
+ val companionsInPkgObjs = new BooleanSetting(false)
val Yrecursion = new IntSetting(0)
val maxClassfileName = new IntSetting(255)
diff --git a/src/swing/scala/swing/SwingActor.scala b/src/swing/scala/swing/SwingActor.scala
index 6692180aac..035e979bee 100644
--- a/src/swing/scala/swing/SwingActor.scala
+++ b/src/swing/scala/swing/SwingActor.scala
@@ -10,26 +10,6 @@
package scala.swing
-import scala.actors._
-
// Dummy to keep ant from recompiling on every run.
+@deprecated("Will be removed in 2.11.0", "2.10.1")
trait SwingActor { }
-
-/*object SwingActor {
- /**
- * Similar to Actor.actor, but creates an instance of a SwingActor.
- */
- def apply(body: => Unit): Actor =
- new SwingActor { def act() = body }.start()
-}
-
-/**
- * An actor that runs on the Swing event dispatching thread (EDT).
- */
-abstract class SwingActor extends Actor {
- override val scheduler = new SchedulerAdapter {
- def execute(op: =>Unit) = Swing onEDT op
- def onTerminate(a: Actor)(op: => Unit) {}
- def terminated(a: Actor) {}
- }
-}*/
diff --git a/src/swing/scala/swing/SwingWorker.scala b/src/swing/scala/swing/SwingWorker.scala
index 0e514e38a7..f4eeb5824a 100644
--- a/src/swing/scala/swing/SwingWorker.scala
+++ b/src/swing/scala/swing/SwingWorker.scala
@@ -2,10 +2,12 @@ package scala.swing
import scala.actors._
+@deprecated("Will be removed in 2.11.0", "2.10.1")
object SwingWorker {
}
+@deprecated("Depends on the deprecated package scala.actors. Will be removed in 2.11.0", "2.10.1")
abstract class SwingWorker extends Actor {
def queue() {
@@ -18,4 +20,4 @@ abstract class SwingWorker extends Actor {
private var _cancelled = false
def cancelled: Boolean = _cancelled
def cancelled_=(b: Boolean) { _cancelled = b }
-} \ No newline at end of file
+}