Diffstat (limited to 'src')
-rw-r--r--  src/actors/scala/actors/scheduler/ThreadPoolConfig.scala | 5
-rw-r--r--  src/build/maven/maven-deploy.xml | 6
-rw-r--r--  src/compiler/scala/reflect/macros/runtime/Typers.scala | 2
-rw-r--r--  src/compiler/scala/reflect/reify/package.scala | 7
-rw-r--r--  src/compiler/scala/reflect/reify/phases/Reshape.scala | 16
-rw-r--r--  src/compiler/scala/tools/ant/templates/tool-windows.tmpl | 70
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 24
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Constructors.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 150
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/Solving.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Duplicators.scala | 21
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 59
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 18
-rw-r--r--  src/compiler/scala/tools/nsc/util/ClassPath.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/util/MsilClassPath.scala | 3
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBoxFactory.scala | 17
-rw-r--r--  src/eclipse/README.md | 3
-rw-r--r--  src/eclipse/test-junit/.classpath | 12
-rw-r--r--  src/eclipse/test-junit/.project | 35
-rw-r--r--  src/library/scala/collection/concurrent/TrieMap.scala | 7
-rw-r--r--  src/library/scala/concurrent/Future.scala | 188
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 275
-rw-r--r--  src/library/scala/math/BigDecimal.scala | 2
-rw-r--r--  src/library/scala/math/BigInt.scala | 2
-rw-r--r--  src/library/scala/util/Properties.scala | 51
-rw-r--r--  src/partest/scala/tools/partest/DirectTest.scala | 27
-rw-r--r--  src/partest/scala/tools/partest/PartestTask.scala | 5
-rw-r--r--  src/reflect/scala/reflect/internal/CapturedVariables.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/ClassfileConstants.scala | 36
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 13
-rw-r--r--  src/reflect/scala/reflect/internal/Importers.scala | 7
-rw-r--r--  src/reflect/scala/reflect/internal/Printers.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/StdAttachments.scala | 31
-rw-r--r--  src/reflect/scala/reflect/internal/Trees.scala | 7
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 18
-rw-r--r--  src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala | 13
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaMirrors.scala | 30
-rw-r--r--  src/reflect/scala/reflect/runtime/ReflectionUtils.scala | 6
-rw-r--r--  src/reflect/scala/reflect/runtime/SymbolLoaders.scala | 11
48 files changed, 860 insertions(+), 399 deletions(-)
diff --git a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
index ee8776f397..bfd4e7ac40 100644
--- a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
+++ b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
@@ -42,10 +42,7 @@ private[actors] object ThreadPoolConfig {
(propIsSetTo("actors.enableForkJoin", "true") || {
Debug.info(this+": java.version = "+javaVersion)
Debug.info(this+": java.vm.vendor = "+javaVmVendor)
-
- // on IBM J9 1.6 do not use ForkJoinPool
- // XXX this all needs to go into Properties.
- isJavaAtLeast("1.6") && ((javaVmVendor contains "Oracle") || (javaVmVendor contains "Sun") || (javaVmVendor contains "Apple"))
+ isJavaAtLeast("1.6")
})
catch {
case _: SecurityException => false
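For orientation, a minimal standalone sketch (not part of the patch) of the simplified check: ForkJoin is now enabled on any 1.6+ JVM via scala.util.Properties, with no vendor string matching; the helper name below is hypothetical.

```
import scala.util.Properties

// Hypothetical standalone version of the simplified check above.
def forkJoinEnabled: Boolean =
  try Properties.isJavaAtLeast("1.6")   // vendor checks (Oracle/Sun/Apple) are gone
  catch { case _: SecurityException => false }
```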
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index bd946bf0f3..e70173319e 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -164,11 +164,7 @@
<attribute name="repository" />
<attribute name="version" />
<sequential>
- <deploy-remote name="scala-library" version="@{version}" repository="@{repository}">
- <extra-attachments>
- <artifact:attach type="jar" file="scala-library/scala-library-docs.jar" classifier="javadoc" />
- </extra-attachments>
- </deploy-remote>
+ <deploy-remote name="scala-library" version="@{version}" repository="@{repository}"/>
<deploy-remote name="jline" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scala-reflect" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scala-compiler" version="@{version}" repository="@{repository}" />
diff --git a/src/compiler/scala/reflect/macros/runtime/Typers.scala b/src/compiler/scala/reflect/macros/runtime/Typers.scala
index 4d333f180b..a51bee0fe8 100644
--- a/src/compiler/scala/reflect/macros/runtime/Typers.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Typers.scala
@@ -22,7 +22,7 @@ trait Typers {
// typechecking uses silent anyways (e.g. in typedSelect), so you'll only waste your time
// I'd advise fixing the root cause: finding why the context is not set to report errors
// (also see reflect.runtime.ToolBoxes.typeCheckExpr for a workaround that might work for you)
- wrapper(callsiteTyper.silent(_.typed(tree, universe.analyzer.EXPRmode, pt)) match {
+ wrapper(callsiteTyper.silent(_.typed(tree, universe.analyzer.EXPRmode, pt), reportAmbiguousErrors = false) match {
case universe.analyzer.SilentResultValue(result) =>
macroLogVerbose(result)
result
diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala
index 55f8684df2..6777bb0a50 100644
--- a/src/compiler/scala/reflect/reify/package.scala
+++ b/src/compiler/scala/reflect/reify/package.scala
@@ -45,18 +45,21 @@ package object reify {
def reifyType(global: Global)(typer: global.analyzer.Typer,universe: global.Tree, mirror: global.Tree, tpe: global.Type, concrete: Boolean = false): global.Tree =
mkReifier(global)(typer, universe, mirror, tpe, concrete = concrete).reification.asInstanceOf[global.Tree]
- def reifyRuntimeClass(global: Global)(typer0: global.analyzer.Typer, tpe: global.Type, concrete: Boolean = true): global.Tree = {
+ def reifyRuntimeClass(global: Global)(typer0: global.analyzer.Typer, tpe0: global.Type, concrete: Boolean = true): global.Tree = {
import global._
import definitions._
import analyzer.enclosingMacroPosition
+ // SI-7375
+ val tpe = tpe0.dealiasWiden
+
if (tpe.isSpliceable) {
val classTagInScope = typer0.resolveClassTag(enclosingMacroPosition, tpe, allowMaterialization = false)
if (!classTagInScope.isEmpty) return Select(classTagInScope, nme.runtimeClass)
if (concrete) throw new ReificationException(enclosingMacroPosition, "tpe %s is an unresolved spliceable type".format(tpe))
}
- tpe.normalize match {
+ tpe match {
case TypeRef(_, ArrayClass, componentTpe :: Nil) =>
val componentErasure = reifyRuntimeClass(global)(typer0, componentTpe, concrete)
gen.mkMethodCall(arrayClassMethod, List(componentErasure))
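To see why dealiasing matters for the match above (SI-7375), an illustrative runtime-reflection sketch; `dealias` is the public counterpart (2.11+) of the internal `dealiasWiden` used in the patch, and the names below are made up.

```
import scala.reflect.runtime.universe._

object DealiasDemo extends App {
  type IntArray = Array[Int]            // an alias hides the Array TypeRef
  val t = typeOf[IntArray]
  println(t =:= typeOf[Array[Int]])     // true: the types are equivalent
  println(t.dealias)                    // Array[Int]: the shape the TypeRef(_, ArrayClass, _) pattern needs to see
}
```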
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index a320718084..535a933c73 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -91,20 +91,20 @@ trait Reshape {
private def undoMacroExpansion(tree: Tree): Tree =
tree.attachments.get[MacroExpansionAttachment] match {
case Some(MacroExpansionAttachment(original)) =>
+ def mkImplicitly(tp: Type) = atPos(tree.pos)(
+ gen.mkNullaryCall(Predef_implicitly, List(tp))
+ )
+ val sym = original.symbol
original match {
// this hack is necessary until I fix implicit macros
// so far tag materialization is implemented by sneaky macros hidden in scala-compiler.jar
// hence we cannot reify references to them, because no one will be able to see them later
// when implicit macros are fixed, these sneaky macros will move to corresponding companion objects
// of, say, ClassTag or TypeTag
- case Apply(TypeApply(_, List(tt)), _) if original.symbol == materializeClassTag =>
- gen.mkNullaryCall(Predef_implicitly, List(appliedType(ClassTagClass, tt.tpe)))
- case Apply(TypeApply(_, List(tt)), List(pre)) if original.symbol == materializeWeakTypeTag =>
- gen.mkNullaryCall(Predef_implicitly, List(typeRef(pre.tpe, WeakTypeTagClass, List(tt.tpe))))
- case Apply(TypeApply(_, List(tt)), List(pre)) if original.symbol == materializeTypeTag =>
- gen.mkNullaryCall(Predef_implicitly, List(typeRef(pre.tpe, TypeTagClass, List(tt.tpe))))
- case _ =>
- original
+ case Apply(TypeApply(_, List(tt)), _) if sym == materializeClassTag => mkImplicitly(appliedType(ClassTagClass, tt.tpe))
+ case Apply(TypeApply(_, List(tt)), List(pre)) if sym == materializeWeakTypeTag => mkImplicitly(typeRef(pre.tpe, WeakTypeTagClass, List(tt.tpe)))
+ case Apply(TypeApply(_, List(tt)), List(pre)) if sym == materializeTypeTag => mkImplicitly(typeRef(pre.tpe, TypeTagClass, List(tt.tpe)))
+ case _ => original
}
case _ => tree
}
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index bd6cf561b9..1288eb0b7c 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -22,13 +22,74 @@ if "%~1"=="-toolcp" (
goto another_param
)
-set _LINE_PARAMS=%1
+rem We keep in _JAVA_PARAMS all -J-prefixed and -D-prefixed arguments
+set _JAVA_PARAMS=
+
+if [%1]==[] goto param_afterloop
+set _TEST_PARAM=%~1
+if not "%_TEST_PARAM:~0,1%"=="-" goto param_afterloop
+
+rem ignore -e "scala code"
+if "%_TEST_PARAM:~0,2%"=="-e" (
+ shift
+ shift
+ if [%1]==[] goto param_afterloop
+)
+
+set _TEST_PARAM=%~1
+if "%_TEST_PARAM:~0,2%"=="-J" (
+ set _JAVA_PARAMS=%_TEST_PARAM:~2%
+)
+
+if "%_TEST_PARAM:~0,2%"=="-D" (
+ rem test if this was double-quoted property "-Dprop=42"
+ for /F "delims== tokens=1-2" %%G in ("%_TEST_PARAM%") DO (
+ if not "%%G" == "%_TEST_PARAM%" (
+ rem double quoted: "-Dprop=42" -> -Dprop="42"
+ set _JAVA_PARAMS=%%G="%%H"
+ ) else if [%2] neq [] (
+ rem it was a normal property: -Dprop=42 or -Dprop="42"
+ set _JAVA_PARAMS=%_TEST_PARAM%=%2
+ shift
+ )
+ )
+)
+
:param_loop
shift
+
if [%1]==[] goto param_afterloop
-set _LINE_PARAMS=%_LINE_PARAMS% %1
+set _TEST_PARAM=%~1
+if not "%_TEST_PARAM:~0,1%"=="-" goto param_afterloop
+
+rem ignore -e "scala code"
+if "%_TEST_PARAM:~0,2%"=="-e" (
+ shift
+ shift
+ if [%1]==[] goto param_afterloop
+)
+
+set _TEST_PARAM=%~1
+if "%_TEST_PARAM:~0,2%"=="-J" (
+ set _JAVA_PARAMS=%_JAVA_PARAMS% %_TEST_PARAM:~2%
+)
+
+if "%_TEST_PARAM:~0,2%"=="-D" (
+ rem test if this was double-quoted property "-Dprop=42"
+ for /F "delims== tokens=1-2" %%G in ("%_TEST_PARAM%") DO (
+ if not "%%G" == "%_TEST_PARAM%" (
+ rem double quoted: "-Dprop=42" -> -Dprop="42"
+ set _JAVA_PARAMS=%_JAVA_PARAMS% %%G="%%H"
+ ) else if [%2] neq [] (
+ rem it was a normal property: -Dprop=42 or -Dprop="42"
+ set _JAVA_PARAMS=%_JAVA_PARAMS% %_TEST_PARAM%=%2
+ shift
+ )
+ )
+)
goto param_loop
:param_afterloop
+
if "%OS%" NEQ "Windows_NT" (
echo "Warning, your version of Windows is not supported. Attempting to start scala anyway."
)
@@ -51,6 +112,9 @@ rem We use the value of the JAVA_OPTS environment variable if defined
set _JAVA_OPTS=%JAVA_OPTS%
if not defined _JAVA_OPTS set _JAVA_OPTS=@javaflags@
+rem We append _JAVA_PARAMS java arguments to JAVA_OPTS if necessary
+if defined _JAVA_PARAMS set _JAVA_OPTS=%_JAVA_OPTS% %_JAVA_PARAMS%
+
set _TOOL_CLASSPATH=@classpath@
if "%_TOOL_CLASSPATH%"=="" (
for %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
@@ -89,4 +153,4 @@ goto :eof
@@endlocal
REM exit code fix, see http://stackoverflow.com/questions/4632891/exiting-batch-with-exit-b-x-where-x-1-acts-as-if-command-completed-successfu
-@@%COMSPEC% /C exit %errorlevel% >nul
+@@"%COMSPEC%" /C exit %errorlevel% >nul
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index b8791c15dc..50398824e1 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1474,8 +1474,9 @@ self =>
* }}}
*/
def postfixExpr(): Tree = {
- val base = opstack
- var top = prefixExpr()
+ val start = in.offset
+ val base = opstack
+ var top = prefixExpr()
while (isIdent) {
top = reduceStack(isExpr = true, base, top, precedence(in.name), leftAssoc = treeInfo.isLeftAssoc(in.name))
@@ -1493,9 +1494,7 @@ self =>
val topinfo = opstack.head
opstack = opstack.tail
val od = stripParens(reduceStack(isExpr = true, base, topinfo.operand, 0, leftAssoc = true))
- return atPos(od.pos.startOrPoint, topinfo.offset) {
- new PostfixSelect(od, topinfo.operator.encode)
- }
+ return makePostfixSelect(start, topinfo.offset, od, topinfo.operator)
}
}
reduceStack(isExpr = true, base, top, 0, leftAssoc = true)
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index b5771454f8..1412bff0ab 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -245,6 +245,12 @@ abstract class TreeBuilder {
Assign(lhs, rhs)
}
+ /** Tree for `od op`, start is start0 if od.pos is borked. */
+ def makePostfixSelect(start0: Int, end: Int, od: Tree, op: Name): Tree = {
+ val start = if (od.pos.isDefined) od.pos.startOrPoint else start0
+ atPos(r2p(start, end, end + op.length)) { new PostfixSelect(od, op.encode) }
+ }
+
/** A type tree corresponding to (possibly unary) intersection type */
def makeIntersectionTypeTree(tps: List[Tree]): Tree =
if (tps.tail.isEmpty) tps.head
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 47a8b1f947..2955986a7e 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -626,7 +626,7 @@ abstract class ClassfileParser {
sawPrivateConstructor = true
in.skip(2); skipAttributes()
} else {
- if ((sflags & PRIVATE) != 0L && global.settings.optimise.value) {
+ if ((sflags & PRIVATE) != 0L && global.settings.optimise.value) { // TODO this should be !optimize, no? See c4181f656d.
in.skip(4); skipAttributes()
} else {
val name = pool.getName(in.nextChar)
@@ -636,7 +636,7 @@ abstract class ClassfileParser {
info match {
case MethodType(params, restpe) =>
// if this is a non-static inner class, remove the explicit outer parameter
- val newParams = innerClasses getEntry currentClass match {
+ val paramsNoOuter = innerClasses getEntry currentClass match {
case Some(entry) if !isScalaRaw && !isStatic(entry.jflags) =>
/* About `clazz.owner.isPackage` below: SI-5957
* For every nested java class A$B, there are two symbols in the scala compiler.
@@ -650,6 +650,15 @@ abstract class ClassfileParser {
case _ =>
params
}
+ val newParams = paramsNoOuter match {
+ case (init :+ tail) if (jflags & JAVA_ACC_SYNTHETIC) != 0L =>
+ // SI-7455 strip trailing dummy argument ("access constructor tag") from synthetic constructors which
+ // are added when an inner class needs to access a private constructor.
+ init
+ case _ =>
+ paramsNoOuter
+ }
+
info = MethodType(newParams, clazz.tpe)
}
sym.setInfo(info)
@@ -943,9 +952,14 @@ abstract class ClassfileParser {
case ENUM_TAG =>
val t = pool.getType(index)
val n = pool.getName(in.nextChar)
- val s = t.typeSymbol.companionModule.info.decls.lookup(n)
- assert(s != NoSymbol, t)
- Some(LiteralAnnotArg(Constant(s)))
+ val module = t.typeSymbol.companionModule
+ val s = module.info.decls.lookup(n)
+ if (s != NoSymbol) Some(LiteralAnnotArg(Constant(s)))
+ else {
+ warning(s"""While parsing annotations in ${in.file}, could not find $n in enum $module.\nThis is likely due to an implementation restriction: an annotation argument cannot refer to a member of the annotated class (SI-7014).""")
+ None
+ }
+
case ARRAY_TAG =>
val arr = new ArrayBuffer[ClassfileAnnotArg]()
var hasError = false
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index 4891ef2fd1..1a1137f402 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -523,7 +523,10 @@ abstract class Constructors extends Transform with ast.TreeDSL {
/** Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) */
def splitAtSuper(stats: List[Tree]) = {
- def isConstr(tree: Tree) = (tree.symbol ne null) && tree.symbol.isConstructor
+ def isConstr(tree: Tree): Boolean = tree match {
+ case Block(_, expr) => isConstr(expr) // SI-6481 account for named argument blocks
+ case _ => (tree.symbol ne null) && tree.symbol.isConstructor
+ }
val (pre, rest0) = stats span (!isConstr(_))
val (supercalls, rest) = rest0 span (isConstr(_))
(pre ::: supercalls, rest)
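The named-argument case that motivates looking through Block here (SI-6481), as user-level code (class names are made up):

```
class Parent(val a: Int, val b: Int)

// Out-of-order named arguments make the compiler wrap the super-constructor call
// in a Block of temporaries, so isConstr must inspect the Block's result expression.
class Child extends Parent(b = 2, a = 1)
```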
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index ead6ef288c..76249974ac 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -341,11 +341,6 @@ abstract class Erasure extends AddInterfaces
}
}
- // Each primitive value class has its own getClass for ultra-precise class object typing.
- private lazy val primitiveGetClassMethods = Set[Symbol](Any_getClass, AnyVal_getClass) ++ (
- ScalaValueClasses map (_.tpe member nme.getClass_)
- )
-
// ## requires a little translation
private lazy val poundPoundMethods = Set[Symbol](Any_##, Object_##)
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index c9c68d080d..e92450c9c0 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -119,7 +119,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* @param mixinClass The mixin class that produced the superaccessor
*/
private def rebindSuper(base: Symbol, member: Symbol, mixinClass: Symbol): Symbol =
- afterPickler {
+ afterSpecialize {
var bcs = base.info.baseClasses.dropWhile(mixinClass != _).tail
var sym: Symbol = NoSymbol
debuglog("starting rebindsuper " + base + " " + member + ":" + member.tpe +
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 39716d4ed0..7e85647592 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -844,7 +844,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("%s expands to %s in %s".format(sym, specMember.name.decode, pp(env)))
info(specMember) = NormalizedMember(sym)
newOverload(sym, specMember, env)
- owner.info.decls.enter(specMember)
+ // if this is a class, we insert the normalized member in scope,
+ // if this is a method, there's no attached scope for it (EmptyScope)
+ val decls = owner.info.decls
+ if (decls != EmptyScope)
+ decls.enter(specMember)
specMember
}
}
@@ -1264,7 +1268,35 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
protected override def newBodyDuplicator(context: Context) = new BodyDuplicator(context)
+ }
+ /** Introduced to fix SI-7343: Phase ordering problem between Duplicators and Specialization.
+ * Brief explanation: specialization rewires class parents during info transformation, and
+ * the new info then guides the tree changes. But if a symbol is created during duplication,
+ * which runs after specialization, its info is not visited and thus the corresponding tree
+ * is not specialized. One manifestation is the following:
+ * ```
+ * object Test {
+ * class Parent[@specialized(Int) T]
+ *
+ * def spec_method[@specialized(Int) T](t: T, expectedXSuper: String) = {
+ * class X extends Parent[T]()
+ * // even in the specialized variant, the local X class
+ * // doesn't extend Parent$mcI$sp, since its symbol has
+ * // been created after specialization and was not seen
+ * // by specialzation's info transformer.
+ * ...
+ * }
+ * }
+ * ```
+ * We fix this by forcing duplication to take place before specialization.
+ *
+ * Note: The constructors phase (which also uses duplication) comes after erasure and uses the
+ * post-erasure typer => we must protect it from the beforeSpecialization phase shifting.
+ */
+ class SpecializationDuplicator(casts: Map[Symbol, Type]) extends Duplicator(casts) {
+ override def retyped(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol, env: scala.collection.Map[Symbol, Type]): Tree =
+ beforeSpecialize(super.retyped(context, tree, oldThis, newThis, env))
}
/** A tree symbol substituter that substitutes on type skolems.
@@ -1392,7 +1424,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
debuglog("[specSym] checking for rerouting: %s with \n\tsym.tpe: %s, \n\ttree.tpe: %s \n\tenv: %s \n\tname: %s"
.format(tree, symbol.tpe, tree.tpe, env, specializedName(symbol, env)))
- if (!env.isEmpty) { // a method?
+ if (env.nonEmpty) { // a method?
val specCandidates = qual.tpe.member(specializedName(symbol, env))
val specMember = specCandidates suchThat { s =>
doesConform(symbol, tree.tpe, qual.tpe.memberType(s), env)
@@ -1410,6 +1442,34 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} else None
}
+ /** Computes residual type parameters after rewiring, like "String" in the following example:
+ * ```
+ * def specMe[@specialized T, U](t: T, u: U) = ???
+ * specMe[Int, String](1, "2") => specMe$mIc$sp[String](1, "2")
+ * ```
+ */
+ def computeResidualTypeVars(baseTree: Tree, specTree: Tree, baseTargs: List[Tree], env: TypeEnv) = {
+ val baseSym: Symbol = baseTree.symbol
+ val specSym: Symbol = specTree.symbol
+ val residualTargs = baseSym.info.typeParams zip baseTargs collect {
+ case (tvar, targ) if !env.contains(tvar) || !isPrimitiveValueClass(env(tvar).typeSymbol) => targ
+ }
+
+ if (specSym.info.typeParams.isEmpty && residualTargs.nonEmpty) {
+ log("!!! Type args to be applied, but symbol says no parameters: " + ((specSym.defString, residualTargs)))
+ baseTree
+ }
+ else {
+ ifDebug(assert(residualTargs.length == specSym.info.typeParams.length,
+ "residual: %s, tparams: %s, env: %s".format(residualTargs, specSym.info.typeParams, env))
+ )
+
+ val tree1 = gen.mkTypeApply(specTree, residualTargs)
+ debuglog("rewrote " + tree + " to " + tree1)
+ localTyper.typedOperator(atPos(tree.pos)(tree1))
+ }
+ }
+
curTree = tree
tree match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
@@ -1442,43 +1502,44 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
transformSuperApply
+ // This rewires calls to specialized methods defined in a class (which have a receiver)
+ // class C {
+ // def foo[@specialized T](t: T): T = t
+ // C.this.foo(3) // TypeApply(Select(This(C), foo), List(Int)) => C.this.foo$mIc$sp(3)
+ // }
case TypeApply(sel @ Select(qual, name), targs)
- if (!specializedTypeVars(symbol.info).isEmpty && name != nme.CONSTRUCTOR) =>
- def transformTypeApply = {
+ if (specializedTypeVars(symbol.info).nonEmpty && name != nme.CONSTRUCTOR) =>
debuglog("checking typeapp for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe)
val qual1 = transform(qual)
- // log(">>> TypeApply: " + tree + ", qual1: " + qual1)
+ log(">>> TypeApply: " + tree + ", qual1: " + qual1)
specSym(qual1) match {
case Some(specMember) =>
debuglog("found " + specMember.fullName)
ifDebug(assert(symbol.info.typeParams.length == targs.length, symbol.info.typeParams + " / " + targs))
val env = typeEnv(specMember)
- val residualTargs = symbol.info.typeParams zip targs collect {
- case (tvar, targ) if !env.contains(tvar) || !isPrimitiveValueClass(env(tvar).typeSymbol) => targ
- }
- // See SI-5583. Don't know why it happens now if it didn't before.
- if (specMember.info.typeParams.isEmpty && residualTargs.nonEmpty) {
- log("!!! Type args to be applied, but symbol says no parameters: " + ((specMember.defString, residualTargs)))
- localTyper.typed(sel)
- }
- else {
- ifDebug(assert(residualTargs.length == specMember.info.typeParams.length,
- "residual: %s, tparams: %s, env: %s".format(residualTargs, specMember.info.typeParams, env))
- )
-
- val tree1 = gen.mkTypeApply(Select(qual1, specMember), residualTargs)
- debuglog("rewrote " + tree + " to " + tree1)
- localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
- }
+ computeResidualTypeVars(tree, gen.mkAttributedSelect(qual1, specMember), targs, env)
case None =>
treeCopy.TypeApply(tree, treeCopy.Select(sel, qual1, name), super.transformTrees(targs))
// See pos/exponential-spec.scala - can't call transform on the whole tree again.
// super.transform(tree)
}
+
+ // This rewires calls to specialized methods defined in the local scope. For example:
+ // def outerMethod = {
+ // def foo[@specialized T](t: T): T = t
+ // foo(3) // TypeApply(Ident(foo), List(Int)) => foo$mIc$sp(3)
+ // }
+ case TypeApply(sel @ Ident(name), targs) if name != nme.CONSTRUCTOR =>
+ val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
+ if (env.isEmpty) super.transform(tree)
+ else {
+ overloads(symbol) find (_ matchesEnv env) match {
+ case Some(Overload(specMember, _)) => computeResidualTypeVars(tree, Ident(specMember), targs, env)
+ case _ => super.transform(tree)
+ }
}
- transformTypeApply
case Select(Super(_, _), _) if illegalSpecializedInheritance(currentClass) =>
val pos = tree.pos
@@ -1629,7 +1690,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
localTyper.typed(deriveDefDef(tree)(rhs => rhs))
}
}
- transformDefDef
+ expandInnerNormalizedMembers(transformDefDef)
+
+ case ddef @ DefDef(_, _, _, _, _, _) =>
+ val tree1 = expandInnerNormalizedMembers(tree)
+ super.transform(tree1)
case ValDef(_, _, _, _) if symbol.hasFlag(SPECIALIZED) && !symbol.isParamAccessor =>
def transformValDef = {
@@ -1637,7 +1702,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val tree1 = deriveValDef(tree)(_ => body(symbol.alias).duplicate)
debuglog("now typing: " + tree1 + " in " + tree.symbol.owner.fullName)
- val d = new Duplicator(emptyEnv)
+ val d = new SpecializationDuplicator(emptyEnv)
val newValDef = d.retyped(
localTyper.context1.asInstanceOf[d.Context],
tree1,
@@ -1654,6 +1719,39 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
+ /**
+ * This performs method specialization inside a scope other than a {class, trait, object}: could be another method
+ * or a value. This specialization is much simpler, since there is no need to record the new members in the class
+ * signature; their signatures are only visible locally. It works according to the usual logic:
+ * - we use normalizeMember to create the specialized symbols
+ * - we leave DefDef stubs in the tree that are later filled in by tree duplication and adaptation
+ * @see duplicateBody
+ */
+ private def expandInnerNormalizedMembers(tree: Tree) = tree match {
+ case ddef @ DefDef(_, _, _, vparams :: Nil, _, rhs)
+ if ddef.symbol.owner.isMethod &&
+ specializedTypeVars(ddef.symbol.info).nonEmpty &&
+ !ddef.symbol.hasFlag(SPECIALIZED) =>
+
+ val sym = ddef.symbol
+ val owner = sym.owner
+ val norm = normalizeMember(owner, sym, emptyEnv)
+
+ if (norm.length > 1) {
+ // record the body for duplication
+ body(sym) = rhs
+ parameters(sym) = vparams.map(_.symbol)
+ // to avoid revisiting the member, we can set the SPECIALIZED
+ // flag. nobody has to see this anyway :)
+ sym.setFlag(SPECIALIZED)
+ // create empty bodies for specializations
+ localTyper.typed(Block(norm.tail.map(sym => DefDef(sym, { vparamss => EmptyTree })), ddef))
+ } else
+ tree
+ case _ =>
+ tree
+ }
+
/** Duplicate the body of the given method `tree` to the new symbol `source`.
*
* Knowing that the method can be invoked only in the `castmap` type environment,
@@ -1664,7 +1762,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val symbol = tree.symbol
val meth = addBody(tree, source)
- val d = new Duplicator(castmap)
+ val d = new SpecializationDuplicator(castmap)
debuglog("-->d DUPLICATING: " + meth)
d.retyped(
localTyper.context1.asInstanceOf[d.Context],
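As user-level code, the local-method case that the new TypeApply(Ident(...), ...) rewiring targets; the $mIc$sp name is shown only for intuition, and the object name is made up.

```
object LocalSpecDemo {
  def outerMethod: Int = {
    def foo[@specialized(Int) T](t: T): T = t   // normalized locally by expandInnerNormalizedMembers
    foo(3)                                      // rewired to the Int variant, conceptually foo$mIc$sp(3)
  }
}
```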
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
index 843f831ea1..ec66bf6f20 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
@@ -208,15 +208,16 @@ trait Solving extends Logic {
withLit(findModelFor(dropUnit(f, unitLit)), unitLit)
case _ =>
// partition symbols according to whether they appear in positive and/or negative literals
- val pos = new mutable.HashSet[Sym]()
- val neg = new mutable.HashSet[Sym]()
+ // SI-7020 Linked- for deterministic counter examples.
+ val pos = new mutable.LinkedHashSet[Sym]()
+ val neg = new mutable.LinkedHashSet[Sym]()
f.foreach{_.foreach{ lit =>
if (lit.pos) pos += lit.sym else neg += lit.sym
}}
// appearing in both positive and negative
- val impures = pos intersect neg
+ val impures: mutable.LinkedHashSet[Sym] = pos intersect neg
// appearing only in either positive/negative positions
- val pures = (pos ++ neg) -- impures
+ val pures: mutable.LinkedHashSet[Sym] = (pos ++ neg) -- impures
if (pures nonEmpty) {
val pureSym = pures.head
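Why the Linked- variants matter here (an illustrative snippet, not from the patch): LinkedHashSet iterates in insertion order, so choices such as `pures.head` become deterministic across runs.

```
import scala.collection.mutable

val hashed = mutable.HashSet("c", "a", "b")        // iteration order depends on hashing
val linked = mutable.LinkedHashSet("c", "a", "b")  // iterates c, a, b
println(linked.head)                               // always "c"
```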
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index b9cff5b2d3..5d6d094b44 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -697,7 +697,7 @@ trait ContextErrors {
def msgForLog = if (msg != null && (msg contains "exception during macro expansion")) msg.split(EOL).drop(1).headOption.getOrElse("?") else msg
macroLogLite("macro expansion has failed: %s".format(msgForLog))
val errorPos = if (pos != NoPosition) pos else (if (expandee.pos != NoPosition) expandee.pos else enclosingMacroPosition)
- if (msg != null) context.error(pos, msg) // issueTypeError(PosAndMsgTypeError(..)) won't work => swallows positions
+ if (msg != null) context.error(errorPos, msg) // issueTypeError(PosAndMsgTypeError(..)) won't work => swallows positions
setError(expandee)
throw MacroExpansionException
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index f6142a81be..25a1228bf6 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -37,14 +37,12 @@ abstract class Duplicators extends Analyzer {
envSubstitution = new SubstSkolemsTypeMap(env.keysIterator.toList, env.valuesIterator.toList)
debuglog("retyped with env: " + env)
+
newBodyDuplicator(context).typed(tree)
}
protected def newBodyDuplicator(context: Context) = new BodyDuplicator(context)
- def retypedMethod(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol): Tree =
- (newBodyDuplicator(context)).retypedMethod(tree.asInstanceOf[DefDef], oldThis, newThis)
-
/** Return the special typer for duplicate method bodies. */
override def newTyper(context: Context): Typer =
newBodyDuplicator(context)
@@ -186,20 +184,6 @@ abstract class Duplicators extends Analyzer {
stats.foreach(invalidate(_, owner))
}
- def retypedMethod(ddef: DefDef, oldThis: Symbol, newThis: Symbol): Tree = {
- oldClassOwner = oldThis
- newClassOwner = newThis
- invalidateAll(ddef.tparams)
- mforeach(ddef.vparamss) { vdef =>
- invalidate(vdef)
- vdef.tpe = null
- }
- ddef.symbol = NoSymbol
- enterSym(context, ddef)
- debuglog("remapping this of " + oldClassOwner + " to " + newClassOwner)
- typed(ddef)
- }
-
private def inspectTpe(tpe: Type) = {
tpe match {
case MethodType(_, res) =>
@@ -401,7 +385,8 @@ abstract class Duplicators extends Analyzer {
tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==)
}
val ntree = castType(tree, pt)
- super.typed(ntree, mode, pt)
+ val res = super.typed(ntree, mode, pt)
+ res
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 816f977890..d6ec5f2cb0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -713,9 +713,11 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
var expectedTpe = expandee.tpe
if (isNullaryInvocation(expandee)) expectedTpe = expectedTpe.finalResultType
- var typechecked = typecheck("macro def return type", expanded, expectedTpe)
- typechecked = typecheck("expected type", typechecked, pt)
- typechecked
+ // also see http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc
+ val expanded0 = duplicateAndKeepPositions(expanded)
+ val expanded1 = typecheck("macro def return type", expanded0, expectedTpe)
+ val expanded2 = typecheck("expected type", expanded1, pt)
+ expanded2
} finally {
popMacroContext()
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 379f56521b..bb938074cb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -1420,11 +1420,12 @@ trait Namers extends MethodSynthesis {
if (!annotated.isInitialized) tree match {
case defn: MemberDef =>
val ainfos = defn.mods.annotations filterNot (_ eq null) map { ann =>
+ val ctx = typer.context
+ val annCtx = ctx.make(ann)
+ annCtx.setReportErrors()
// need to be lazy, #1782. beforeTyper to allow inferView in annotation args, SI-5892.
AnnotationInfo lazily {
- val context1 = typer.context.make(ann)
- context1.setReportErrors()
- beforeTyper(newTyper(context1) typedAnnotation ann)
+ beforeTyper(newTyper(annCtx) typedAnnotation ann)
}
}
if (ainfos.nonEmpty) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index d54f894c62..aafff8a48e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -164,7 +164,7 @@ trait NamesDefaults { self: Analyzer =>
// never used for constructor calls, they always have a stable qualifier
def blockWithQualifier(qual: Tree, selected: Name) = {
- val sym = blockTyper.context.owner.newValue(unit.freshTermName("qual$"), qual.pos) setInfo qual.tpe
+ val sym = blockTyper.context.owner.newValue(unit.freshTermName("qual$"), qual.pos) setInfo uncheckedBounds(qual.tpe)
blockTyper.context.scope enter sym
val vd = atPos(sym.pos)(ValDef(sym, qual) setType NoType)
// it stays in Vegas: SI-5720, SI-5727
@@ -291,9 +291,10 @@ trait NamesDefaults { self: Analyzer =>
arg.tpe
}
).widen // have to widen or types inferred from literal defaults will be singletons
- val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo (
- if (byName) functionType(Nil, argTpe) else argTpe
- )
+ val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo {
+ val tp = if (byName) functionType(Nil, argTpe) else argTpe
+ uncheckedBounds(tp)
+ }
Some((context.scope.enter(s), byName, repeated))
})
map2(symPs, args) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 03ce710700..09c4878b2f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -225,7 +225,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
* 1.8.1 M's type is a subtype of O's type, or
* 1.8.2 M is of type []S, O is of type ()T and S <: T, or
* 1.8.3 M is of type ()S, O is of type []T and S <: T, or
- * 1.9. If M is a macro def, O cannot be deferred.
+ * 1.9. If M is a macro def, O cannot be deferred unless there's a concrete method overriding O.
* 1.10. If M is not a macro def, O cannot be a macro def.
* 2. Check that only abstract classes have deferred members
* 3. Check that concrete classes do not have deferred definitions
@@ -417,7 +417,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
} else if (other.isValue && other.isLazy && !other.isSourceMethod && !other.isDeferred &&
member.isValue && !member.isLazy) {
overrideError("must be declared lazy to override a concrete lazy value")
- } else if (other.isDeferred && member.isTermMacro) { // (1.9)
+ } else if (other.isDeferred && member.isTermMacro && member.extendedOverriddenSymbols.forall(_.isDeferred)) { // (1.9)
overrideError("cannot override an abstract method")
} else if (other.isTermMacro && !member.isTermMacro) { // (1.10)
overrideError("cannot override a macro")
@@ -1513,17 +1513,35 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
false
}
- private def checkTypeRef(tp: Type, tree: Tree) = tp match {
+ private def checkTypeRef(tp: Type, tree: Tree, skipBounds: Boolean) = tp match {
case TypeRef(pre, sym, args) =>
checkDeprecated(sym, tree.pos)
if(sym.isJavaDefined)
sym.typeParams foreach (_.cookJavaRawInfo())
- if (!tp.isHigherKinded)
+ if (!tp.isHigherKinded && !skipBounds)
checkBounds(tree, pre, sym.owner, sym.typeParams, args)
case _ =>
}
- private def checkAnnotations(tpes: List[Type], tree: Tree) = tpes foreach (tp => checkTypeRef(tp, tree))
+ private def checkTypeRefBounds(tp: Type, tree: Tree) = {
+ var skipBounds = false
+ tp match {
+ case AnnotatedType(ann :: Nil, underlying, selfSym) if ann.symbol == UncheckedBoundsClass =>
+ skipBounds = true
+ underlying
+ case TypeRef(pre, sym, args) =>
+ if (!tp.isHigherKinded && !skipBounds)
+ checkBounds(tree, pre, sym.owner, sym.typeParams, args)
+ tp
+ case _ =>
+ tp
+ }
+ }
+
+ private def checkAnnotations(tpes: List[Type], tree: Tree) = tpes foreach { tp =>
+ checkTypeRef(tp, tree, skipBounds = false)
+ checkTypeRefBounds(tp, tree)
+ }
private def doTypeTraversal(tree: Tree)(f: Type => Unit) = if (!inPattern) tree.tpe foreach f
private def applyRefchecksToAnnotations(tree: Tree): Unit = {
@@ -1551,8 +1569,9 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
doTypeTraversal(tree) {
- case AnnotatedType(annots, _, _) => applyChecks(annots)
- case _ =>
+ case tp @ AnnotatedType(annots, _, _) =>
+ applyChecks(annots)
+ case tp =>
}
case _ =>
}
@@ -1735,13 +1754,27 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
val existentialParams = new ListBuffer[Symbol]
- doTypeTraversal(tree) { // check all bounds, except those that are existential type parameters
- case ExistentialType(tparams, tpe) =>
+ var skipBounds = false
+ // check all bounds, except those that are existential type parameters
+ // or those within typed annotated with @uncheckedBounds
+ doTypeTraversal(tree) {
+ case tp @ ExistentialType(tparams, tpe) =>
existentialParams ++= tparams
- case t: TypeRef =>
- checkTypeRef(deriveTypeWithWildcards(existentialParams.toList)(t), tree)
+ case ann: AnnotatedType if ann.hasAnnotation(UncheckedBoundsClass) =>
+ // SI-7694 Allow code synthesizers to disable checking of bounds for TypeTrees based on inferred LUBs
+ // which might not conform to the constraints.
+ skipBounds = true
+ case tp: TypeRef =>
+ val tpWithWildcards = deriveTypeWithWildcards(existentialParams.toList)(tp)
+ checkTypeRef(tpWithWildcards, tree, skipBounds)
case _ =>
}
+ if (skipBounds) {
+ tree.tpe = tree.tpe.map {
+ _.filterAnnotations(_.symbol != UncheckedBoundsClass)
+ }
+ }
+
tree
case TypeApply(fn, args) =>
@@ -1793,9 +1826,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case LabelDef(_, _, _) if treeInfo.hasSynthCaseSymbol(result) =>
val old = inPattern
inPattern = true
- val res = deriveLabelDef(result)(transform)
+ val res = deriveLabelDef(result)(transform) // TODO SI-7756 Too broad! The code from the original case body should be fully refchecked!
inPattern = old
res
+ case ValDef(_, _, _, _) if treeInfo.hasSynthCaseSymbol(result) =>
+ deriveValDef(result)(transform) // SI-7716 Don't refcheck the tpt of the synthetic val that holds the selector.
case _ =>
super.transform(result)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 4950a7efef..2270e812eb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -366,11 +366,14 @@ trait TypeDiagnostics {
val strings = mutable.Map[String, Set[TypeDiag]]() withDefaultValue Set()
val names = mutable.Map[Name, Set[TypeDiag]]() withDefaultValue Set()
- def record(t: Type, sym: Symbol) = {
- val diag = TypeDiag(t, sym)
+ val localsSet = locals.toSet
- strings("" + t) += diag
- names(sym.name) += diag
+ def record(t: Type, sym: Symbol) = {
+ if (!localsSet(sym)) {
+ val diag = TypeDiag(t, sym)
+ strings("" + t) += diag
+ names(sym.name) += diag
+ }
}
for (tpe <- types ; t <- tpe) {
t match {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index c59ef4ebda..bf2170310f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -656,8 +656,7 @@ trait Typers extends Modes with Adaptations with Tags {
// To fully benefit from special casing the return type of
// getClass, we have to catch it immediately so expressions
// like x.getClass().newInstance() are typed with the type of x.
- else if ( tree.symbol.name == nme.getClass_
- && tree.tpe.params.isEmpty
+ else if ( isGetClass(tree.symbol)
// TODO: If the type of the qualifier is inaccessible, we can cause private types
// to escape scope here, e.g. pos/t1107. I'm not sure how to properly handle this
// so for now it requires the type symbol be public.
@@ -1148,7 +1147,7 @@ trait Typers extends Modes with Adaptations with Tags {
else if (
inExprModeButNot(mode, FUNmode) && !tree.isDef && // typechecking application
tree.symbol != null && tree.symbol.isTermMacro && // of a macro
- !tree.attachments.get[SuppressMacroExpansionAttachment.type].isDefined)
+ !isMacroExpansionSuppressed(tree))
macroExpand(this, tree, mode, pt)
else if ((mode & (PATTERNmode | FUNmode)) == (PATTERNmode | FUNmode))
adaptConstrPattern()
@@ -1456,8 +1455,8 @@ trait Typers extends Modes with Adaptations with Tags {
implRestriction(tree, "nested object")
//see https://issues.scala-lang.org/browse/SI-6444
//see https://issues.scala-lang.org/browse/SI-6463
- case _: ClassDef =>
- implRestriction(tree, "nested class")
+ case cd: ClassDef if !cd.symbol.isAnonymousClass => // Don't warn about partial functions, etc. SI-7571
+ implRestriction(tree, "nested class") // avoiding Type Tests that might check the $outer pointer.
case Select(sup @ Super(qual, mix), selector) if selector != nme.CONSTRUCTOR && qual.symbol == clazz && mix != tpnme.EMPTY =>
//see https://issues.scala-lang.org/browse/SI-6483
implRestriction(sup, "qualified super reference")
@@ -2011,6 +2010,8 @@ trait Typers extends Modes with Adaptations with Tags {
// !!! This method is redundant with other, less buggy ones.
def decompose(call: Tree): (Tree, List[Tree]) = call match {
+ case _ if call.isErrorTyped => // e.g. SI-7636
+ (call, Nil)
case Apply(fn, args) =>
// an object cannot be allowed to pass a reference to itself to a superconstructor
// because of initialization issues; SI-473, SI-3913, SI-6928.
@@ -4058,6 +4059,7 @@ trait Typers extends Modes with Adaptations with Tags {
findSelection(cxTree) match {
case Some((opName, treeInfo.Applied(_, targs, _))) =>
val fun = gen.mkTypeApply(Select(qual, opName), targs)
+ if (opName == nme.updateDynamic) suppressMacroExpansion(fun) // SI-7617
atPos(qual.pos)(Apply(fun, Literal(Constant(name.decode)) :: Nil))
case _ =>
setError(tree)
@@ -4230,7 +4232,9 @@ trait Typers extends Modes with Adaptations with Tags {
}
def typedAssign(lhs: Tree, rhs: Tree): Tree = {
- val lhs1 = typed(lhs, EXPRmode | LHSmode, WildcardType)
+ // see SI-7617 for an explanation of why macro expansion is suppressed
+ def typedLhs(lhs: Tree) = typed(lhs, EXPRmode | LHSmode, WildcardType)
+ val lhs1 = unsuppressMacroExpansion(typedLhs(suppressMacroExpansion(lhs)))
val varsym = lhs1.symbol
// see #2494 for double error message example
@@ -5353,7 +5357,7 @@ trait Typers extends Modes with Adaptations with Tags {
// that typecheck must not trigger macro expansions, so we explicitly prohibit them
// however we cannot do `context.withMacrosDisabled`
// because `expr` might contain nested macro calls (see SI-6673)
- val exprTyped = typed1(expr updateAttachment SuppressMacroExpansionAttachment, mode, pt)
+ val exprTyped = typed1(suppressMacroExpansion(expr), mode, pt)
exprTyped match {
case macroDef if macroDef.symbol != null && macroDef.symbol.isTermMacro && !macroDef.symbol.isErroneous =>
MacroEtaError(exprTyped)
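User-level shape of the code affected by the SI-7617 changes above, sketched with a plain Dynamic class (with macro-based selectDynamic/updateDynamic the premature expansion was the actual problem); all names below are hypothetical.

```
import scala.language.dynamics
import scala.collection.mutable

class Settings extends Dynamic {
  private val m = mutable.Map.empty[String, Any]
  def selectDynamic(name: String): Any = m(name)
  def updateDynamic(name: String)(value: Any): Unit = m(name) = value
}

object DynDemo extends App {
  val s = new Settings
  s.verbose = true      // desugars to s.updateDynamic("verbose")(true)
  println(s.verbose)    // desugars to s.selectDynamic("verbose")
}
```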
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index 471e2653cf..a62c87e713 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -16,6 +16,7 @@ import Jar.isJarOrZip
import File.pathSeparator
import java.net.MalformedURLException
import java.util.regex.PatternSyntaxException
+import scala.reflect.runtime.ReflectionUtils
/** <p>
* This module provides star expansion of '-classpath' option arguments, behaves the same as
@@ -100,7 +101,7 @@ object ClassPath {
}
/** A useful name filter. */
- def isTraitImplementation(name: String) = name endsWith "$class.class"
+ def isTraitImplementation(name: String) = ReflectionUtils.isTraitImplementation(name)
def specToURL(spec: String): Option[URL] =
try Some(new URL(spec))
@@ -161,7 +162,7 @@ object ClassPath {
}
object DefaultJavaContext extends JavaContext {
- override def isValidName(name: String) = !isTraitImplementation(name)
+ override def isValidName(name: String) = !ReflectionUtils.scalacShouldntLoadClassfile(name)
}
private def endsClass(s: String) = s.length > 6 && s.substring(s.length - 6) == ".class"
diff --git a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala b/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
index aa3b7c286d..77a19d3ead 100644
--- a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
@@ -15,7 +15,8 @@ import scala.util.Sorting
import scala.collection.mutable
import scala.tools.nsc.io.{ AbstractFile, MsilFile }
import ch.epfl.lamp.compiler.msil.{ Type => MSILType, Assembly }
-import ClassPath.{ ClassPathContext, isTraitImplementation }
+import ClassPath.ClassPathContext
+import scala.reflect.runtime.ReflectionUtils.isTraitImplementation
/** Keeping the MSIL classpath code in its own file is important to make sure
* we don't accidentally introduce a dependency on msil.jar in the jvm.
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index 4b5dcadfa2..b9541ece5d 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -8,12 +8,14 @@ import scala.tools.nsc.typechecker.Modes
import scala.tools.nsc.io.VirtualDirectory
import scala.tools.nsc.interpreter.AbstractFileClassLoader
import scala.tools.nsc.util.FreshNameCreator
+import scala.tools.nsc.ast.parser.Tokens.EOF
import scala.reflect.internal.Flags._
import scala.reflect.internal.util.{BatchSourceFile, NoSourceFile, NoFile}
import java.lang.{Class => jClass}
import scala.compat.Platform.EOL
import scala.reflect.NameTransformer
import scala.reflect.api.JavaUniverse
+import scala.reflect.io.NoAbstractFile
abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
@@ -137,7 +139,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val wrapper2 = if (!withMacrosDisabled) (currentTyper.context.withMacrosEnabled[Tree] _) else (currentTyper.context.withMacrosDisabled[Tree] _)
def wrapper (tree: => Tree) = wrapper1(wrapper2(tree))
- phase = (new Run).typerPhase // need to set a phase to something <= typerPhase, otherwise implicits in typedSelect will be disabled
+ val run = new Run
+ run.symSource(ownerClass) = NoAbstractFile // need to set file to something different from null, so that currentRun.defines works
+ phase = run.typerPhase // need to set a phase to something <= typerPhase, otherwise implicits in typedSelect will be disabled
currentTyper.context.setReportErrors() // need to manually set context mode, otherwise typer.silent will throw exceptions
reporter.reset()
@@ -166,7 +170,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
transformDuringTyper(expr, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)(
(currentTyper, expr) => {
trace("typing (implicit views = %s, macros = %s): ".format(!withImplicitViewsDisabled, !withMacrosDisabled))(showAttributed(expr, true, true, settings.Yshowsymkinds.value))
- currentTyper.silent(_.typed(expr, analyzer.EXPRmode, pt)) match {
+ currentTyper.silent(_.typed(expr, analyzer.EXPRmode, pt), reportAmbiguousErrors = false) match {
case analyzer.SilentResultValue(result) =>
trace("success: ")(showAttributed(result, true, true, settings.Yshowsymkinds.value))
result
@@ -273,14 +277,13 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
def parse(code: String): Tree = {
val run = new Run
reporter.reset()
- val wrappedCode = "object wrapper {" + EOL + code + EOL + "}"
- val file = new BatchSourceFile("<toolbox>", wrappedCode)
+ val file = new BatchSourceFile("<toolbox>", code)
val unit = new CompilationUnit(file)
phase = run.parserPhase
val parser = new syntaxAnalyzer.UnitParser(unit)
- val wrappedTree = parser.parse()
+ val parsed = parser.templateStats()
+ parser.accept(EOF)
throwIfErrors()
- val PackageDef(_, List(ModuleDef(_, _, Template(_, _, _ :: parsed)))) = wrappedTree
parsed match {
case expr :: Nil => expr
case stats :+ expr => Block(stats, expr)
@@ -402,7 +405,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
def compile(tree: u.Tree): () => Any = {
if (compiler.settings.verbose.value) println("importing "+tree)
- var ctree: compiler.Tree = importer.importTree(tree)
+ val ctree: compiler.Tree = importer.importTree(tree)
if (compiler.settings.verbose.value) println("compiling "+ctree)
compiler.compile(ctree)
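A quick toolbox sketch exercising the new parse path above (multiple top-level statements, no synthetic `object wrapper`); it assumes scala-compiler is on the classpath, and the object name is made up.

```
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox

object ToolBoxDemo extends App {
  val tb   = cm.mkToolBox()
  val tree = tb.parse("val x = 1; val y = 2; x + y")   // parsed as template statements, then combined into a Block
  println(tb.eval(tree))                               // 3
}
```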
diff --git a/src/eclipse/README.md b/src/eclipse/README.md
index 39a3f457a0..44dd3d83ea 100644
--- a/src/eclipse/README.md
+++ b/src/eclipse/README.md
@@ -11,6 +11,9 @@ IMPORTANT
Preferences/General/Workspace/Linked Resources. The value should be the absolute
path to your scala checkout. All paths in project files are relative to this one,
so nothing will work before you do so.
+Additionally, we now use Maven dependencies (e.g. JUnit), so you need to define a
+classpath variable inside Eclipse. Define `M2_REPO` in Java/Build Path/Classpath Variables
+to point to your local Maven repository (e.g. $HOME/.m2/repository).
2. The Eclipse Java compiler does not allow certain calls to restricted APIs in the
JDK. The Scala library uses such APIs, so you'd see this error:
diff --git a/src/eclipse/test-junit/.classpath b/src/eclipse/test-junit/.classpath
new file mode 100644
index 0000000000..718f7b6ece
--- /dev/null
+++ b/src/eclipse/test-junit/.classpath
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="test-junit"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="lib" path="lib/ant/ant.jar"/>
+ <classpathentry kind="lib" path="lib/jline.jar"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+ <classpathentry kind="var" path="M2_REPO/junit/junit/4.10/junit-4.10.jar"/>
+ <classpathentry kind="output" path="build-test-junit"/>
+</classpath>
diff --git a/src/eclipse/test-junit/.project b/src/eclipse/test-junit/.project
new file mode 100644
index 0000000000..052b6c1b6f
--- /dev/null
+++ b/src/eclipse/test-junit/.project
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>test-junit</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-test-junit</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/junit/classes</locationURI>
+ </link>
+ <link>
+ <name>lib</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/lib</locationURI>
+ </link>
+ <link>
+ <name>test-junit</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/test/junit</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index 6c11c5bcb5..714260fa8a 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -1011,8 +1011,11 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct:
*/
protected def subdivide(): Seq[Iterator[(K, V)]] = if (subiter ne null) {
// the case where an LNode is being iterated
- val it = subiter
- subiter = null
+ val it = newIterator(level + 1, ct, _mustInit = false)
+ it.depth = -1
+ it.subiter = this.subiter
+ it.current = null
+ this.subiter = null
advance()
this.level += 1
Seq(it, this)
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 6b6ad29074..caf91fbb0f 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -14,7 +14,7 @@ import java.util.concurrent.{ ConcurrentLinkedQueue, TimeUnit, Callable }
import java.util.concurrent.TimeUnit.{ NANOSECONDS => NANOS, MILLISECONDS ⇒ MILLIS }
import java.lang.{ Iterable => JIterable }
import java.util.{ LinkedList => JLinkedList }
-import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicBoolean }
+import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicLong, AtomicBoolean }
import scala.util.control.NonFatal
import scala.Option
@@ -101,7 +101,7 @@ trait Future[+T] extends Awaitable[T] {
// that also have an executor parameter, which
// keeps us from accidentally forgetting to use
// the executor parameter.
- private implicit def internalExecutor: ExecutionContext = Future.InternalCallbackExecutor
+ private def internalExecutor = Future.InternalCallbackExecutor
/* Callbacks */
@@ -116,9 +116,10 @@ trait Future[+T] extends Awaitable[T] {
* $callbackInContext
*/
def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = onComplete {
- case Success(v) if pf isDefinedAt v => pf(v)
+ case Success(v) =>
+ pf.applyOrElse[T, Any](v, Predef.conforms[T]) // Exploiting the cached function to avoid MatchError
case _ =>
- }(executor)
+ }
/** When this future is completed with a failure (i.e. with a throwable),
* apply the provided callback to the throwable.
@@ -134,9 +135,10 @@ trait Future[+T] extends Awaitable[T] {
* $callbackInContext
*/
def onFailure[U](callback: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete {
- case Failure(t) if NonFatal(t) && callback.isDefinedAt(t) => callback(t)
+ case Failure(t) =>
+ callback.applyOrElse[Throwable, Any](t, Predef.conforms[Throwable]) // Exploiting the cached function to avoid MatchError
case _ =>
- }(executor)
+ }
/** When this future is completed, either through an exception, or a value,
* apply the provided function.
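The applyOrElse trick used in onSuccess/onFailure above, shown in isolation with illustrative values: a single call both tests and applies the partial function, so there is no separate isDefinedAt/apply pair and no MatchError.

```
object ApplyOrElseDemo extends App {
  val pf: PartialFunction[Int, String] = { case 1 => "one" }
  println(pf.applyOrElse(1, (i: Int) => i))   // "one"  -- pf is defined at 1
  println(pf.applyOrElse(2, (i: Int) => i))   // 2      -- falls back to the default, no MatchError
}
```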
@@ -186,13 +188,12 @@ trait Future[+T] extends Awaitable[T] {
* and throws a corresponding exception if the original future fails.
*/
def failed: Future[Throwable] = {
+ implicit val ec = internalExecutor
val p = Promise[Throwable]()
-
onComplete {
case Failure(t) => p success t
case Success(v) => p failure (new NoSuchElementException("Future.failed not completed with a throwable."))
}
-
p.future
}
@@ -203,10 +204,7 @@ trait Future[+T] extends Awaitable[T] {
*
* Will not be called if the future fails.
*/
- def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = onComplete {
- case Success(r) => f(r)
- case _ => // do nothing
- }(executor)
+ def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = onComplete { _ foreach f }
/** Creates a new future by applying the 's' function to the successful result of
* this future, or the 'f' function to the failed result. If there is any non-fatal
@@ -221,19 +219,11 @@ trait Future[+T] extends Awaitable[T] {
*/
def transform[S](s: T => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = {
val p = Promise[S]()
-
+ // transform on Try has the wrong shape for us here
onComplete {
- case result =>
- try {
- result match {
- case Failure(t) => p failure f(t)
- case Success(r) => p success s(r)
- }
- } catch {
- case NonFatal(t) => p failure t
- }
- }(executor)
-
+ case Success(r) => p complete Try(s(r))
+ case Failure(t) => p complete Try(throw f(t)) // will throw fatal errors!
+ }
p.future
}
@@ -245,19 +235,7 @@ trait Future[+T] extends Awaitable[T] {
*/
def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = { // transform(f, identity)
val p = Promise[S]()
-
- onComplete {
- case result =>
- try {
- result match {
- case Success(r) => p success f(r)
- case f: Failure[_] => p complete f.asInstanceOf[Failure[S]]
- }
- } catch {
- case NonFatal(t) => p failure t
- }
- }(executor)
-
+ onComplete { v => p complete (v map f) }
p.future
}
@@ -269,21 +247,16 @@ trait Future[+T] extends Awaitable[T] {
* $forComprehensionExamples
*/
def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = {
- val p = Promise[S]()
-
+ import impl.Promise.DefaultPromise
+ val p = new DefaultPromise[S]()
onComplete {
case f: Failure[_] => p complete f.asInstanceOf[Failure[S]]
- case Success(v) =>
- try {
- f(v).onComplete({
- case f: Failure[_] => p complete f.asInstanceOf[Failure[S]]
- case Success(v) => p success v
- })(internalExecutor)
- } catch {
- case NonFatal(t) => p failure t
- }
- }(executor)
-
+ case Success(v) => try f(v) match {
+ // If possible, link DefaultPromises to avoid space leaks
+ case dp: DefaultPromise[_] => dp.asInstanceOf[DefaultPromise[S]].linkRootOf(p)
+ case fut => fut onComplete p.complete
+ } catch { case NonFatal(t) => p failure t }
+ }
p.future
}
@@ -303,34 +276,14 @@ trait Future[+T] extends Awaitable[T] {
* Await.result(h, Duration.Zero) // throw a NoSuchElementException
* }}}
*/
- def filter(pred: T => Boolean)(implicit executor: ExecutionContext): Future[T] = {
- val p = Promise[T]()
-
- onComplete {
- case f: Failure[_] => p complete f.asInstanceOf[Failure[T]]
- case Success(v) =>
- try {
- if (pred(v)) p success v
- else p failure new NoSuchElementException("Future.filter predicate is not satisfied")
- } catch {
- case NonFatal(t) => p failure t
- }
- }(executor)
-
- p.future
- }
+ def filter(pred: T => Boolean)(implicit executor: ExecutionContext): Future[T] =
+ map {
+ r => if (pred(r)) r else throw new NoSuchElementException("Future.filter predicate is not satisfied")
+ }
/** Used by for-comprehensions.
*/
final def withFilter(p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = filter(p)(executor)
- // final def withFilter(p: T => Boolean) = new FutureWithFilter[T](this, p)
-
- // final class FutureWithFilter[+S](self: Future[S], p: S => Boolean) {
- // def foreach(f: S => Unit): Unit = self filter p foreach f
- // def map[R](f: S => R) = self filter p map f
- // def flatMap[R](f: S => Future[R]) = self filter p flatMap f
- // def withFilter(q: S => Boolean): FutureWithFilter[S] = new FutureWithFilter[S](self, x => p(x) && q(x))
- // }
/** Creates a new future by mapping the value of the current future, if the given partial function is defined at that value.
*
@@ -352,22 +305,10 @@ trait Future[+T] extends Awaitable[T] {
* Await.result(h, Duration.Zero) // throw a NoSuchElementException
* }}}
*/
- def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = {
- val p = Promise[S]()
-
- onComplete {
- case f: Failure[_] => p complete f.asInstanceOf[Failure[S]]
- case Success(v) =>
- try {
- if (pf.isDefinedAt(v)) p success pf(v)
- else p failure new NoSuchElementException("Future.collect partial function is not defined at: " + v)
- } catch {
- case NonFatal(t) => p failure t
- }
- }(executor)
-
- p.future
- }
+ def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] =
+ map {
+ r => pf.applyOrElse(r, (t: T) => throw new NoSuchElementException("Future.collect partial function is not defined at: " + t))
+ }
/** Creates a new future that will handle any matching throwable that this
* future might contain. If there is no match, or if this future contains
@@ -383,9 +324,7 @@ trait Future[+T] extends Awaitable[T] {
*/
def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = {
val p = Promise[U]()
-
- onComplete { case tr => p.complete(tr recover pf) }(executor)
-
+ onComplete { v => p complete (v recover pf) }
p.future
}
@@ -404,17 +343,10 @@ trait Future[+T] extends Awaitable[T] {
*/
def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = {
val p = Promise[U]()
-
onComplete {
- case Failure(t) if pf isDefinedAt t =>
- try {
- p completeWith pf(t)
- } catch {
- case NonFatal(t) => p failure t
- }
- case otherwise => p complete otherwise
- }(executor)
-
+ case Failure(t) => try pf.applyOrElse(t, (_: Throwable) => this) onComplete p.complete catch { case NonFatal(t) => p failure t }
+ case other => p complete other
+ }
p.future
}
@@ -427,19 +359,12 @@ trait Future[+T] extends Awaitable[T] {
* with the throwable stored in `that`.
*/
def zip[U](that: Future[U]): Future[(T, U)] = {
+ implicit val ec = internalExecutor
val p = Promise[(T, U)]()
-
- this onComplete {
+ onComplete {
case f: Failure[_] => p complete f.asInstanceOf[Failure[(T, U)]]
- case Success(r) =>
- that onSuccess {
- case r2 => p success ((r, r2))
- }
- that onFailure {
- case f => p failure f
- }
+ case Success(s) => that onComplete { c => p.complete(c map { s2 => (s, s2) }) }
}
-
p.future
}
@@ -458,6 +383,7 @@ trait Future[+T] extends Awaitable[T] {
* }}}
*/
def fallbackTo[U >: T](that: Future[U]): Future[U] = {
+ implicit val ec = internalExecutor
val p = Promise[U]()
onComplete {
case s @ Success(_) => p complete s
@@ -470,23 +396,13 @@ trait Future[+T] extends Awaitable[T] {
* that conforms to `S`'s erased type or a `ClassCastException` otherwise.
*/
def mapTo[S](implicit tag: ClassTag[S]): Future[S] = {
- def boxedType(c: Class[_]): Class[_] = {
+ implicit val ec = internalExecutor
+ val boxedClass = {
+ val c = tag.runtimeClass
if (c.isPrimitive) Future.toBoxed(c) else c
}
-
- val p = Promise[S]()
-
- onComplete {
- case f: Failure[_] => p complete f.asInstanceOf[Failure[S]]
- case Success(t) =>
- p complete (try {
- Success(boxedType(tag.runtimeClass).cast(t).asInstanceOf[S])
- } catch {
- case e: ClassCastException => Failure(e)
- })
- }
-
- p.future
+ require(boxedClass ne null)
+ map(s => boxedClass.cast(s).asInstanceOf[S])
}
/** Applies the side-effecting function to the result of this future, and returns
@@ -514,11 +430,9 @@ trait Future[+T] extends Awaitable[T] {
*/
def andThen[U](pf: PartialFunction[Try[T], U])(implicit executor: ExecutionContext): Future[T] = {
val p = Promise[T]()
-
onComplete {
- case r => try if (pf isDefinedAt r) pf(r) finally p complete r
- }(executor)
-
+ case r => try pf.applyOrElse[Try[T], Any](r, Predef.conforms[Try[T]]) finally p complete r
+ }
p.future
}
@@ -579,14 +493,12 @@ object Future {
} map (_.result)
}
- /** Returns a `Future` to the result of the first future in the list that is completed.
+ /** Returns a new `Future` to the result of the first future in the list that is completed.
*/
def firstCompletedOf[T](futures: TraversableOnce[Future[T]])(implicit executor: ExecutionContext): Future[T] = {
val p = Promise[T]()
-
val completeFirst: Try[T] => Unit = p tryComplete _
- futures.foreach(_ onComplete completeFirst)
-
+ futures foreach { _ onComplete completeFirst }
p.future
}
@@ -626,7 +538,7 @@ object Future {
* }}}
*/
def fold[T, R](futures: TraversableOnce[Future[T]])(zero: R)(foldFun: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
- if (futures.isEmpty) Promise.successful(zero).future
+ if (futures.isEmpty) Future.successful(zero)
else sequence(futures).map(_.foldLeft(zero)(foldFun))
}
@@ -638,7 +550,7 @@ object Future {
* }}}
*/
def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
- if (futures.isEmpty) Promise[R].failure(new NoSuchElementException("reduce attempted on empty collection")).future
+ if (futures.isEmpty) Future.failed(new NoSuchElementException("reduce attempted on empty collection"))
else sequence(futures).map(_ reduceLeft op)
}
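The onSuccess/onFailure rewrites above lean on PartialFunction.applyOrElse so the partial function is consulted only once and an input it is not defined at falls through to a no-op instead of raising a MatchError. A standalone sketch of that pattern (the names pf and fire are illustrative; Predef.conforms is the same identity-like default the diff uses on 2.10/2.11):

    val pf: PartialFunction[Int, String] = { case n if n > 0 => s"positive: $n" }

    // applyOrElse runs the guard once; the conforms default swallows undefined
    // inputs rather than throwing MatchError.
    def fire(n: Int): Unit = pf.applyOrElse[Int, Any](n, Predef.conforms[Int])

    fire(3)  // callback body runs
    fire(-1) // falls through silently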
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 52f1075137..c9b2a15f2f 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -8,11 +8,14 @@
package scala.concurrent.impl
-import scala.concurrent.{ ExecutionContext, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException }
+import scala.concurrent.{ ExecutionContext, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException, blocking }
+import scala.concurrent.Future.InternalCallbackExecutor
import scala.concurrent.duration.{ Duration, Deadline, FiniteDuration, NANOSECONDS }
import scala.annotation.tailrec
import scala.util.control.NonFatal
import scala.util.{ Try, Success, Failure }
+import java.io.ObjectInputStream
+import java.util.concurrent.locks.AbstractQueuedSynchronizer
private[concurrent] trait Promise[T] extends scala.concurrent.Promise[T] with scala.concurrent.Future[T] {
def future: this.type = this
@@ -53,98 +56,262 @@ private[concurrent] object Promise {
case t => Failure(t)
}
+ /**
+ * Latch used to implement waiting on a DefaultPromise's result.
+ *
+ * Inspired by: http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/main/java/util/concurrent/locks/AbstractQueuedSynchronizer.java
+ * Written by Doug Lea with assistance from members of JCP JSR-166
+ * Expert Group and released to the public domain, as explained at
+ * http://creativecommons.org/publicdomain/zero/1.0/
+ */
+ private final class CompletionLatch[T] extends AbstractQueuedSynchronizer with (Try[T] => Unit) {
+ override protected def tryAcquireShared(ignored: Int): Int = if (getState != 0) 1 else -1
+ override protected def tryReleaseShared(ignore: Int): Boolean = {
+ setState(1)
+ true
+ }
+ override def apply(ignored: Try[T]): Unit = releaseShared(1)
+ }
+
+
/** Default promise implementation.
+ *
+ * A DefaultPromise has three possible states. It can be:
+ *
+ * 1. Incomplete, with an associated list of callbacks waiting on completion.
+ * 2. Complete, with a result.
+ * 3. Linked to another DefaultPromise.
+ *
+ * If a DefaultPromise is linked to another DefaultPromise, then it will
+ * delegate all its operations to that other promise. This means that two
+ * DefaultPromises that are linked will appear, to external callers, to have
+ * exactly the same state and behaviour. E.g. they will both appear to be
+ * either complete or incomplete, and with the same values.
+ *
+ * A DefaultPromise stores its state entirely in the AnyRef cell exposed by
+ * AbstractPromise. The type of object stored in the cell fully describes the
+ * current state of the promise.
+ *
+ * 1. List[CallbackRunnable] - The promise is incomplete and has zero or more callbacks
+ * to call when it is eventually completed.
+ * 2. Try[T] - The promise is complete and now contains its value.
+ * 3. DefaultPromise[T] - The promise is linked to another promise.
+ *
+ * The ability to link DefaultPromises is needed to prevent memory leaks when
+ * using Future.flatMap. The previous implementation of Future.flatMap used
+ * onComplete handlers to propagate the ultimate value of a flatMap operation
+ * to its promise. Recursive calls to flatMap built a chain of onComplete
+ * handlers and promises. Unfortunately none of the handlers or promises in
+ * the chain could be collected until the handlers had been called and
+ * detached, which only happened when the final flatMap future was completed.
+ * (In some situations, such as infinite streams, this would never actually
+ * happen.) Because the promise implementation internally
+ * created references between promises, and these references were invisible to
+ * user code, it was easy for user code to accidentally build large chains of
+ * promises and thereby leak memory.
+ *
+ * The problem of leaks is solved by automatically breaking these chains of
+ * promises, so that promises don't refer to each other in a long chain. This
+ * allows each promise to be individually collected. The idea is to "flatten"
+ * the chain of promises, so that instead of each promise pointing to its
+ * neighbour, they instead point directly to the promise at the root of the
+ * chain. This means that only the root promise is referenced, and all the
+ * other promises are available for garbage collection as soon as they're no
+ * longer referenced by user code.
+ *
+ * To make the chains flattenable, the concept of linking promises together
+ * needed to become an explicit feature of the DefaultPromise implementation,
+ * so that the implementation can navigate and rewire links as needed. The idea
+ * of linking promises is based on the
+ * [[https://github.com/twitter/util/blob/master/util-core/src/main/scala/com/twitter/util/Promise.scala Twitter promise implementation]].
+ *
+ * In practice, flattening the chain cannot always be done perfectly. When a
+ * promise is added to the end of the chain, it scans the chain and links
+ * directly to the root promise. This prevents the chain from growing forwards.
+ * But the root promise for a chain can change, causing the chain to grow
+ * backwards, and leaving all previously-linked promises pointing at a promise
+ * which is no longer the root promise.
+ *
+ * To mitigate the problem of the root promise changing, whenever a promise's
+ * methods are called and it needs a reference to its root promise, it calls
+ * the `compressedRoot()` method. This method re-scans the promise chain to
+ * get the root promise, and also compresses its links so that it links
+ * directly to whatever the current root promise is. This ensures that the
+ * chain is flattened whenever `compressedRoot()` is called. And since
+ * `compressedRoot()` is called at every possible opportunity (when getting a
+ * promise's value, when adding an onComplete handler, etc), this will happen
+ * frequently. Unfortunately, even this eager relinking doesn't absolutely
+ * guarantee that the chain will be flattened and that leaks cannot occur.
+ * However eager relinking does greatly reduce the chance that leaks will
+ * occur.
+ *
+ * Future.flatMap links DefaultPromises together by calling the `linkRootOf`
+ * method. This is the only externally visible interface to linked
+ * DefaultPromises, and `linkRootOf` is currently only designed to be called
+ * by Future.flatMap.
*/
class DefaultPromise[T] extends AbstractPromise with Promise[T] { self =>
- updateState(null, Nil) // Start at "No callbacks"
+ updateState(null, Nil) // The promise is incomplete and has no callbacks
- protected final def tryAwait(atMost: Duration): Boolean = {
- @tailrec
- def awaitUnsafe(deadline: Deadline, nextWait: FiniteDuration): Boolean = {
- if (!isCompleted && nextWait > Duration.Zero) {
- val ms = nextWait.toMillis
- val ns = (nextWait.toNanos % 1000000l).toInt // as per object.wait spec
-
- synchronized { if (!isCompleted) wait(ms, ns) }
-
- awaitUnsafe(deadline, deadline.timeLeft)
- } else
- isCompleted
+ /** Get the root promise for this promise, compressing the link chain to that
+ * promise if necessary.
+ *
+ * For promises that are not linked, the result of calling
+ * `compressedRoot()` will be the promise itself. However, for linked promises,
+ * this method will traverse each link until it locates the root promise at
+ * the base of the link chain.
+ *
+ * As a side effect of calling this method, the link from this promise back
+ * to the root promise will be updated ("compressed") to point directly to
+ * the root promise. This allows intermediate promises in the link chain to
+ * be garbage collected. Also, subsequent calls to this method should be
+ * faster as the link chain will be shorter.
+ */
+ @tailrec
+ private def compressedRoot(): DefaultPromise[T] = {
+ getState match {
+ case linked: DefaultPromise[_] =>
+ val target = linked.asInstanceOf[DefaultPromise[T]].root
+ if (linked eq target) target else if (updateState(linked, target)) target else compressedRoot()
+ case _ => this
}
- @tailrec
- def awaitUnbounded(): Boolean = {
- if (isCompleted) true
- else {
- synchronized { if (!isCompleted) wait() }
- awaitUnbounded()
- }
+ }
+
+ /** Get the promise at the root of the chain of linked promises. Used by `compressedRoot()`.
+ * The `compressedRoot()` method should be called instead of this method, as it is important
+ * to compress the link chain whenever possible.
+ */
+ @tailrec
+ private def root: DefaultPromise[T] = {
+ getState match {
+ case linked: DefaultPromise[_] => linked.asInstanceOf[DefaultPromise[T]].root
+ case _ => this
}
+ }
+ /** Try waiting for this promise to be completed.
+ */
+ protected final def tryAwait(atMost: Duration): Boolean = if (!isCompleted) {
import Duration.Undefined
+ import scala.concurrent.Future.InternalCallbackExecutor
atMost match {
- case u if u eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period")
- case Duration.Inf => awaitUnbounded
- case Duration.MinusInf => isCompleted
- case f: FiniteDuration => if (f > Duration.Zero) awaitUnsafe(f.fromNow, f) else isCompleted
+ case e if e eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period")
+ case Duration.Inf =>
+ val l = new CompletionLatch[T]()
+ onComplete(l)(InternalCallbackExecutor)
+ l.acquireSharedInterruptibly(1)
+ case Duration.MinusInf => // Drop out
+ case f: FiniteDuration =>
+ if (f > Duration.Zero) {
+ val l = new CompletionLatch[T]()
+ onComplete(l)(InternalCallbackExecutor)
+ l.tryAcquireSharedNanos(1, f.toNanos)
+ }
}
- }
+
+ isCompleted
+ } else true // Already completed
@throws(classOf[TimeoutException])
@throws(classOf[InterruptedException])
def ready(atMost: Duration)(implicit permit: CanAwait): this.type =
- if (isCompleted || tryAwait(atMost)) this
+ if (tryAwait(atMost)) this
else throw new TimeoutException("Futures timed out after [" + atMost + "]")
@throws(classOf[Exception])
def result(atMost: Duration)(implicit permit: CanAwait): T =
- ready(atMost).value.get match {
- case Failure(e) => throw e
- case Success(r) => r
- }
+ ready(atMost).value.get.get // ready throws TimeoutException on timeout, so value.get is safe here
- def value: Option[Try[T]] = getState match {
+ def value: Option[Try[T]] = value0
+
+ @tailrec
+ private def value0: Option[Try[T]] = getState match {
case c: Try[_] => Some(c.asInstanceOf[Try[T]])
+ case _: DefaultPromise[_] => compressedRoot().value0
case _ => None
}
- override def isCompleted: Boolean = getState match { // Cheaper than boxing result into Option due to "def value"
+ override def isCompleted: Boolean = isCompleted0
+
+ @tailrec
+ private def isCompleted0: Boolean = getState match {
case _: Try[_] => true
+ case _: DefaultPromise[_] => compressedRoot().isCompleted0
case _ => false
}
def tryComplete(value: Try[T]): Boolean = {
val resolved = resolveTry(value)
- (try {
- @tailrec
- def tryComplete(v: Try[T]): List[CallbackRunnable[T]] = {
- getState match {
- case raw: List[_] =>
- val cur = raw.asInstanceOf[List[CallbackRunnable[T]]]
- if (updateState(cur, v)) cur else tryComplete(v)
- case _ => null
- }
- }
- tryComplete(resolved)
- } finally {
- synchronized { notifyAll() } //Notify any evil blockers
- }) match {
+ tryCompleteAndGetListeners(resolved) match {
case null => false
case rs if rs.isEmpty => true
case rs => rs.foreach(r => r.executeWithValue(resolved)); true
}
}
+ /** Called by `tryComplete` to store the resolved value and get the list of
+ * listeners, or `null` if it is already completed.
+ */
+ @tailrec
+ private def tryCompleteAndGetListeners(v: Try[T]): List[CallbackRunnable[T]] = {
+ getState match {
+ case raw: List[_] =>
+ val cur = raw.asInstanceOf[List[CallbackRunnable[T]]]
+ if (updateState(cur, v)) cur else tryCompleteAndGetListeners(v)
+ case _: DefaultPromise[_] =>
+ compressedRoot().tryCompleteAndGetListeners(v)
+ case _ => null
+ }
+ }
+
def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = {
val preparedEC = executor.prepare
val runnable = new CallbackRunnable[T](preparedEC, func)
+ dispatchOrAddCallback(runnable)
+ }
- @tailrec //Tries to add the callback, if already completed, it dispatches the callback to be executed
- def dispatchOrAddCallback(): Unit =
- getState match {
- case r: Try[_] => runnable.executeWithValue(r.asInstanceOf[Try[T]])
- case listeners: List[_] => if (updateState(listeners, runnable :: listeners)) () else dispatchOrAddCallback()
- }
- dispatchOrAddCallback()
+ /** Tries to add the callback; if the promise is already completed, it dispatches the callback to be executed.
+ * Used by `onComplete()` to add callbacks to a promise and by `link()` to transfer callbacks
+ * to the root promise when linking two promises together.
+ */
+ @tailrec
+ private def dispatchOrAddCallback(runnable: CallbackRunnable[T]): Unit = {
+ getState match {
+ case r: Try[_] => runnable.executeWithValue(r.asInstanceOf[Try[T]])
+ case _: DefaultPromise[_] => compressedRoot().dispatchOrAddCallback(runnable)
+ case listeners: List[_] => if (updateState(listeners, runnable :: listeners)) () else dispatchOrAddCallback(runnable)
+ }
+ }
+
+ /** Link this promise to the root of another promise using `link()`. Should only
+ * be called by Future.flatMap.
+ */
+ protected[concurrent] final def linkRootOf(target: DefaultPromise[T]): Unit = link(target.compressedRoot())
+
+ /** Link this promise to another promise so that both promises share the same
+ * externally-visible state. Depending on the current state of this promise, this
+ * may involve different things. For example, any onComplete listeners will need
+ * to be transferred.
+ *
+ * If this promise is already completed, then the same effect as linking -
+ * sharing the same completed value - is achieved by simply sending this
+ * promise's result to the target promise.
+ */
+ @tailrec
+ private def link(target: DefaultPromise[T]): Unit = if (this ne target) {
+ getState match {
+ case r: Try[_] =>
+ if (!target.tryComplete(r.asInstanceOf[Try[T]])) {
+ // Currently linking is done from Future.flatMap, which should ensure only
+ // one promise can be completed. Therefore this situation is unexpected.
+ throw new IllegalStateException("Cannot link completed promises together")
+ }
+ case _: DefaultPromise[_] =>
+ compressedRoot().link(target)
+ case listeners: List[_] => if (updateState(listeners, target)) {
+ if (!listeners.isEmpty) listeners.asInstanceOf[List[CallbackRunnable[T]]].foreach(target.dispatchOrAddCallback(_))
+ } else link(target)
+ }
}
}
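For context on the linking machinery above: a recursive flatMap loop used to retain every intermediate promise until the final future completed. A small loop of that shape (illustrative only, not taken from this patch) is:

    import scala.concurrent.{ ExecutionContext, Future }
    import ExecutionContext.Implicits.global

    // Each step allocates another promise; with linkRootOf the intermediate
    // DefaultPromises collapse onto a single root and become collectable.
    def countDown(n: Int): Future[Int] =
      if (n <= 0) Future.successful(0)
      else Future(n - 1).flatMap(countDown)

    countDown(100000)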
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index 7c14ed3a9e..62528e15fa 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -159,7 +159,7 @@ object BigDecimal {
* @author Stephane Micheloud
* @version 1.0
*/
-@deprecatedInheritance("This class will me made final.", "2.10.0")
+@deprecatedInheritance("This class will be made final.", "2.10.0")
class BigDecimal(
val bigDecimal: BigDec,
val mc: MathContext)
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index fdba0ec716..58838f13a7 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -114,7 +114,7 @@ object BigInt {
* @author Martin Odersky
* @version 1.0, 15/07/2003
*/
-@deprecatedInheritance("This class will me made final.", "2.10.0")
+@deprecatedInheritance("This class will be made final.", "2.10.0")
class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericConversions with Serializable {
/** Returns the hash code for this BigInt. */
override def hashCode(): Int =
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index cc145134c4..d04e5e48fe 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -72,7 +72,7 @@ private[scala] trait PropertiesTrait {
* it is an RC, Beta, etc. or was built from source, or if the version
* cannot be read.
*/
- val releaseVersion =
+ val releaseVersion =
for {
v <- scalaPropOrNone("maven.version.number")
if !(v endsWith "-SNAPSHOT")
@@ -86,7 +86,7 @@ private[scala] trait PropertiesTrait {
* @return Some(version) if this is a non-final version, None if this
* is a final release or the version cannot be read.
*/
- val developmentVersion =
+ val developmentVersion =
for {
v <- scalaPropOrNone("maven.version.number")
if v endsWith "-SNAPSHOT"
@@ -119,8 +119,7 @@ private[scala] trait PropertiesTrait {
*/
def lineSeparator = propOrElse("line.separator", "\n")
- /** Various well-known properties.
- */
+ /* Various well-known properties. */
def javaClassPath = propOrEmpty("java.class.path")
def javaHome = propOrEmpty("java.home")
def javaVendor = propOrEmpty("java.vendor")
@@ -129,6 +128,10 @@ private[scala] trait PropertiesTrait {
def javaVmName = propOrEmpty("java.vm.name")
def javaVmVendor = propOrEmpty("java.vm.vendor")
def javaVmVersion = propOrEmpty("java.vm.version")
+ // this property must remain less-well-known until 2.11
+ private def javaSpecVersion = propOrEmpty("java.specification.version")
+ //private def javaSpecVendor = propOrEmpty("java.specification.vendor")
+ //private def javaSpecName = propOrEmpty("java.specification.name")
def osName = propOrEmpty("os.name")
def scalaHome = propOrEmpty("scala.home")
def tmpDir = propOrEmpty("java.io.tmpdir")
@@ -136,10 +139,13 @@ private[scala] trait PropertiesTrait {
def userHome = propOrEmpty("user.home")
def userName = propOrEmpty("user.name")
- /** Some derived values.
- */
+ /* Some derived values. */
+ /** Returns `true` iff the underlying operating system is a version of Microsoft Windows. */
def isWin = osName startsWith "Windows"
- def isMac = javaVendor startsWith "Apple"
+ // See http://mail.openjdk.java.net/pipermail/macosx-port-dev/2012-November/005148.html for
+ // the reason why we don't follow developer.apple.com/library/mac/#technotes/tn2002/tn2110.
+ /** Returns `true` iff the underlying operating system is a version of Apple Mac OSX. */
+ def isMac = osName startsWith "Mac OS X"
// This is looking for javac, tools.jar, etc.
// Tries JDK_HOME first, then the more common but likely jre JAVA_HOME,
@@ -150,18 +156,29 @@ private[scala] trait PropertiesTrait {
def scalaCmd = if (isWin) "scala.bat" else "scala"
def scalacCmd = if (isWin) "scalac.bat" else "scalac"
- /** Can the java version be determined to be at least as high as the argument?
- * Hard to properly future proof this but at the rate 1.7 is going we can leave
- * the issue for our cyborg grandchildren to solve.
+ /** Compares the given specification version to the specification version of the platform.
+ *
+ * @param version a specification version of the form "major.minor"
+ * @return `true` iff the specification version of the current runtime
+ * is equal to or higher than the version denoted by the given string.
+ * @throws NumberFormatException if the given string is not a version string
+ *
+ * @example {{{
+ * // In this example, the runtime's Java specification is assumed to be at version 1.7.
+ * isJavaAtLeast("1.6") // true
+ * isJavaAtLeast("1.7") // true
+ * isJavaAtLeast("1.8") // false
+ * }}}
*/
- def isJavaAtLeast(version: String) = {
- val okVersions = version match {
- case "1.5" => List("1.5", "1.6", "1.7")
- case "1.6" => List("1.6", "1.7")
- case "1.7" => List("1.7")
- case _ => Nil
+ def isJavaAtLeast(version: String): Boolean = {
+ def parts(x: String) = {
+ val i = x.indexOf('.')
+ if (i < 0) throw new NumberFormatException("Not a version: " + x)
+ (x.substring(0, i), x.substring(i+1, x.length))
}
- okVersions exists (javaVersion startsWith _)
+ val (v, _v) = parts(version)
+ val (s, _s) = parts(javaSpecVersion)
+ s.toInt >= v.toInt && _s.toInt >= _v.toInt
}
// provide a main method so version info can be obtained by running this
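A standalone restatement of the comparison the new isJavaAtLeast performs, assuming well-formed "major.minor" strings on both sides (the helper name specAtLeast is made up for illustration):

    def specAtLeast(runtimeSpec: String, required: String): Boolean = {
      def parts(x: String): (Int, Int) = {
        val i = x.indexOf('.')
        if (i < 0) throw new NumberFormatException("Not a version: " + x)
        (x.substring(0, i).toInt, x.substring(i + 1).toInt)
      }
      val (reqMajor, reqMinor) = parts(required)
      val (runMajor, runMinor) = parts(runtimeSpec)
      runMajor >= reqMajor && runMinor >= reqMinor
    }

    specAtLeast("1.7", "1.6") // true
    specAtLeast("1.7", "1.8") // false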
diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala
index e2dac2fd55..8fcaa6423c 100644
--- a/src/partest/scala/tools/partest/DirectTest.scala
+++ b/src/partest/scala/tools/partest/DirectTest.scala
@@ -6,6 +6,7 @@
package scala.tools.partest
import scala.tools.nsc._
+import settings.ScalaVersion
import io.Directory
import util.{ SourceFile, BatchSourceFile, CommandLineParser }
import reporters.{Reporter, ConsoleReporter}
@@ -101,4 +102,30 @@ abstract class DirectTest extends App {
final def log(msg: => Any) {
if (isDebug) Console.err println msg
}
+
+ /**
+ * Run a test only if the current java version is at least the version specified.
+ */
+ def testUnderJavaAtLeast[A](version: String)(yesRun: =>A) = new TestUnderJavaAtLeast(version, { yesRun })
+
+ class TestUnderJavaAtLeast[A](version: String, yesRun: => A) {
+ val javaVersion = System.getProperty("java.specification.version")
+
+ // the "ScalaVersion" class parses Java specification versions just fine
+ val requiredJavaVersion = ScalaVersion(version)
+ val executingJavaVersion = ScalaVersion(javaVersion)
+ val shouldRun = executingJavaVersion >= requiredJavaVersion
+ val preamble = if (shouldRun) "Attempting" else "Doing fallback for"
+
+ def logInfo() = log(s"$preamble java $version specific test under java version $javaVersion")
+
+ /*
+ * If the current java version is at least 'version', then 'yesRun' is evaluated;
+ * otherwise 'fallback' is evaluated.
+ */
+ def otherwise(fallback: =>A): A = {
+ logInfo()
+ if (shouldRun) yesRun else fallback
+ }
+ }
}
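A hypothetical use of the new helper from inside a DirectTest subclass, choosing between a Java-7-specific body and a fallback (both bodies are placeholders):

    testUnderJavaAtLeast("1.7") {
      println("running the Java 7-specific part of the test")
    } otherwise {
      println("older JVM detected, running the fallback")
    }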
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index 0199400ada..dc40f9f81b 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -19,6 +19,7 @@ import java.lang.reflect.Method
import org.apache.tools.ant.Task
import org.apache.tools.ant.types.{Path, Reference, FileSet}
import org.apache.tools.ant.types.Commandline.Argument
+import scala.tools.ant.ScalaTask
/** An Ant task to execute the Scala test suite (NSC).
*
@@ -54,7 +55,7 @@ import org.apache.tools.ant.types.Commandline.Argument
*
* @author Philippe Haller
*/
-class PartestTask extends Task with CompilationPathProperty {
+class PartestTask extends Task with CompilationPathProperty with ScalaTask {
def addConfiguredPosTests(input: FileSet) {
posFiles = Some(input)
@@ -406,7 +407,7 @@ class PartestTask extends Task with CompilationPathProperty {
val allFailures = _results map (_._2) sum
val allFailedPaths = _results flatMap (_._3)
- def f = if (errorOnFailed && allFailures > 0) (sys error _) else log(_: String)
+ def f = if (errorOnFailed && allFailures > 0) buildError(_: String) else log(_: String)
def s = if (allFailures > 1) "s" else ""
val msg =
if (allFailures > 0)
diff --git a/src/reflect/scala/reflect/internal/CapturedVariables.scala b/src/reflect/scala/reflect/internal/CapturedVariables.scala
index 77909d9157..a3d2a8bd94 100644
--- a/src/reflect/scala/reflect/internal/CapturedVariables.scala
+++ b/src/reflect/scala/reflect/internal/CapturedVariables.scala
@@ -29,7 +29,7 @@ trait CapturedVariables { self: SymbolTable =>
def refType(valueRef: Map[Symbol, Symbol], objectRefClass: Symbol) =
if (isPrimitiveValueClass(symClass) && symClass != UnitClass) valueRef(symClass).tpe
else if (erasedTypes) objectRefClass.tpe
- else appliedType(objectRefClass, tpe)
+ else appliedType(objectRefClass, tpe1)
if (vble.hasAnnotation(VolatileAttr)) refType(volatileRefClass, VolatileObjectRefClass)
else refType(refClass, ObjectRefClass)
}
diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
index c198271fb1..eb70ff3f17 100644
--- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala
+++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
@@ -335,13 +335,8 @@ object ClassfileConstants {
abstract class FlagTranslation {
import Flags._
- private var isAnnotation = false
- private var isClass = false
- private def initFields(flags: Int) = {
- isAnnotation = (flags & JAVA_ACC_ANNOTATION) != 0
- isClass = false
- }
- private def translateFlag(jflag: Int): Long = (jflag: @switch) match {
+ private def isAnnotation(flags: Int): Boolean = (flags & JAVA_ACC_ANNOTATION) != 0
+ private def translateFlag(jflag: Int, isAnnotation: Boolean, isClass: Boolean): Long = (jflag: @switch) match {
case JAVA_ACC_PRIVATE => PRIVATE
case JAVA_ACC_PROTECTED => PROTECTED
case JAVA_ACC_FINAL => FINAL
@@ -351,31 +346,28 @@ object ClassfileConstants {
case JAVA_ACC_INTERFACE => if (isAnnotation) 0L else TRAIT | INTERFACE | ABSTRACT
case _ => 0L
}
- private def translateFlags(jflags: Int, baseFlags: Long): Long = {
+ private def translateFlags(jflags: Int, baseFlags: Long, isAnnotation: Boolean, isClass: Boolean): Long = {
+ def translateFlag0(jflags: Int): Long = translateFlag(jflags, isAnnotation, isClass)
var res: Long = JAVA | baseFlags
/** fast, elegant, maintainable, pick any two... */
- res |= translateFlag(jflags & JAVA_ACC_PRIVATE)
- res |= translateFlag(jflags & JAVA_ACC_PROTECTED)
- res |= translateFlag(jflags & JAVA_ACC_FINAL)
- res |= translateFlag(jflags & JAVA_ACC_SYNTHETIC)
- res |= translateFlag(jflags & JAVA_ACC_STATIC)
- res |= translateFlag(jflags & JAVA_ACC_ABSTRACT)
- res |= translateFlag(jflags & JAVA_ACC_INTERFACE)
+ res |= translateFlag0(jflags & JAVA_ACC_PRIVATE)
+ res |= translateFlag0(jflags & JAVA_ACC_PROTECTED)
+ res |= translateFlag0(jflags & JAVA_ACC_FINAL)
+ res |= translateFlag0(jflags & JAVA_ACC_SYNTHETIC)
+ res |= translateFlag0(jflags & JAVA_ACC_STATIC)
+ res |= translateFlag0(jflags & JAVA_ACC_ABSTRACT)
+ res |= translateFlag0(jflags & JAVA_ACC_INTERFACE)
res
}
def classFlags(jflags: Int): Long = {
- initFields(jflags)
- isClass = true
- translateFlags(jflags, 0)
+ translateFlags(jflags, 0, isAnnotation(jflags), isClass = true)
}
def fieldFlags(jflags: Int): Long = {
- initFields(jflags)
- translateFlags(jflags, if ((jflags & JAVA_ACC_FINAL) == 0) MUTABLE else 0)
+ translateFlags(jflags, if ((jflags & JAVA_ACC_FINAL) == 0) MUTABLE else 0 , isAnnotation(jflags), isClass = false)
}
def methodFlags(jflags: Int): Long = {
- initFields(jflags)
- translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE else 0)
+ translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE else 0, isAnnotation(jflags), isClass = false)
}
}
object FlagTranslation extends FlagTranslation { }
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index e5d9e54a16..09d7af82d1 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -110,8 +110,10 @@ trait Definitions extends api.StandardDefinitions {
/** Is symbol a numeric value class? */
def isNumericValueClass(sym: Symbol) = ScalaNumericValueClasses contains sym
- def isGetClass(sym: Symbol) =
- (sym.name == nme.getClass_) && flattensToEmpty(sym.paramss)
+ def isGetClass(sym: Symbol) = (
+ sym.name == nme.getClass_ // this condition is for performance only, this is called from `Typer#stabilize`.
+ && getClassMethods(sym)
+ )
lazy val UnitClass = valueClassSymbol(tpnme.Unit)
lazy val ByteClass = valueClassSymbol(tpnme.Byte)
@@ -805,6 +807,12 @@ trait Definitions extends api.StandardDefinitions {
lazy val Any_isInstanceOf = newT1NullaryMethod(AnyClass, nme.isInstanceOf_, FINAL)(_ => booltype)
lazy val Any_asInstanceOf = newT1NullaryMethod(AnyClass, nme.asInstanceOf_, FINAL)(_.typeConstructor)
+ lazy val primitiveGetClassMethods = Set[Symbol](Any_getClass, AnyVal_getClass) ++ (
+ ScalaValueClasses map (_.tpe member nme.getClass_)
+ )
+
+ lazy val getClassMethods: Set[Symbol] = primitiveGetClassMethods + Object_getClass
+
// A type function from T => Class[U], used to determine the return
// type of getClass calls. The returned type is:
//
@@ -975,6 +983,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val ThrowsClass = requiredClass[scala.throws[_]]
lazy val TransientAttr = requiredClass[scala.transient]
lazy val UncheckedClass = requiredClass[scala.unchecked]
+ lazy val UncheckedBoundsClass = getClassIfDefined("scala.reflect.internal.annotations.uncheckedBounds")
lazy val UnspecializedClass = requiredClass[scala.annotation.unspecialized]
lazy val VolatileAttr = requiredClass[scala.volatile]
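For context on primitiveGetClassMethods: each value class declares its own getClass member, so the set of "getClass-like" symbols is wider than Object_getClass alone. At the source level (illustrative REPL-style sketch):

    val a = 1.getClass        // Class[Int]: Int contributes its own getClass member
    val b = "abc".getClass    // Class[_ <: String] via Object.getClass
    val c = (1: Any).getClass // classOf[java.lang.Integer] at runtime (boxed)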
diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala
index 43902c1930..9a8dee1f15 100644
--- a/src/reflect/scala/reflect/internal/Importers.scala
+++ b/src/reflect/scala/reflect/internal/Importers.scala
@@ -443,7 +443,12 @@ trait Importers extends api.Importers { self: SymbolTable =>
}
})
tryFixup()
- mytree
+ // we have to be careful with position import as some shared trees
+ // like EmptyTree, emptyValDef don't support position assignment
+ if (tree.pos != NoPosition)
+ mytree.setPos(importPosition(tree.pos))
+ else
+ mytree
}
def importValDef(tree: from.ValDef): ValDef = importTree(tree).asInstanceOf[ValDef]
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
index 80d247c0ea..35cb749ede 100644
--- a/src/reflect/scala/reflect/internal/Printers.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -564,6 +564,8 @@ trait Printers extends api.Printers { self: SymbolTable =>
case refTree: RefTree =>
if (tree.symbol.name != refTree.name) print("[", tree.symbol, " aka ", refTree.name, "]")
else print(tree.symbol)
+ case defTree: DefTree =>
+ print(tree.symbol)
case _ =>
print(tree.symbol.name)
}
diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala
index b782353ed3..539d19140c 100644
--- a/src/reflect/scala/reflect/internal/StdAttachments.scala
+++ b/src/reflect/scala/reflect/internal/StdAttachments.scala
@@ -42,4 +42,35 @@ trait StdAttachments {
* (but think thrice before using that API - see the discussion at https://github.com/scala/scala/pull/1639).
*/
case object SuppressMacroExpansionAttachment
+
+ /** Suppresses macro expansion of the tree by putting SuppressMacroExpansionAttachment on it.
+ */
+ def suppressMacroExpansion(tree: Tree) = tree.updateAttachment(SuppressMacroExpansionAttachment)
+
+ /** Unsuppresses macro expansion of the tree by removing SuppressMacroExpansionAttachment from it and its children.
+ */
+ def unsuppressMacroExpansion(tree: Tree): Tree = {
+ tree.removeAttachment[SuppressMacroExpansionAttachment.type]
+ tree match {
+ // see the comment to `isMacroExpansionSuppressed` to learn why we need
+ // a special traversal strategy here
+ case Apply(fn, _) => unsuppressMacroExpansion(fn)
+ case TypeApply(fn, _) => unsuppressMacroExpansion(fn)
+ case _ => // do nothing
+ }
+ tree
+ }
+
+ /** Determines whether a tree should not be expanded, because someone has put SuppressMacroExpansionAttachment on it or one of its children.
+ */
+ def isMacroExpansionSuppressed(tree: Tree): Boolean =
+ if (tree.attachments.get[SuppressMacroExpansionAttachment.type].isDefined) true
+ else tree match {
+ // we have to account for the fact that during typechecking an expandee might become wrapped,
+ // i.e. surrounded by an inferred implicit argument application or by an inferred type argument application.
+ // in that case the expandee itself will no longer be suppressed and we need to look at the core
+ case Apply(fn, _) => isMacroExpansionSuppressed(fn)
+ case TypeApply(fn, _) => isMacroExpansionSuppressed(fn)
+ case _ => false
+ }
}
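The recursion in isMacroExpansionSuppressed matters because typechecking can wrap a suppressed expandee in inferred Apply/TypeApply nodes. A toy model of that "look at the core" traversal, using made-up case classes rather than the compiler's Tree:

    sealed trait Node
    case class Apply(fn: Node, args: List[Node]) extends Node
    case class TypeApply(fn: Node, targs: List[Node]) extends Node
    case class Core(suppressed: Boolean) extends Node

    def isSuppressed(t: Node): Boolean = t match {
      case Apply(fn, _)     => isSuppressed(fn)   // peel off inferred implicit arguments
      case TypeApply(fn, _) => isSuppressed(fn)   // peel off inferred type arguments
      case Core(s)          => s
    }

    isSuppressed(Apply(TypeApply(Core(suppressed = true), Nil), Nil)) // true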
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 2585b541ed..53b9b1d88e 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -1526,15 +1526,18 @@ trait Trees extends api.Trees { self: SymbolTable =>
}
}
- private lazy val duplicator = new Transformer {
+ private lazy val duplicator = new Duplicator(focusPositions = true)
+ private class Duplicator(focusPositions: Boolean) extends Transformer {
override val treeCopy = newStrictTreeCopier
override def transform(t: Tree) = {
val t1 = super.transform(t)
- if ((t1 ne t) && t1.pos.isRange) t1 setPos t.pos.focus
+ if ((t1 ne t) && t1.pos.isRange && focusPositions) t1 setPos t.pos.focus
t1
}
}
+ def duplicateAndKeepPositions(tree: Tree) = new Duplicator(focusPositions = false) transform tree
+
// ------ copiers -------------------------------------------
def copyDefDef(tree: Tree)(
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 0cc2b91a44..cd9f3d23c9 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -1172,6 +1172,14 @@ trait Types extends api.Types { self: SymbolTable =>
continue = false
val bcs0 = baseClasses
var bcs = bcs0
+ // omit PRIVATE LOCALS unless selector class is contained in class owning the def.
+ def admitPrivateLocal(owner: Symbol): Boolean = {
+ val selectorClass = this match {
+ case tt: ThisType => tt.sym // SI-7507 the first base class is not necessarily the selector class.
+ case _ => bcs0.head
+ }
+ selectorClass.hasTransOwner(owner)
+ }
while (!bcs.isEmpty) {
val decls = bcs.head.info.decls
var entry = decls.lookupEntry(name)
@@ -1181,10 +1189,10 @@ trait Types extends api.Types { self: SymbolTable =>
if ((flags & required) == required) {
val excl = flags & excluded
if (excl == 0L &&
- (// omit PRIVATE LOCALS unless selector class is contained in class owning the def.
+ (
(bcs eq bcs0) ||
(flags & PrivateLocal) != PrivateLocal ||
- (bcs0.head.hasTransOwner(bcs.head)))) {
+ admitPrivateLocal(bcs.head))) {
if (name.isTypeName || stableOnly && sym.isStable) {
if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
if (suspension ne null) suspension foreach (_.suspended = false)
@@ -7305,6 +7313,12 @@ trait Types extends api.Types { self: SymbolTable =>
else (ps :+ SerializableClass.tpe).toList
)
+ /** Adds the @uncheckedBounds annotation if the given `tp` has type arguments */
+ final def uncheckedBounds(tp: Type): Type = {
+ if (tp.typeArgs.isEmpty || UncheckedBoundsClass == NoSymbol) tp // second condition for backwards compatibility with older scala-reflect.jar
+ else tp.withAnnotation(AnnotationInfo marker UncheckedBoundsClass.tpe)
+ }
+
/** Members of the given class, other than those inherited
* from Any or AnyRef.
*/
diff --git a/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala
new file mode 100644
index 0000000000..a44bb54734
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala
@@ -0,0 +1,13 @@
+package scala.reflect
+package internal
+package annotations
+
+/**
+ * An annotation that designates that the annotated type should not be checked for violations of
+ * type parameter bounds in the `refchecks` phase of the compiler. This can be used by synthesized
+ * code that uses an inferred type of an expression as the type of an artifact val/def (for example,
+ * a temporary value introduced by an ANF transform). See [[https://issues.scala-lang.org/browse/SI-7694]].
+ *
+ * @since 2.10.3
+ */
+final class uncheckedBounds extends scala.annotation.StaticAnnotation
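A hypothetical sketch of how synthesized (or hand-written) code could attach this annotation to a type so that refchecks skips the bounds check; the stash method is made up for illustration and assumes scala-reflect on the classpath:

    import scala.reflect.internal.annotations.uncheckedBounds

    def stash[T](x: T): T = {
      // The annotated type of this temporary is not re-checked against
      // type parameter bounds during the refchecks phase.
      val tmp: T @uncheckedBounds = x
      tmp
    }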
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index ccc727451c..22fe7f098c 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -19,7 +19,7 @@ import scala.collection.mutable.{ HashMap, ListBuffer }
import internal.Flags._
//import scala.tools.nsc.util.ScalaClassLoader
//import scala.tools.nsc.util.ScalaClassLoader._
-import ReflectionUtils.{staticSingletonInstance, innerSingletonInstance}
+import ReflectionUtils.{staticSingletonInstance, innerSingletonInstance, scalacShouldntLoadClass}
import scala.language.existentials
import scala.runtime.{ScalaRunTime, BoxesRunTime}
import scala.reflect.internal.util.Collections._
@@ -567,15 +567,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
loadBytes[Array[String]]("scala.reflect.ScalaLongSignature") match {
case Some(slsig) =>
info(s"unpickling Scala $clazz and $module with long Scala signature")
- val byteSegments = slsig map (_.getBytes)
- val lens = byteSegments map ByteCodecs.decode
- val bytes = Array.ofDim[Byte](lens.sum)
- var len = 0
- for ((bs, l) <- byteSegments zip lens) {
- bs.copyToArray(bytes, len, l)
- len += l
- }
- unpickler.unpickle(bytes, 0, clazz, module, jclazz.getName)
+ val encoded = slsig flatMap (_.getBytes)
+ val len = ByteCodecs.decode(encoded)
+ val decoded = encoded.take(len)
+ unpickler.unpickle(decoded, 0, clazz, module, jclazz.getName)
case None =>
// class does not have a Scala signature; it's a Java class
info("translating reflection info for Java " + jclazz) //debug
@@ -678,8 +673,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val parents = try {
parentsLevel += 1
val jsuperclazz = jclazz.getGenericSuperclass
- val superclazz = if (jsuperclazz == null) AnyClass.tpe else typeToScala(jsuperclazz)
- superclazz :: (jclazz.getGenericInterfaces.toList map typeToScala)
+ val ifaces = jclazz.getGenericInterfaces.toList map typeToScala
+ val isAnnotation = (jclazz.getModifiers & JAVA_ACC_ANNOTATION) != 0
+ if (isAnnotation) AnnotationClass.tpe :: ClassfileAnnotationClass.tpe :: ifaces
+ else (if (jsuperclazz == null) AnyClass.tpe else typeToScala(jsuperclazz)) :: ifaces
} finally {
parentsLevel -= 1
}
@@ -691,6 +688,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
def enter(sym: Symbol, mods: Int) =
(if (jModifier.isStatic(mods)) module.moduleClass else clazz).info.decls enter sym
+ def enterEmptyCtorIfNecessary(): Unit = {
+ if (jclazz.getConstructors.isEmpty)
+ clazz.info.decls.enter(clazz.newClassConstructor(NoPosition))
+ }
+
for (jinner <- jclazz.getDeclaredClasses) {
jclassAsScala(jinner) // inner class is entered as a side-effect
// no need to call enter explicitly
@@ -707,6 +709,8 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
for (jconstr <- jclazz.getConstructors)
enter(jconstrAsScala(jconstr), jconstr.getModifiers)
+ enterEmptyCtorIfNecessary()
+
} :: pendingLoadActions
if (parentsLevel == 0) {
@@ -952,7 +956,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val cls =
if (jclazz.isMemberClass && !nme.isImplClassName(jname))
lookupClass
- else if (jclazz.isLocalClass0 || isInvalidClassName(jname))
+ else if (jclazz.isLocalClass0 || scalacShouldntLoadClass(jname))
// local classes and implementation classes not preserved by unpickling - treat as Java
//
// upd. but only if they cannot be loaded as top-level classes
diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
index 33ad6d2430..ffed3cc38e 100644
--- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
@@ -76,4 +76,10 @@ private[scala] object ReflectionUtils {
accessor setAccessible true
accessor invoke outer
}
+
+ def isTraitImplementation(fileName: String) = fileName endsWith "$class.class"
+
+ def scalacShouldntLoadClassfile(fileName: String) = isTraitImplementation(fileName)
+
+ def scalacShouldntLoadClass(name: scala.reflect.internal.SymbolTable#Name) = scalacShouldntLoadClassfile(name + ".class")
}
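A minimal restatement of the predicate introduced above (the helper itself is private[scala], so this is a sketch rather than a call into it): trait implementation classfiles end in "$class.class" and should be skipped by the runtime mirror.

    def isTraitImplementation(fileName: String): Boolean = fileName endsWith "$class.class"

    isTraitImplementation("Predef$class.class") // true  -- skipped by the runtime mirror
    isTraitImplementation("Predef.class")       // false -- loaded normally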
diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
index 61663f6181..b895092639 100644
--- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
@@ -4,6 +4,7 @@ package runtime
import internal.Flags
import java.lang.{Class => jClass, Package => jPackage}
import scala.collection.mutable
+import scala.reflect.runtime.ReflectionUtils.scalacShouldntLoadClass
private[reflect] trait SymbolLoaders { self: SymbolTable =>
@@ -89,14 +90,6 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
}
}
- /** Is the given name valid for a top-level class? We exclude names with embedded $-signs, because
- * these are nested classes or anonymous classes,
- */
- def isInvalidClassName(name: Name) = {
- val dp = name pos '$'
- 0 < dp && dp < (name.length - 1)
- }
-
class PackageScope(pkgClass: Symbol) extends Scope(initFingerPrints = -1L) // disable fingerprinting as we do not know entries beforehand
with SynchronizedScope {
assert(pkgClass.isType)
@@ -106,7 +99,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
val e = super.lookupEntry(name)
if (e != null)
e
- else if (isInvalidClassName(name) || (negatives contains name))
+ else if (scalacShouldntLoadClass(name) || (negatives contains name))
null
else {
val path =