author     Josh Suereth <joshua.suereth@gmail.com>    2012-06-18 22:28:31 -0400
committer  Josh Suereth <joshua.suereth@gmail.com>    2012-06-18 22:28:31 -0400
commit     e3cecc751a397e9ea17bf21ec169e98d1b472850 (patch)
tree       dbbcf0d5618f43514d55f08e1d8c57391661cf72
parent     82032eb2696c44490d8504059dc95790225f79bf (diff)
parent     ef98fa3af90a30e745bd6a5e944b3351be46391f (diff)
Merge branch 'master' into sbt-reflecton-fix
-rw-r--r--  build.xml | 216
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Modes.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 3
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBoxFactory.scala | 5
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala | 15
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala | 40
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala | 15
-rw-r--r--  src/library/scala/Predef.scala | 52
-rw-r--r--  src/library/scala/collection/GenTraversableOnce.scala | 19
-rw-r--r--  src/library/scala/collection/Iterator.scala | 3
-rw-r--r--  src/library/scala/collection/MapLike.scala | 28
-rw-r--r--  src/library/scala/collection/TraversableLike.scala | 7
-rw-r--r--  src/library/scala/collection/TraversableOnce.scala | 17
-rw-r--r--  src/library/scala/collection/concurrent/TrieMap.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/Vector.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ArrayOps.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/LinkedHashMap.scala | 21
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 6
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParVector.scala | 2
-rw-r--r--  src/library/scala/concurrent/impl/Future.scala | 106
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 25
-rw-r--r--  src/reflect/scala/reflect/api/Symbols.scala | 17
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolTable.scala | 2
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverse.scala | 6
-rw-r--r--  test/benchmarking/ParCtrie-size.scala | 11
-rw-r--r--  test/files/continuations-neg/ts-1681-nontail-return.check | 4
-rw-r--r--  test/files/continuations-neg/ts-1681-nontail-return.scala | 18
-rw-r--r--  test/files/continuations-run/ts-1681-2.check | 5
-rw-r--r--  test/files/continuations-run/ts-1681-2.scala | 44
-rw-r--r--  test/files/continuations-run/ts-1681-3.check | 4
-rw-r--r--  test/files/continuations-run/ts-1681-3.scala | 27
-rw-r--r--  test/files/continuations-run/ts-1681.check | 3
-rw-r--r--  test/files/continuations-run/ts-1681.scala | 29
-rw-r--r--  test/files/presentation/ide-bug-1000531.check | 4
-rw-r--r--  test/files/run/collection-conversions.check | 104
-rw-r--r--  test/files/run/collection-conversions.scala | 60
-rw-r--r--  test/files/run/t4954.scala | 45
-rw-r--r--  test/files/run/t5912.scala | 6
40 files changed, 806 insertions(+), 181 deletions(-)
diff --git a/build.xml b/build.xml
index 4929a11883..33ebbfe377 100644
--- a/build.xml
+++ b/build.xml
@@ -55,6 +55,18 @@ END-USER TARGETS
</antcall>
</target>
+
+ <target name="partialdist" depends="dist.partial"
+ description="Makes a new distribution without documentation, so just for testing."/>
+
+ <target name="partialdist-opt"
+ description="Makes a new optimised distribution without testing it or removing partially build elements.">
+ <antcall target="partialdist">
+ <param name="scalac.args.optimise" value="-optimise"/>
+ </antcall>
+ </target>
+
+
<target name="fastdist" depends="dist.done"
description="Makes a new distribution without testing it or removing partially build elements."/>
@@ -123,10 +135,30 @@ END-USER TARGETS
<antcall target="palo.done"/>
</target>
- <target name="fastlocker"
+ <target name="fastlocker.lib"
+ description="Buildlocker without extra fuss">
+ <property name="fastlocker" value="true"/>
+ <antcall target="locker.unlock"/>
+ <antcall target="locker.lib"/>
+ </target>
+
+ <target name="fastlocker.reflect"
+ description="Buildlocker without extra fuss">
+ <property name="fastlocker" value="true"/>
+ <antcall target="locker.unlock"/>
+ <antcall target="locker.reflect"/>
+ </target>
+
+ <target name="fastlocker.comp"
description="Buildlocker without extra fuss">
+ <property name="fastlocker" value="true"/>
<antcall target="locker.unlock"/>
- <antcall target="locker.done"/>
+ <antcall target="locker.comp"/>
+ </target>
+
+ <target name="fastlocker"
+ description="Buildlocker without extra fuss">
+ <antcall target="fastlocker.comp"/>
</target>
<target name="buildlocker"
@@ -334,16 +366,31 @@ INITIALISATION
<target name="init.version.done" depends="init.version.release, init.version.snapshot"/>
<target name="init.testjava6">
- <fail message="This build requires JDK 1.6">
- <condition>
- <not>
+ <condition property="has.java6">
<equals arg1="${ant.java.version}" arg2="1.6"/>
- </not>
</condition>
- </fail>
+ <condition property="has.java7">
+ <equals arg1="${ant.java.version}" arg2="1.7"/>
+ </condition>
+ <condition property="has.unsupported.jdk">
+ <not><or>
+ <isset property="has.java7" />
+ <isset property="has.java6" />
+ </or></not>
+ </condition>
</target>
- <target name="init" depends="init.jars, init.maven.jars, init.version.done">
+ <target name="init.fail.bad.jdk" depends="init.testjava6">
+ <fail if="has.unsupported.jdk"
+ message="JDK ${ant.java.version} is not supported by this build!"/>
+ </target>
+ <target name="init.warn.jdk7" depends="init.testjava6" if="has.java7">
+ <echo level="warning"> You are using JDK7 for this build. While this will be able to build most of Scala, it will
+ not build the Swing project. You will be unable to create a distribution.
+ </echo>
+ </target>
+
+ <target name="init" depends="init.jars, init.maven.jars, init.version.done, init.fail.bad.jdk, init.warn.jdk7">
<property name="scalac.args.always" value="" />
<!-- scalac.args.optimise is selectively overridden in certain antcall tasks. -->
<property name="scalac.args.optimise" value=""/>
@@ -467,7 +514,7 @@ LOCAL DEPENDENCY (Adapted ASM)
<!-- ===========================================================================
LOCAL DEPENDENCY (FORKJOIN)
============================================================================ -->
- <target name="forkjoin.start" depends="init, init.testjava6">
+ <target name="forkjoin.start" depends="init">
<uptodate property="forkjoin.available" targetfile="${build-libs.dir}/forkjoin.complete">
<srcfiles dir="${src.dir}/forkjoin">
<include name="**/*.java"/>
@@ -477,6 +524,7 @@ LOCAL DEPENDENCY (FORKJOIN)
</target>
<target name="forkjoin.lib" depends="forkjoin.start" unless="forkjoin.available">
+ <stopwatch name="forkjoin.lib.timer"/>
<mkdir dir="${build-libs.dir}/classes/forkjoin"/>
<javac
fork="yes"
@@ -490,6 +538,7 @@ LOCAL DEPENDENCY (FORKJOIN)
<compilerarg line="${javac.args} -XDignore.symbol.file"/>
</javac>
<touch file="${build-libs.dir}/forkjoin.complete" verbose="no"/>
+ <stopwatch name="forkjoin.lib.timer" action="total"/>
</target>
<target name="forkjoin.pack" depends="forkjoin.lib">
@@ -524,6 +573,7 @@ LOCAL DEPENDENCY (FJBG)
</target>
<target name="fjbg.lib" depends="fjbg.init" unless="fjbg.available">
+ <stopwatch name="fjbg.lib.timer" />
<mkdir dir="${build-libs.dir}/classes/fjbg"/>
<javac
srcdir="${src.dir}/fjbg"
@@ -535,6 +585,7 @@ LOCAL DEPENDENCY (FJBG)
<compilerarg line="${javac.args} -XDignore.symbol.file"/>
</javac>
<touch file="${build-libs.dir}/fjbg.complete" verbose="no"/>
+ <stopwatch name="fjbg.lib.timer" action="total"/>
</target>
<target name="fjbg.pack" depends="fjbg.lib">
@@ -677,33 +728,43 @@ LOCAL REFERENCE BUILD (LOCKER)
<target name="locker.comp" depends="locker.pre-comp" if="locker.comp.needed">
<stopwatch name="locker.comp.timer"/>
<mkdir dir="${build-locker.dir}/classes/compiler"/>
- <!-- Compile MSIL inside of locker.... -->
- <javac
- srcdir="${src.dir}/msil"
- destdir="${build-locker.dir}/classes/compiler"
- classpath="${build-locker.dir}/classes/compiler"
- includes="**/*.java"
- excludes="**/tests/**"
- debug="true"
- target="1.5" source="1.4">
- <compilerarg line="${javac.args}"/>
- </javac>
- <scalacfork
- destdir="${build-locker.dir}/classes/compiler"
- compilerpathref="starr.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/msil"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-locker.dir}/classes/library"/>
- <pathelement location="${build-locker.dir}/classes/reflect"/>
- <pathelement location="${build-locker.dir}/classes/compiler"/>
- <path refid="fjbg.classpath"/>
- <path refid="aux.libs"/>
- <pathelement location="${jline.jar}"/>
- </compilationpath>
- </scalacfork>
+ <if>
+ <equals arg1="${fastlocker}" arg2="true" />
+ <then>
+ <!-- Fastlocker build: don't compile MSIL, use its starr version.... -->
+ <property name="locker.comp.msil" value="${msil.starr.jar}"/>
+ </then>
+ <else>
+ <!-- Regular build: Compile MSIL inside of locker.... -->
+ <javac
+ srcdir="${src.dir}/msil"
+ destdir="${build-locker.dir}/classes/compiler"
+ classpath="${build-locker.dir}/classes/compiler"
+ includes="**/*.java"
+ excludes="**/tests/**"
+ debug="true"
+ target="1.5" source="1.4">
+ <compilerarg line="${javac.args}"/>
+ </javac>
+ <scalacfork
+ destdir="${build-locker.dir}/classes/compiler"
+ compilerpathref="starr.classpath"
+ params="${scalac.args.all}"
+ srcdir="${src.dir}/msil"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath>
+ <pathelement location="${build-locker.dir}/classes/library"/>
+ <pathelement location="${build-locker.dir}/classes/reflect"/>
+ <pathelement location="${build-locker.dir}/classes/compiler"/>
+ <path refid="fjbg.classpath"/>
+ <path refid="aux.libs"/>
+ <pathelement location="${jline.jar}"/>
+ </compilationpath>
+ </scalacfork>
+ <property name="locker.comp.msil" value="${build-locker.dir}/classes/compiler"/>
+ </else>
+ </if>
<scalacfork
destdir="${build-locker.dir}/classes/compiler"
compilerpathref="starr.classpath"
@@ -718,6 +779,7 @@ LOCAL REFERENCE BUILD (LOCKER)
<path refid="fjbg.classpath"/>
<path refid="aux.libs"/>
<path refid="asm.classpath"/>
+ <pathelement location="${locker.comp.msil}" />
<pathelement location="${jline.jar}"/>
</compilationpath>
</scalacfork>
@@ -978,15 +1040,6 @@ QUICK BUILD (QUICK)
<include name="**/*.scala"/>
<compilationpath refid="quick.compilation.path"/>
</scalacfork>
- <scalacfork
- destdir="${build-quick.dir}/classes/library"
- compilerpathref="locker.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/swing"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="quick.compilation.path"/>
- </scalacfork>
<propertyfile file="${build-quick.dir}/classes/library/library.properties">
<entry key="version.number" value="${version.number}"/>
<entry key="maven.version.number" value="${maven.version.number}"/>
@@ -1001,12 +1054,26 @@ QUICK BUILD (QUICK)
<include name="**/*.css"/>
</fileset>
</copy>
- <touch file="${build-quick.dir}/library.complete" verbose="no"/>
- <stopwatch name="quick.lib.timer" action="total"/>
</target>
+ <target name="quick.swing" depends="quick.lib" if="has.java6" unless="quick.lib.available">
+ <scalacfork
+ destdir="${build-quick.dir}/classes/library"
+ compilerpathref="locker.classpath"
+ params="${scalac.args.quick}"
+ srcdir="${src.dir}/swing"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath refid="quick.compilation.path"/>
+ </scalacfork>
+ </target>
+
+ <target name="quick.lib.done" depends="quick.swing, quick.lib">
+ <stopwatch name="quick.lib.timer" action="total"/>
+ <touch file="${build-quick.dir}/library.complete" verbose="no"/>
+ </target>
- <target name="quick.pre-reflect" depends="quick.lib">
+ <target name="quick.pre-reflect" depends="quick.lib.done">
<uptodate property="quick.reflect.available" targetfile="${build-quick.dir}/reflect.complete">
<srcfiles dir="${src.dir}">
<include name="reflect/**"/>
@@ -1412,11 +1479,6 @@ PACKED QUICK BUILD (PACK)
</fileset>
<fileset dir="${build-libs.dir}/classes/forkjoin"/>
</jar>
- <jar destfile="${build-pack.dir}/lib/scala-swing.jar">
- <fileset dir="${build-quick.dir}/classes/library">
- <include name="scala/swing/**"/>
- </fileset>
- </jar>
<jar destfile="${build-pack.dir}/lib/scala-actors.jar">
<fileset dir="${build-quick.dir}/classes/library">
<include name="scala/actors/**"/>
@@ -1427,7 +1489,15 @@ PACKED QUICK BUILD (PACK)
</jar>
</target>
- <target name="pack.pre-reflect" depends="pack.lib">
+ <target name="pack.swing" depends="pack.lib" if="has.java6">
+ <jar destfile="${build-pack.dir}/lib/scala-swing.jar">
+ <fileset dir="${build-quick.dir}/classes/library">
+ <include name="scala/swing/**"/>
+ </fileset>
+ </jar>
+ </target>
+
+ <target name="pack.pre-reflect" depends="pack.lib, pack.swing">
<uptodate
property="pack.reflect.available"
targetfile="${build-pack.dir}/lib/scala-reflect.jar"
@@ -1643,15 +1713,6 @@ BOOTSTRAPPING BUILD (STRAP)
<include name="**/*.scala"/>
<compilationpath refid="strap.compilation.path"/>
</scalacfork>
- <scalacfork
- destdir="${build-strap.dir}/classes/library"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/swing"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
- </scalacfork>
<propertyfile file="${build-strap.dir}/classes/library/library.properties">
<entry key="version.number" value="${version.number}"/>
<entry key="maven.version.number" value="${maven.version.number}"/>
@@ -1666,11 +1727,26 @@ BOOTSTRAPPING BUILD (STRAP)
<include name="**/*.css"/>
</fileset>
</copy>
+ </target>
+
+ <target name="strap.swing" if="has.java6" unless="strap.lib.available" depends="strap.lib">
+ <scalacfork
+ destdir="${build-strap.dir}/classes/library"
+ compilerpathref="pack.classpath"
+ params="${scalac.args.quick}"
+ srcdir="${src.dir}/swing"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath refid="strap.compilation.path"/>
+ </scalacfork>
+ </target>
+
+ <target name="strap.lib.done" depends="strap.swing, strap.lib">
<touch file="${build-strap.dir}/library.complete" verbose="no"/>
<stopwatch name="strap.lib.timer" action="total"/>
</target>
- <target name="strap.pre-reflect" depends="strap.lib">
+ <target name="strap.pre-reflect" depends="strap.lib.done">
<uptodate property="strap.reflect.available" targetfile="${build-strap.dir}/reflect.complete">
<srcfiles dir="${src.dir}/reflect"/>
</uptodate>
@@ -2348,7 +2424,7 @@ BOOTRAPING TEST AND TEST SUITE
DISTRIBUTION
============================================================================ -->
- <target name="dist.start" depends="docs.done, pack.done">
+ <target name="dist.start" depends="pack.done">
<property name="dist.name" value="scala-${version.number}"/>
<property name="dist.dir" value="${dists.dir}/${dist.name}"/>
</target>
@@ -2373,7 +2449,7 @@ DISTRIBUTION
</copy>
</target>
- <target name="dist.doc" depends="dist.base">
+ <target name="dist.doc" depends="dist.base, docs.done">
<mkdir dir="${dist.dir}/doc/scala-devel-docs"/>
<copy file="${docs.dir}/LICENSE" toDir="${dist.dir}/doc"/>
<copy file="${docs.dir}/README" toDir="${dist.dir}/doc"/>
@@ -2432,11 +2508,11 @@ DISTRIBUTION
</jar>
</target>
- <target name="dist.latest.unix" depends="dist.src" unless="os.win">
+ <target name="dist.latest.unix" depends="dist.base" unless="os.win">
<symlink link="${dists.dir}/latest" resource="${dist.name}" overwrite="yes"/>
</target>
- <target name="dist.latest.win" depends="dist.src" if="os.win">
+ <target name="dist.latest.win" depends="dist.base" if="os.win">
<copy todir="${dists.dir}/latest">
<fileset dir="${dist.dir}"/>
</copy>
@@ -2444,7 +2520,9 @@ DISTRIBUTION
<target name="dist.latest" depends="dist.latest.unix,dist.latest.win"/>
- <target name="dist.done" depends="dist.latest"/>
+ <target name="dist.partial" depends="dist.base, dist.latest"/>
+
+ <target name="dist.done" depends="dist.latest, dist.src"/>
<target name="dist.clean">
<delete dir="${dists.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index aa63f3ec31..0ea46f1de4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -1319,7 +1319,7 @@ trait Implicits {
if (full) {
val cm = typed(Ident(ReflectRuntimeCurrentMirror))
gen.mkMethodCall(ReflectRuntimeUniverse, nme.typeTagToManifest, List(tp), List(cm, tagInScope))
- } else gen.mkMethodCall(ReflectRuntimeUniverse, nme.classTagToClassManifest, List(tp), List(tagInScope))
+ } else gen.mkMethodCall(ReflectBasis, nme.classTagToClassManifest, List(tp), List(tagInScope))
wrapResult(interop)
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Modes.scala b/src/compiler/scala/tools/nsc/typechecker/Modes.scala
index 3eff5ef024..bde3ad98c9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Modes.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Modes.scala
@@ -86,6 +86,10 @@ trait Modes {
*/
final val TYPEPATmode = 0x10000
+ /** RETmode is set when we are typing a return expression.
+ */
+ final val RETmode = 0x20000
+
final private val StickyModes = EXPRmode | PATTERNmode | TYPEmode | ALTmode
final def onlyStickyModes(mode: Int) =
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 2bdae4164a..1193d3013a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -3987,7 +3987,8 @@ trait Typers extends Modes with Adaptations with Tags {
ReturnWithoutTypeError(tree, enclMethod.owner)
} else {
context.enclMethod.returnsSeen = true
- val expr1: Tree = typed(expr, EXPRmode | BYVALmode, restpt.tpe)
+ val expr1: Tree = typed(expr, EXPRmode | BYVALmode | RETmode, restpt.tpe)
+
// Warn about returning a value if no value can be returned.
if (restpt.tpe.typeSymbol == UnitClass) {
// The typing in expr1 says expr is Unit (it has already been coerced if
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index 7cf515425d..278f4e3ff7 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -126,7 +126,10 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
wrapper(currentTyper.silent(_.typed(expr, analyzer.EXPRmode, pt)) match {
case analyzer.SilentResultValue(result) =>
trace("success: ")(showAttributed(result, true, true, settings.Yshowsymkinds.value))
- var Block(dummies, unwrapped) = result
+ var (dummies, unwrapped) = result match {
+ case Block(dummies, unwrapped) => (dummies, unwrapped)
+ case unwrapped => (Nil, unwrapped)
+ }
var invertedIndex = freeTerms map (_.swap)
// todo. also fixup singleton types
unwrapped = new Transformer {
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index 862b19d0a4..5b8e9baa21 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -170,6 +170,9 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
vprintln("yes we can!! (byval)")
return true
}
+ } else if ((mode & global.analyzer.RETmode) != 0) {
+ vprintln("yes we can!! (return)")
+ return true
}
}
false
@@ -183,6 +186,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val patMode = (mode & global.analyzer.PATTERNmode) != 0
val exprMode = (mode & global.analyzer.EXPRmode) != 0
val byValMode = (mode & global.analyzer.BYVALmode) != 0
+ val retMode = (mode & global.analyzer.RETmode) != 0
val annotsTree = cpsParamAnnotation(tree.tpe)
val annotsExpected = cpsParamAnnotation(pt)
@@ -209,6 +213,12 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val res = tree modifyType addMinusMarker
vprintln("adapted annotations (by val) of " + tree + " to " + res.tpe)
res
+ } else if (retMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) {
+ // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
+ // tree will look like having no annotation
+ val res = tree modifyType (_ withAnnotations List(newPlusMarker()))
+ vprintln("adapted annotations (return) of " + tree + " to " + res.tpe)
+ res
} else tree
}
@@ -464,6 +474,11 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
}
tpe
+ case ret @ Return(expr) =>
+ if (hasPlusMarker(expr.tpe))
+ ret setType expr.tpe
+ ret.tpe
+
case _ =>
tpe
}
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
index 3a1dc87a6a..765cde5a81 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
@@ -3,6 +3,7 @@
package scala.tools.selectivecps
import scala.tools.nsc.Global
+import scala.collection.mutable.ListBuffer
trait CPSUtils {
val global: Global
@@ -135,4 +136,43 @@ trait CPSUtils {
case _ => None
}
}
+
+ def isTailReturn(retExpr: Tree, body: Tree): Boolean = {
+ val removed = ListBuffer[Tree]()
+ removeTailReturn(body, removed)
+ removed contains retExpr
+ }
+
+ def removeTailReturn(tree: Tree, removed: ListBuffer[Tree]): Tree = tree match {
+ case Block(stms, r @ Return(expr)) =>
+ removed += r
+ treeCopy.Block(tree, stms, expr)
+
+ case Block(stms, expr) =>
+ treeCopy.Block(tree, stms, removeTailReturn(expr, removed))
+
+ case If(cond, r1 @ Return(thenExpr), r2 @ Return(elseExpr)) =>
+ removed ++= Seq(r1, r2)
+ treeCopy.If(tree, cond, removeTailReturn(thenExpr, removed), removeTailReturn(elseExpr, removed))
+
+ case If(cond, thenExpr, elseExpr) =>
+ treeCopy.If(tree, cond, removeTailReturn(thenExpr, removed), removeTailReturn(elseExpr, removed))
+
+ case Try(block, catches, finalizer) =>
+ treeCopy.Try(tree,
+ removeTailReturn(block, removed),
+ (catches map (t => removeTailReturn(t, removed))).asInstanceOf[List[CaseDef]],
+ removeTailReturn(finalizer, removed))
+
+ case CaseDef(pat, guard, r @ Return(expr)) =>
+ removed += r
+ treeCopy.CaseDef(tree, pat, guard, expr)
+
+ case CaseDef(pat, guard, body) =>
+ treeCopy.CaseDef(tree, pat, guard, removeTailReturn(body, removed))
+
+ case _ =>
+ tree
+ }
+
}
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
index 017c8d24fd..fe465aad0d 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
@@ -9,6 +9,8 @@ import scala.tools.nsc.plugins._
import scala.tools.nsc.ast._
+import scala.collection.mutable.ListBuffer
+
/**
* In methods marked @cps, explicitly name results of calls to other @cps methods
*/
@@ -46,10 +48,20 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
// this would cause infinite recursion. But we could remove the
// ValDef case here.
- case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs0) =>
debuglog("transforming " + dd.symbol)
atOwner(dd.symbol) {
+ val tailReturns = ListBuffer[Tree]()
+ val rhs = removeTailReturn(rhs0, tailReturns)
+ // throw an error if there is a Return tree which is not in tail position
+ rhs0 foreach {
+ case r @ Return(_) =>
+ if (!tailReturns.contains(r))
+ unit.error(r.pos, "return expressions in CPS code must be in tail position")
+ case _ => /* do nothing */
+ }
+
val rhs1 = transExpr(rhs, None, getExternalAnswerTypeAnn(tpt.tpe))
debuglog("result "+rhs1)
@@ -153,7 +165,6 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
}
}
-
def transExpr(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo): Tree = {
transTailValue(tree, cpsA, cpsR) match {
case (Nil, b) => b
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 65e8549ae1..99bd7f0736 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -121,16 +121,16 @@ object Predef extends LowPriorityImplicits {
// Deprecated
- @deprecated("Use sys.error(message) instead", "2.9.0")
+ @deprecated("Use `sys.error(message)` instead", "2.9.0")
def error(message: String): Nothing = sys.error(message)
- @deprecated("Use sys.exit() instead", "2.9.0")
+ @deprecated("Use `sys.exit()` instead", "2.9.0")
def exit(): Nothing = sys.exit()
- @deprecated("Use sys.exit(status) instead", "2.9.0")
+ @deprecated("Use `sys.exit(status)` instead", "2.9.0")
def exit(status: Int): Nothing = sys.exit(status)
- @deprecated("Use formatString.format(args: _*) or arg.formatted(formatString) instead", "2.9.0")
+ @deprecated("Use `formatString.format(args: _*)` or `arg.formatted(formatString)` instead", "2.9.0")
def format(text: String, xs: Any*) = augmentString(text).format(xs: _*)
// errors and asserts -------------------------------------------------
@@ -219,7 +219,7 @@ object Predef extends LowPriorityImplicits {
final class Ensuring[A](val __resultOfEnsuring: A) extends AnyVal {
// `__resultOfEnsuring` must be a public val to allow inlining.
// See comments in ArrowAssoc for more.
- @deprecated("Use __resultOfEnsuring instead", "2.10.0")
+ @deprecated("Use `__resultOfEnsuring` instead", "2.10.0")
def x = __resultOfEnsuring
def ensuring(cond: Boolean): A = { assert(cond); __resultOfEnsuring }
@@ -255,7 +255,7 @@ object Predef extends LowPriorityImplicits {
// being confused why they get an ambiguous implicit conversion
// error. (`foo.x` used to produce this error since both
// any2Ensuring and any2ArrowAssoc pimped an `x` onto everything)
- @deprecated("Use __leftOfArrow instead", "2.10.0")
+ @deprecated("Use `__leftOfArrow` instead", "2.10.0")
def x = __leftOfArrow
@inline def -> [B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y)
@@ -320,30 +320,30 @@ object Predef extends LowPriorityImplicits {
// Primitive Widenings --------------------------------------------------------------
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2short(x: Byte): Short = x.toShort
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2int(x: Byte): Int = x.toInt
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2long(x: Byte): Long = x.toLong
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2float(x: Byte): Float = x.toFloat
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2double(x: Byte): Double = x.toDouble
+ @deprecated("Use `.toShort` for explicit conversion and `Byte.byte2short` for implicit conversion", "2.10.0") def byte2short(x: Byte): Short = x.toShort
+ @deprecated("Use `.toInt` for explicit conversion and `Byte.byte2int` for implicit conversion", "2.10.0") def byte2int(x: Byte): Int = x.toInt
+ @deprecated("Use `.toLong` for explicit conversion and `Byte.byte2long for implicit conversion", "2.10.0") def byte2long(x: Byte): Long = x.toLong
+ @deprecated("Use `.toFloat` for explicit conversion and `Byte.byte2float` for implicit conversion", "2.10.0") def byte2float(x: Byte): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Byte.byte2double` for implicit conversion", "2.10.0") def byte2double(x: Byte): Double = x.toDouble
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def short2int(x: Short): Int = x.toInt
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def short2long(x: Short): Long = x.toLong
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def short2float(x: Short): Float = x.toFloat
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def short2double(x: Short): Double = x.toDouble
+ @deprecated("Use `.toInt` for explicit conversion and `Short.short2int` for implicit conversion", "2.10.0") def short2int(x: Short): Int = x.toInt
+ @deprecated("Use `.toLong` for explicit conversion and `Short.short2long` for implicit conversion", "2.10.0") def short2long(x: Short): Long = x.toLong
+ @deprecated("Use `.toFloat` for explicit conversion and `Short.short2float` for implicit conversion", "2.10.0") def short2float(x: Short): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Short.short2double` for implicit conversion", "2.10.0") def short2double(x: Short): Double = x.toDouble
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def char2int(x: Char): Int = x.toInt
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def char2long(x: Char): Long = x.toLong
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def char2float(x: Char): Float = x.toFloat
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def char2double(x: Char): Double = x.toDouble
+ @deprecated("Use `.toInt` for explicit conversion and `Char.char2int` for implicit conversion", "2.10.0") def char2int(x: Char): Int = x.toInt
+ @deprecated("Use `.toLong` for explicit conversion and `Char.char2long` for implicit conversion", "2.10.0") def char2long(x: Char): Long = x.toLong
+ @deprecated("Use `.toFloat` for explicit conversion and `Char.char2float` for implicit conversion", "2.10.0") def char2float(x: Char): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Char.char2double` for implicit conversion", "2.10.0") def char2double(x: Char): Double = x.toDouble
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def int2long(x: Int): Long = x.toLong
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def int2float(x: Int): Float = x.toFloat
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def int2double(x: Int): Double = x.toDouble
+ @deprecated("Use `.toLong` for explicit conversion and `Int.int2long` for implicit conversion", "2.10.0") def int2long(x: Int): Long = x.toLong
+ @deprecated("Use `.toFloat` for explicit conversion and `Int.int2float` for implicit conversion", "2.10.0") def int2float(x: Int): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Int.int2double` for implicit conversion", "2.10.0") def int2double(x: Int): Double = x.toDouble
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def long2float(x: Long): Float = x.toFloat
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def long2double(x: Long): Double = x.toDouble
+ @deprecated("Use `.toFloat` for explicit conversion and `Long.long2float` for implicit conversion", "2.10.0") def long2float(x: Long): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Long.long2double` for implicit conversion", "2.10.0") def long2double(x: Long): Double = x.toDouble
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def float2double(x: Float): Double = x.toDouble
+ @deprecated("Use `.toDouble` for explicit conversion and `Float.float2double` for implicit conversion", "2.10.0") def float2double(x: Float): Double = x.toDouble
// "Autoboxing" and "Autounboxing" ---------------------------------------------------
@@ -385,7 +385,7 @@ object Predef extends LowPriorityImplicits {
implicit def any2stringadd(x: Any) = new runtime.StringAdd(x)
implicit def unaugmentString(x: StringOps): String = x.repr
- @deprecated("Use StringCanBuildFrom", "2.10.0")
+ @deprecated("Use `StringCanBuildFrom`", "2.10.0")
def stringCanBuildFrom: CanBuildFrom[String, Char, String] = StringCanBuildFrom
implicit val StringCanBuildFrom: CanBuildFrom[String, Char, String] = new CanBuildFrom[String, Char, String] {
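Illustration (not part of the patch): the reworded deprecation messages above steer callers toward the explicit `.toX` methods or the implicit-conversion methods on the AnyVal companion objects. A minimal sketch assuming a 2.10 standard library; the object name is invented for the example:

  object WideningExample extends App {
    val b: Byte = 7
    val viaExplicit: Int = b.toInt            // explicit conversion, as the new messages recommend
    val viaCompanion: Int = Byte.byte2int(b)  // implicit-conversion method on the companion object
    println(viaExplicit + viaCompanion)       // 14
  }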
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index eadacd9209..e475865391 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -9,6 +9,8 @@
package scala.collection
import scala.reflect.ClassTag
+import scala.collection.generic.CanBuildFrom
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
/** A template trait for all traversable-once objects which may be
* traversed in parallel.
@@ -552,4 +554,21 @@ trait GenTraversableOnce[+A] extends Any {
* containing all key/value pairs of type `(T, U)` of this $coll.
*/
def toMap[K, V](implicit ev: A <:< (K, V)): GenMap[K, V]
+
+ /** Converts this $coll to a Vector.
+ * $willNotTerminateInf
+ * @return a vector containing all elements of this $coll.
+ */
+ def toVector: Vector[A]
+
+ /** Converts this $coll into another by copying all elements.
+ * @tparam Col The collection type to build.
+ * @return a new collection containing all elements of this $coll.
+ *
+ * @usecase def convertTo[Col[_]]: Col[A]
+ * @inheritdoc
+ * $willNotTerminateInf
+ * @return a new collection containing all elements of this $coll.
+ */
+ def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV]
}
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index b2bbc8d888..5f369de3b7 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -11,6 +11,8 @@ package scala.collection
import mutable.ArrayBuffer
import annotation.migration
import immutable.Stream
+import scala.collection.generic.CanBuildFrom
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
/** The `Iterator` object provides various functions for creating specialized iterators.
*
@@ -1138,6 +1140,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
def toStream: Stream[A] =
if (self.hasNext) Stream.cons(self.next, self.toStream)
else Stream.empty[A]
+
/** Converts this iterator to a string.
*
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index b9b8f62574..75f9ff93db 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -227,30 +227,34 @@ self =>
def default(key: A): B =
throw new NoSuchElementException("key not found: " + key)
- /** Filters this map by retaining only keys satisfying a predicate.
- * @param p the predicate used to test keys
- * @return an immutable map consisting only of those key value pairs of this map where the key satisfies
- * the predicate `p`. The resulting map wraps the original map without copying any elements.
- */
- def filterKeys(p: A => Boolean): Map[A, B] = new AbstractMap[A, B] with DefaultMap[A, B] {
+ protected class FilteredKeys(p: A => Boolean) extends AbstractMap[A, B] with DefaultMap[A, B] {
override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
def iterator = self.iterator.filter(kv => p(kv._1))
override def contains(key: A) = self.contains(key) && p(key)
def get(key: A) = if (!p(key)) None else self.get(key)
}
-
- /** Transforms this map by applying a function to every retrieved value.
- * @param f the function used to transform values of this map.
- * @return a map view which maps every key of this map
- * to `f(this(key))`. The resulting map wraps the original map without copying any elements.
+
+ /** Filters this map by retaining only keys satisfying a predicate.
+ * @param p the predicate used to test keys
+ * @return an immutable map consisting only of those key value pairs of this map where the key satisfies
+ * the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
- def mapValues[C](f: B => C): Map[A, C] = new AbstractMap[A, C] with DefaultMap[A, C] {
+ def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p)
+
+ protected class MappedValues[C](f: B => C) extends AbstractMap[A, C] with DefaultMap[A, C] {
override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v)))
def iterator = for ((k, v) <- self.iterator) yield (k, f(v))
override def size = self.size
override def contains(key: A) = self.contains(key)
def get(key: A) = self.get(key).map(f)
}
+
+ /** Transforms this map by applying a function to every retrieved value.
+ * @param f the function used to transform values of this map.
+ * @return a map view which maps every key of this map
+ * to `f(this(key))`. The resulting map wraps the original map without copying any elements.
+ */
+ def mapValues[C](f: B => C): Map[A, C] = new MappedValues(f)
// The following 5 operations (updated, two times +, two times ++) should really be
// generic, returning This[B]. We need better covariance support to express that though.
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 3716a318d9..e5861f5760 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -616,6 +616,13 @@ trait TraversableLike[+A, +Repr] extends Any
def toTraversable: Traversable[A] = thisCollection
def toIterator: Iterator[A] = toStream.iterator
def toStream: Stream[A] = toBuffer.toStream
+ // Override to provide size hint.
+ override def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
+ val b = cbf()
+ b.sizeHint(this)
+ b ++= thisCollection
+ b.result
+ }
/** Converts this $coll to a string.
*
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 386ce2d95a..8dc6184d88 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -9,6 +9,7 @@
package scala.collection
import mutable.{ Buffer, Builder, ListBuffer, ArrayBuffer }
+import generic.CanBuildFrom
import annotation.unchecked.{ uncheckedVariance => uV }
import language.{implicitConversions, higherKinds}
import reflect.ClassTag
@@ -239,17 +240,25 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
def toTraversable: Traversable[A]
- def toList: List[A] = (new ListBuffer[A] ++= seq).toList
+ def toList: List[A] = convertTo[List]
def toIterable: Iterable[A] = toStream
def toSeq: Seq[A] = toStream
- def toIndexedSeq: immutable.IndexedSeq[A] = immutable.IndexedSeq() ++ seq
+ def toIndexedSeq: immutable.IndexedSeq[A] = convertTo[immutable.IndexedSeq]
- def toBuffer[B >: A]: mutable.Buffer[B] = new ArrayBuffer[B] ++= seq
+ def toBuffer[B >: A]: mutable.Buffer[B] = convertTo[ArrayBuffer].asInstanceOf[mutable.Buffer[B]]
- def toSet[B >: A]: immutable.Set[B] = immutable.Set() ++ seq
+ def toSet[B >: A]: immutable.Set[B] = convertTo[immutable.Set].asInstanceOf[immutable.Set[B]]
+
+ def toVector: Vector[A] = convertTo[Vector]
+
+ def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
+ val b = cbf()
+ b ++= seq
+ b.result
+ }
def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = {
val b = immutable.Map.newBuilder[T, U]
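Usage illustration (not part of the patch): with the `toVector` and `convertTo` members introduced above, a once-traversable collection can be rebuilt as any target collection for which a `CanBuildFrom[Nothing, A, Col[A]]` is available. A minimal sketch based on the signatures in this diff; the names are invented for the example:

  object ConvertToExample extends App {
    val xs = List(1, 2, 3)
    val v: Vector[Int] = xs.toVector      // default implementation delegates to convertTo[Vector]
    val s: Set[Int]    = xs.convertTo[Set]                               // built via Set's CanBuildFrom
    val buf            = xs.convertTo[scala.collection.mutable.ArrayBuffer]
    println((v, s, buf))
  }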
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index 08e9125bd8..2d8217551a 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -473,7 +473,11 @@ extends CNodeBase[K, V] {
private def computeSize(ct: TrieMap[K, V]): Int = {
var i = 0
var sz = 0
- val offset = math.abs(util.Random.nextInt()) % array.length
+ val offset =
+ if (array.length > 0)
+ //util.Random.nextInt(array.length) /* <-- benchmarks show that this causes observable contention */
+ scala.concurrent.forkjoin.ThreadLocalRandom.current.nextInt(0, array.length)
+ else 0
while (i < array.length) {
val pos = (i + offset) % array.length
array(pos) match {
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 1395a8f52d..d100bf93df 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -77,6 +77,8 @@ override def companion: GenericCompanion[Vector] = Vector
override def par = new ParVector(this)
+ override def toVector: Vector[A] = this
+
override def lengthCompare(len: Int): Int = length - len
private[collection] final def initIterator[B >: A](s: VectorIterator[B]) {
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 01636eb54e..7a595f211d 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -64,7 +64,7 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParalleliza
* @param asTrav A function that converts elements of this array to rows - arrays of type `U`.
* @return An array obtained by concatenating rows of this array.
*/
- def flatten[U, To](implicit asTrav: T => collection.Traversable[U], m: ClassTag[U]): Array[U] = {
+ def flatten[U](implicit asTrav: T => collection.Traversable[U], m: ClassTag[U]): Array[U] = {
val b = Array.newBuilder[U]
b.sizeHint(map{case is: collection.IndexedSeq[_] => is.size case _ => 0}.sum)
for (xs <- this)
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index 4150cf9eba..5643e070f8 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -49,7 +49,8 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
with Map[A, B]
with MapLike[A, B, LinkedHashMap[A, B]]
with HashTable[A, LinkedEntry[A, B]]
- with Serializable {
+ with Serializable
+{
override def empty = LinkedHashMap.empty[A, B]
override def size = tableSize
@@ -107,7 +108,25 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
if (hasNext) { val res = (cur.key, cur.value); cur = cur.later; res }
else Iterator.empty.next
}
+
+ protected class FilteredKeys(p: A => Boolean) extends super.FilteredKeys(p) {
+ override def empty = LinkedHashMap.empty
+ }
+
+ override def filterKeys(p: A => Boolean): scala.collection.Map[A, B] = new FilteredKeys(p)
+ protected class MappedValues[C](f: B => C) extends super.MappedValues[C](f) {
+ override def empty = LinkedHashMap.empty
+ }
+
+ override def mapValues[C](f: B => C): scala.collection.Map[A, C] = new MappedValues(f)
+
+ protected class DefaultKeySet extends super.DefaultKeySet {
+ override def empty = LinkedHashSet.empty
+ }
+
+ override def keySet: scala.collection.Set[A] = new DefaultKeySet
+
override def keysIterator: Iterator[A] = new AbstractIterator[A] {
private var cur = firstEntry
def hasNext = cur ne null
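Sketch (illustrative, not from the patch): with the overrides above, `filterKeys`, `mapValues` and `keySet` on a LinkedHashMap return wrappers built from the new inner classes; since those wrappers iterate the underlying map, insertion order stays visible through them:

  import scala.collection.mutable.LinkedHashMap

  object LinkedHashMapViews extends App {
    val m = LinkedHashMap("b" -> 2, "a" -> 1, "c" -> 3)
    println(m.filterKeys(_ != "a").toList)   // List((b,2), (c,3)), in insertion order
    println(m.mapValues(_ * 10).toList)      // List((b,20), (a,10), (c,30))
    println(m.keySet.toList)                 // List(b, a, c)
  }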
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index a447f1b5e4..a7ec833193 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -851,6 +851,12 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toMap[K, V](implicit ev: T <:< (K, V)): immutable.ParMap[K, V] = toParMap[K, V, immutable.ParMap[K, V]](() => immutable.ParMap.newCombiner[K, V])
+ // TODO(@alex22): make these better
+ override def toVector: Vector[T] = seq.toVector
+
+ override def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = seq.convertTo[Col]
+
+
/* tasks */
protected trait StrictSplitterCheckTask[R, Tp] extends Task[R, Tp] {
diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala
index 1ece663a1d..e4099f1809 100644
--- a/src/library/scala/collection/parallel/immutable/ParVector.scala
+++ b/src/library/scala/collection/parallel/immutable/ParVector.scala
@@ -62,6 +62,8 @@ extends ParSeq[T]
override def seq: Vector[T] = vector
+ override def toVector: Vector[T] = vector
+
class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with SeqSplitter[T] {
def remaining: Int = remainingElementCount
def dup: SeqSplitter[T] = (new ParVector(remainingVector)).splitter
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
index 47534e398b..6a3487adde 100644
--- a/src/library/scala/concurrent/impl/Future.scala
+++ b/src/library/scala/concurrent/impl/Future.scala
@@ -44,13 +44,13 @@ private[concurrent] object Future {
}
def boxedType(c: Class[_]): Class[_] = if (c.isPrimitive) toBoxed(c) else c
-
- def apply[T](body: =>T)(implicit executor: ExecutionContext): Future[T] = {
+
+ private[impl] class PromiseCompletingTask[T](override val executor: ExecutionContext, body: => T)
+ extends Task {
val promise = new Promise.DefaultPromise[T]()
- //TODO: use `dispatchFuture`?
- executor.execute(new Runnable {
- def run = promise complete {
+ protected override def task() = {
+ promise complete {
try Right(body) catch {
case NonFatal(e) =>
// Commenting out reporting for now, since it produces too much output in the tests
@@ -58,9 +58,14 @@ private[concurrent] object Future {
Left(e)
}
}
- })
-
- promise.future
+ }
+ }
+
+ def apply[T](body: =>T)(implicit executor: ExecutionContext): Future[T] = {
+ val task = new PromiseCompletingTask(executor, body)
+ task.dispatch()
+
+ task.promise.future
}
private[impl] val throwableId: Throwable => Throwable = identity _
@@ -70,38 +75,77 @@ private[concurrent] object Future {
// so that it can be stolen from
// OR: a push to the local task queue should be so cheap that this is
// not even needed, but stealing is still possible
- private val _taskStack = new ThreadLocal[Stack[() => Unit]]()
+
+ private[impl] case class TaskStack(stack: Stack[Task], executor: ExecutionContext)
+
+ private val _taskStack = new ThreadLocal[TaskStack]()
+
+ private[impl] trait Task extends Runnable {
+ def executor: ExecutionContext
+
+ // run the original callback (no dispatch)
+ protected def task(): Unit
+
+ // we implement Runnable to avoid creating
+ // an extra object. run() runs ourselves with
+ // a TaskStack pushed, and then runs any
+ // other tasks that show up in the stack.
+ final override def run() = {
+ try {
+ val taskStack = TaskStack(Stack[Task](this), executor)
+ _taskStack set taskStack
+ while (taskStack.stack.nonEmpty) {
+ val next = taskStack.stack.pop()
+ require(next.executor eq executor)
+ try next.task() catch { case NonFatal(e) => executor reportFailure e }
+ }
+ } finally {
+ _taskStack.remove()
+ }
+ }
+
+ // send the task to the running executor.execute() via
+ // _taskStack, or start a new executor.execute()
+ def dispatch(force: Boolean = false): Unit =
+ _taskStack.get match {
+ case stack if (stack ne null) && (executor eq stack.executor) && !force => stack.stack push this
+ case _ => executor.execute(this)
+ }
+ }
+
+ private[impl] class ReleaseTask(override val executor: ExecutionContext, val elems: List[Task])
+ extends Task {
+ protected override def task() = {
+ _taskStack.get.stack.elems = elems
+ }
+ }
private[impl] def releaseStack(executor: ExecutionContext): Unit =
_taskStack.get match {
- case stack if (stack ne null) && stack.nonEmpty =>
- val tasks = stack.elems
- stack.clear()
+ case stack if (stack ne null) && stack.stack.nonEmpty =>
+ val tasks = stack.stack.elems
+ stack.stack.clear()
_taskStack.remove()
- dispatchFuture(executor, () => _taskStack.get.elems = tasks, true)
+ val release = new ReleaseTask(executor, tasks)
+ release.dispatch(force=true)
case null =>
// do nothing - there is no local batching stack anymore
case _ =>
_taskStack.remove()
}
- private[impl] def dispatchFuture(executor: ExecutionContext, task: () => Unit, force: Boolean = false): Unit =
- _taskStack.get match {
- case stack if (stack ne null) && !force => stack push task // FIXME we can't mix tasks aimed for different ExecutionContexts see: https://github.com/akka/akka/blob/v2.0.1/akka-actor/src/main/scala/akka/dispatch/Future.scala#L373
- case _ => executor.execute(new Runnable {
- def run() {
- try {
- val taskStack = Stack[() => Unit](task)
- _taskStack set taskStack
- while (taskStack.nonEmpty) {
- val next = taskStack.pop()
- try next() catch { case NonFatal(e) => executor reportFailure e }
- }
- } finally {
- _taskStack.remove()
- }
- }
- })
+ private[impl] class OnCompleteTask[T](override val executor: ExecutionContext, val onComplete: (Either[Throwable, T]) => Any)
+ extends Task {
+ private var value: Either[Throwable, T] = null
+
+ protected override def task() = {
+ require(value ne null) // dispatch(value) must be called before dispatch()
+ onComplete(value)
}
-
+
+ def dispatch(value: Either[Throwable, T]): Unit = {
+ this.value = value
+ dispatch()
+ }
+ }
}
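Conceptual sketch (my illustration, not the patch's code): the `Task`/`dispatch` machinery above batches callbacks on a thread-local stack so that a callback scheduled from inside another callback on the same executor runs within the same `execute` call instead of being resubmitted. A simplified, self-contained version of the idea, omitting the executor-equality check and failure reporting; all names are invented:

  import scala.collection.mutable.Stack
  import scala.concurrent.ExecutionContext

  object BatchingTask {
    // the batch currently being drained on this thread, if any
    val current = new ThreadLocal[Stack[BatchingTask]]
  }

  trait BatchingTask extends Runnable {
    def executor: ExecutionContext
    protected def task(): Unit                 // the actual work, run without re-dispatching

    // run this task, then drain any tasks batched onto this thread while it ran
    final override def run(): Unit = {
      val batch = Stack[BatchingTask](this)
      BatchingTask.current.set(batch)
      try while (batch.nonEmpty) batch.pop().task()
      finally BatchingTask.current.remove()
    }

    // batch onto the running thread's stack if there is one, else submit to the executor
    def dispatch(): Unit = BatchingTask.current.get match {
      case null  => executor.execute(this)
      case batch => batch.push(this)
    }
  }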
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 1d573ef818..c5060a2368 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -94,10 +94,10 @@ object Promise {
val resolved = resolveEither(value)
(try {
@tailrec
- def tryComplete(v: Either[Throwable, T]): List[Either[Throwable, T] => Unit] = {
+ def tryComplete(v: Either[Throwable, T]): List[Future.OnCompleteTask[T]] = {
getState match {
case raw: List[_] =>
- val cur = raw.asInstanceOf[List[Either[Throwable, T] => Unit]]
+ val cur = raw.asInstanceOf[List[Future.OnCompleteTask[T]]]
if (updateState(cur, v)) cur else tryComplete(v)
case _ => null
}
@@ -108,32 +108,21 @@ object Promise {
}) match {
case null => false
case cs if cs.isEmpty => true
- // this assumes that f(resolved) will go via dispatchFuture
- // and notifyCompleted (see onComplete below)
- case cs => cs.foreach(f => f(resolved)); true
+ case cs => cs.foreach(c => c.dispatch(resolved)); true
}
}
def onComplete[U](func: Either[Throwable, T] => U)(implicit executor: ExecutionContext): Unit = {
- val bound: Either[Throwable, T] => Unit = (either: Either[Throwable, T]) =>
- Future.dispatchFuture(executor, () => notifyCompleted(func, either))
+ val bound = new Future.OnCompleteTask[T](executor, func)
@tailrec //Tries to add the callback, if already completed, it dispatches the callback to be executed
def dispatchOrAddCallback(): Unit =
getState match {
- case r: Either[_, _] => bound(r.asInstanceOf[Either[Throwable, T]])
+ case r: Either[_, _] => bound.dispatch(r.asInstanceOf[Either[Throwable, T]])
case listeners: List[_] => if (updateState(listeners, bound :: listeners)) () else dispatchOrAddCallback()
}
dispatchOrAddCallback()
}
-
- private final def notifyCompleted(func: Either[Throwable, T] => Any, result: Either[Throwable, T])(implicit executor: ExecutionContext) {
- try {
- func(result)
- } catch {
- case NonFatal(e) => executor reportFailure e
- }
- }
}
/** An already completed Future is given its result at creation.
@@ -149,8 +138,8 @@ object Promise {
def tryComplete(value: Either[Throwable, T]): Boolean = false
def onComplete[U](func: Either[Throwable, T] => U)(implicit executor: ExecutionContext): Unit = {
- val completedAs = value.get // Avoid closing over "this"
- Future.dispatchFuture(executor, () => func(completedAs))
+ val completedAs = value.get
+ (new Future.OnCompleteTask(executor, func)).dispatch(completedAs)
}
def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
index 1d266dc778..b2eb598cc0 100644
--- a/src/reflect/scala/reflect/api/Symbols.scala
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -116,11 +116,28 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isValue: Boolean
+ /** Does this symbol denote a stable value? */
+ def isStable: Boolean
+
/** Does this symbol represent a mutable value?
* If yes, `isTerm` and `isValue` are also guaranteed to be true.
*/
def isVariable: Boolean
+ /** Does this symbol represent a getter or a setter?
+ */
+ def isAccessor: Boolean
+
+ /** Does this symbol represent a getter of a field?
+ * If yes, `isTerm` and `isMethod` are also guaranteed to be true.
+ */
+ def isGetter: Boolean
+
+ /** Does this symbol represent a setter of a field?
+ * If yes, `isTerm` and `isMethod` are also guaranteed to be true.
+ */
+ def isSetter: Boolean
+
/** Does this symbol represent the definition of a package?
* If yes, `isTerm` is also guaranteed to be true.
*/
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index a9d9b06621..320cd3ddae 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -179,7 +179,7 @@ trait Definitions extends api.StandardDefinitions {
val EmptyPackage: ModuleSymbol = rootMirror.EmptyPackage
@deprecated("Moved to rootMirror.EmptyPackageClass", "2.10.0")
- val EmptyPackageClass: ClassSymbol = rootMirror.RootClass
+ val EmptyPackageClass: ClassSymbol = rootMirror.EmptyPackageClass
// It becomes tricky to create dedicated objects for other symbols because
// of initialization order issues.
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index cadd76b1ba..a0c57042f3 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -40,7 +40,7 @@ abstract class SymbolTable extends makro.Universe
{
val gen = new TreeGen { val global: SymbolTable.this.type = SymbolTable.this }
- val treeBuild = gen
+ lazy val treeBuild = gen
def log(msg: => AnyRef): Unit
def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg))
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index d4a83b960d..629df76178 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -12,12 +12,6 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S
def picklerPhase = SomePhase
- type TreeGen = internal.TreeGen
-
- override type Position = scala.reflect.internal.util.Position
-
- override val gen = new TreeGen { val global: self.type = self }
-
lazy val settings = new Settings
def forInteractive = false
def forScaladoc = false
diff --git a/test/benchmarking/ParCtrie-size.scala b/test/benchmarking/ParCtrie-size.scala
index 5a6191fb62..3f47dc23fd 100644
--- a/test/benchmarking/ParCtrie-size.scala
+++ b/test/benchmarking/ParCtrie-size.scala
@@ -2,16 +2,18 @@
-import collection.parallel.mutable.ParCtrie
+import collection.parallel.mutable.ParTrieMap
object Size extends testing.Benchmark {
val length = sys.props("length").toInt
val par = sys.props("par").toInt
- var parctrie = ParCtrie((0 until length) zip (0 until length): _*)
+ var parctrie = ParTrieMap((0 until length) zip (0 until length): _*)
- collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par)
+ //collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par)
+ val ts = new collection.parallel.ForkJoinTaskSupport(new concurrent.forkjoin.ForkJoinPool(par))
+ parctrie.tasksupport = ts
def run = {
parctrie.size
@@ -21,7 +23,8 @@ object Size extends testing.Benchmark {
override def tearDown() {
iteration += 1
- if (iteration % 4 == 0) parctrie = ParCtrie((0 until length) zip (0 until length): _*)
+ if (iteration % 4 == 0) parctrie = ParTrieMap((0 until length) zip (0 until length): _*)
+ parctrie.tasksupport = ts
}
}
diff --git a/test/files/continuations-neg/ts-1681-nontail-return.check b/test/files/continuations-neg/ts-1681-nontail-return.check
new file mode 100644
index 0000000000..8fe15f154b
--- /dev/null
+++ b/test/files/continuations-neg/ts-1681-nontail-return.check
@@ -0,0 +1,4 @@
+ts-1681-nontail-return.scala:10: error: return expressions in CPS code must be in tail position
+ return v
+ ^
+one error found
diff --git a/test/files/continuations-neg/ts-1681-nontail-return.scala b/test/files/continuations-neg/ts-1681-nontail-return.scala
new file mode 100644
index 0000000000..af86ad304f
--- /dev/null
+++ b/test/files/continuations-neg/ts-1681-nontail-return.scala
@@ -0,0 +1,18 @@
+import scala.util.continuations._
+
+class ReturnRepro {
+ def s1: Int @cpsParam[Any, Unit] = shift { k => k(5) }
+ def caller = reset { println(p(3)) }
+
+ def p(i: Int): Int @cpsParam[Unit, Any] = {
+ val v = s1 + 3
+ if (v == 8)
+ return v
+ v + 1
+ }
+}
+
+object Test extends App {
+ val repro = new ReturnRepro
+ repro.caller
+}
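The error expected by this neg test fires because `return v` is followed by `v + 1`, so the return is not the last expression on its path. A hedged sketch, not part of the patch, of a shape the plugin accepts after this change (every return ends its branch), mirroring the run tests further down:

import scala.util.continuations._

class TailReturnSketch {
  def s1: Int @cpsParam[Any, Unit] = shift { k => k(5) }

  def p(i: Int): Int @cpsParam[Unit, Any] = {
    val v = s1 + 3
    if (v == 8) return v   // tail position: nothing follows in this branch
    else return v + 1      // likewise the last expression of the else branch
  }
}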
diff --git a/test/files/continuations-run/ts-1681-2.check b/test/files/continuations-run/ts-1681-2.check
new file mode 100644
index 0000000000..35b3c93780
--- /dev/null
+++ b/test/files/continuations-run/ts-1681-2.check
@@ -0,0 +1,5 @@
+8
+hi
+8
+from try
+8
diff --git a/test/files/continuations-run/ts-1681-2.scala b/test/files/continuations-run/ts-1681-2.scala
new file mode 100644
index 0000000000..8a896dec2c
--- /dev/null
+++ b/test/files/continuations-run/ts-1681-2.scala
@@ -0,0 +1,44 @@
+import scala.util.continuations._
+
+class ReturnRepro {
+ def s1: Int @cps[Any] = shift { k => k(5) }
+ def caller = reset { println(p(3)) }
+ def caller2 = reset { println(p2(3)) }
+ def caller3 = reset { println(p3(3)) }
+
+ def p(i: Int): Int @cps[Any] = {
+ val v = s1 + 3
+ return v
+ }
+
+ def p2(i: Int): Int @cps[Any] = {
+ val v = s1 + 3
+ if (v > 0) {
+ println("hi")
+ return v
+ } else {
+ println("hi")
+ return 8
+ }
+ }
+
+ def p3(i: Int): Int @cps[Any] = {
+ val v = s1 + 3
+ try {
+ println("from try")
+ return v
+ } catch {
+ case e: Exception =>
+ println("from catch")
+ return 7
+ }
+ }
+
+}
+
+object Test extends App {
+ val repro = new ReturnRepro
+ repro.caller
+ repro.caller2
+ repro.caller3
+}
diff --git a/test/files/continuations-run/ts-1681-3.check b/test/files/continuations-run/ts-1681-3.check
new file mode 100644
index 0000000000..71489f097c
--- /dev/null
+++ b/test/files/continuations-run/ts-1681-3.check
@@ -0,0 +1,4 @@
+enter return expr
+8
+hi
+8
diff --git a/test/files/continuations-run/ts-1681-3.scala b/test/files/continuations-run/ts-1681-3.scala
new file mode 100644
index 0000000000..62c547f5a2
--- /dev/null
+++ b/test/files/continuations-run/ts-1681-3.scala
@@ -0,0 +1,27 @@
+import scala.util.continuations._
+
+class ReturnRepro {
+ def s1: Int @cpsParam[Any, Unit] = shift { k => k(5) }
+ def caller = reset { println(p(3)) }
+ def caller2 = reset { println(p2(3)) }
+
+ def p(i: Int): Int @cpsParam[Unit, Any] = {
+ val v = s1 + 3
+ return { println("enter return expr"); v }
+ }
+
+ def p2(i: Int): Int @cpsParam[Unit, Any] = {
+ val v = s1 + 3
+ if (v > 0) {
+ return { println("hi"); v }
+ } else {
+ return { println("hi"); 8 }
+ }
+ }
+}
+
+object Test extends App {
+ val repro = new ReturnRepro
+ repro.caller
+ repro.caller2
+}
diff --git a/test/files/continuations-run/ts-1681.check b/test/files/continuations-run/ts-1681.check
new file mode 100644
index 0000000000..85176d8e66
--- /dev/null
+++ b/test/files/continuations-run/ts-1681.check
@@ -0,0 +1,3 @@
+8
+hi
+8
diff --git a/test/files/continuations-run/ts-1681.scala b/test/files/continuations-run/ts-1681.scala
new file mode 100644
index 0000000000..efb1abae15
--- /dev/null
+++ b/test/files/continuations-run/ts-1681.scala
@@ -0,0 +1,29 @@
+import scala.util.continuations._
+
+class ReturnRepro {
+ def s1: Int @cpsParam[Any, Unit] = shift { k => k(5) }
+ def caller = reset { println(p(3)) }
+ def caller2 = reset { println(p2(3)) }
+
+ def p(i: Int): Int @cpsParam[Unit, Any] = {
+ val v = s1 + 3
+ return v
+ }
+
+ def p2(i: Int): Int @cpsParam[Unit, Any] = {
+ val v = s1 + 3
+ if (v > 0) {
+ println("hi")
+ return v
+ } else {
+ println("hi")
+ return 8
+ }
+ }
+}
+
+object Test extends App {
+ val repro = new ReturnRepro
+ repro.caller
+ repro.caller2
+}
diff --git a/test/files/presentation/ide-bug-1000531.check b/test/files/presentation/ide-bug-1000531.check
index e813ce119b..9d4674d7c7 100644
--- a/test/files/presentation/ide-bug-1000531.check
+++ b/test/files/presentation/ide-bug-1000531.check
@@ -3,7 +3,7 @@ reload: CrashOnLoad.scala
askTypeCompletion at CrashOnLoad.scala(6,12)
================================================================================
[response] aksTypeCompletion at (6,12)
-retrieved 124 members
+retrieved 126 members
[accessible: true] `class GroupedIteratorIterator[B]#GroupedIterator`
[accessible: true] `method !=(x$1: Any)Boolean`
[accessible: true] `method !=(x$1: AnyRef)Boolean`
@@ -25,6 +25,7 @@ retrieved 124 members
[accessible: true] `method collectFirst[B](pf: PartialFunction[B,B])Option[B]`
[accessible: true] `method collect[B](pf: PartialFunction[B,B])Iterator[B]`
[accessible: true] `method contains(elem: Any)Boolean`
+[accessible: true] `method convertTo[Col[_]](implicit cbf: scala.collection.generic.CanBuildFrom[Nothing,B,Col[B]])Col[B]`
[accessible: true] `method copyToArray[B >: B](xs: Array[B])Unit`
[accessible: true] `method copyToArray[B >: B](xs: Array[B], start: Int)Unit`
[accessible: true] `method copyToArray[B >: B](xs: Array[B], start: Int, len: Int)Unit`
@@ -109,6 +110,7 @@ retrieved 124 members
[accessible: true] `method toStream=> scala.collection.immutable.Stream[B]`
[accessible: true] `method toString()String`
[accessible: true] `method toTraversable=> Traversable[B]`
+[accessible: true] `method toVector=> Vector[B]`
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
diff --git a/test/files/run/collection-conversions.check b/test/files/run/collection-conversions.check
new file mode 100644
index 0000000000..08d0fa32c5
--- /dev/null
+++ b/test/files/run/collection-conversions.check
@@ -0,0 +1,104 @@
+-- Testing iterator ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+-- Testing Vector ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+-- Testing List ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+-- Testing Buffer ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+-- Testing ParVector ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+-- Testing Set ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+-- Testing SetView ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+-- Testing BufferView ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
diff --git a/test/files/run/collection-conversions.scala b/test/files/run/collection-conversions.scala
new file mode 100644
index 0000000000..b5c4d8e261
--- /dev/null
+++ b/test/files/run/collection-conversions.scala
@@ -0,0 +1,60 @@
+import collection._
+import mutable.Buffer
+import parallel.immutable.ParVector
+import reflect.ClassTag
+
+object Test {
+
+ def printResult[A,B](msg: String, obj: A, expected: B)(implicit tag: ClassTag[A], tag2: ClassTag[B]) = {
+ print(" :" + msg + ": ")
+ val isArray = obj match {
+ case x: Array[Int] => true
+ case _ => false
+ }
+ val expectedEquals =
+ if(isArray) obj.asInstanceOf[Array[Int]].toSeq == expected.asInstanceOf[Array[Int]].toSeq
+ else obj == expected
+ val tagEquals = tag == tag2
+ if(expectedEquals && tagEquals) print("OK")
+ else print("FAILED")
+ if(!expectedEquals) print(", " + obj + " != " + expected)
+ if(!tagEquals) print(", " + tag + " != " + tag2)
+ println("")
+ }
+
+ val testVector = Vector(1,2,3)
+ val testBuffer = Buffer(1,2,3)
+ val testGenSeq = GenSeq(1,2,3)
+ val testSeq = Seq(1,2,3)
+ val testStream = Stream(1,2,3)
+ val testArray = Array(1,2,3)
+ val testParVector = ParVector(1,2,3)
+
+ def testConversion[A: ClassTag](name: String, col: => GenTraversableOnce[A]): Unit = {
+ val tmp = col
+ println("-- Testing " + name + " ---")
+ printResult("[Direct] Vector ", col.toVector, testVector)
+ printResult("[Copy] Vector ", col.convertTo[Vector], testVector)
+ printResult("[Direct] Buffer ", col.toBuffer, testBuffer)
+ printResult("[Copy] Buffer ", col.convertTo[Buffer], testBuffer)
+ printResult("[Direct] GenSeq ", col.toSeq, testGenSeq)
+ printResult("[Copy] GenSeq ", col.convertTo[GenSeq], testGenSeq)
+ printResult("[Copy] Seq ", col.convertTo[Seq], testSeq)
+ printResult("[Direct] Stream ", col.toStream, testStream)
+ printResult("[Copy] Stream ", col.convertTo[Stream], testStream)
+ printResult("[Direct] Array ", col.toArray, testArray)
+ printResult("[Copy] Array ", col.convertTo[Array], testArray)
+ printResult("[Copy] ParVector", col.convertTo[ParVector], testParVector)
+ }
+
+ def main(args: Array[String]): Unit = {
+ testConversion("iterator", (1 to 3).iterator)
+ testConversion("Vector", Vector(1,2,3))
+ testConversion("List", List(1,2,3))
+ testConversion("Buffer", Buffer(1,2,3))
+ testConversion("ParVector", ParVector(1,2,3))
+ testConversion("Set", Set(1,2,3))
+ testConversion("SetView", Set(1,2,3).view)
+ testConversion("BufferView", Buffer(1,2,3).view)
+ }
+}
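The test above drives the two conversion styles this merge adds to GenTraversableOnce: the direct toVector and the CanBuildFrom-based convertTo[Col] (its signature appears in the presentation-compiler check file earlier in this diff). A minimal sketch, not part of the patch, assuming the convertTo name as it appears in this changeset:

import scala.collection.mutable.Buffer

object ConvertToSketch extends App {
  val xs = List(1, 2, 3)
  val v: Vector[Int] = xs.toVector            // direct conversion
  val b: Buffer[Int] = xs.convertTo[Buffer]   // copy via an implicit CanBuildFrom
  println(v == Vector(1, 2, 3) && b == Buffer(1, 2, 3)) // expected: true
}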
diff --git a/test/files/run/t4954.scala b/test/files/run/t4954.scala
new file mode 100644
index 0000000000..b4916e651d
--- /dev/null
+++ b/test/files/run/t4954.scala
@@ -0,0 +1,45 @@
+
+
+import collection._
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ val m = scala.collection.mutable.LinkedHashMap("one" -> 1, "two" -> 2, "three" -> 3, "four" -> 4, "five" -> 5)
+ val expected = List("one", "two", "three", "four", "five")
+ assert(m.keys.iterator.toList == expected)
+ assert(m.keys.drop(0).iterator.toList == expected)
+ assert(m.keys.drop(1).iterator.toList == expected.drop(1))
+ assert(m.keys.drop(2).iterator.toList == expected.drop(2))
+ assert(m.keys.drop(3).iterator.toList == expected.drop(3))
+ assert(m.keys.drop(4).iterator.toList == expected.drop(4))
+ assert(m.keys.drop(5).iterator.toList == expected.drop(5))
+
+ val expvals = List(1, 2, 3, 4, 5)
+ assert(m.values.iterator.toList == expvals)
+ assert(m.values.drop(0).iterator.toList == expvals)
+ assert(m.values.drop(1).iterator.toList == expvals.drop(1))
+ assert(m.values.drop(2).iterator.toList == expvals.drop(2))
+ assert(m.values.drop(3).iterator.toList == expvals.drop(3))
+ assert(m.values.drop(4).iterator.toList == expvals.drop(4))
+ assert(m.values.drop(5).iterator.toList == expvals.drop(5))
+
+ val pred = (x: String) => x.length < 6
+ val filtered = m.filterKeys(pred)
+ assert(filtered.drop(0).keys.toList == expected.filter(pred))
+ assert(filtered.drop(1).keys.toList == expected.filter(pred).drop(1))
+ assert(filtered.drop(2).keys.toList == expected.filter(pred).drop(2))
+ assert(filtered.drop(3).keys.toList == expected.filter(pred).drop(3))
+ assert(filtered.drop(4).keys.toList == expected.filter(pred).drop(4))
+
+ val mapped = m.mapValues(-_)
+ assert(mapped.drop(0).keys.toList == expected)
+ assert(mapped.drop(1).keys.toList == expected.drop(1))
+ assert(mapped.drop(2).keys.toList == expected.drop(2))
+ assert(mapped.drop(3).keys.toList == expected.drop(3))
+ assert(mapped.drop(4).keys.toList == expected.drop(4))
+ assert(mapped.drop(5).keys.toList == expected.drop(5))
+ }
+
+}
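t4954 pins down ordering behaviour of mutable.LinkedHashMap views: keys and values must come back in insertion order even through filterKeys, mapValues and drop. A condensed sketch, not part of the patch:

object LinkedHashMapOrderSketch extends App {
  val m = scala.collection.mutable.LinkedHashMap("one" -> 1, "two" -> 2, "three" -> 3)
  // Insertion order must survive the non-strict map/filter wrappers and drop.
  println(m.mapValues(-_).drop(1).keys.toList)     // expected: List(two, three)
  println(m.filterKeys(_.length < 5).keys.toList)  // expected: List(one, two)
}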
diff --git a/test/files/run/t5912.scala b/test/files/run/t5912.scala
new file mode 100644
index 0000000000..7710d04396
--- /dev/null
+++ b/test/files/run/t5912.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import scala.reflect.runtime.{currentMirror=>cm}
+ import scala.tools.reflect._
+ import scala.reflect.runtime.universe._
+ val tree = cm.mkToolBox().typeCheck( Literal(Constant("test")) )
+} \ No newline at end of file