-rw-r--r--  bincompat-backward.whitelist.conf | 9
-rw-r--r--  bincompat-forward.whitelist.conf | 97
-rw-r--r--  build.number | 4
-rwxr-xr-x  build.xml | 5
-rwxr-xr-x  scripts/jobs/integrate/bootstrap | 4
-rwxr-xr-x  scripts/jobs/integrate/ide | 3
-rw-r--r--  src/compiler/scala/tools/cmd/Property.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 22
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaVersion.scala | 50
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 60
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala | 104
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala | 33
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 29
-rw-r--r--  src/compiler/scala/tools/reflect/WrappedProperties.scala | 5
-rw-r--r--  src/compiler/scala/tools/util/PathResolver.scala | 2
-rw-r--r--  src/eclipse/partest/.classpath | 2
-rw-r--r--  src/eclipse/repl/.classpath | 1
-rw-r--r--  src/eclipse/scaladoc/.classpath | 6
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java | 3
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java | 5
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java | 1
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java | 3
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java | 1
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java | 1
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java | 2
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java | 1
-rw-r--r--  src/forkjoin/scala/concurrent/util/Unsafe.java | 6
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Global.scala | 27
-rw-r--r--  src/library/scala/PartialFunction.scala | 4
-rw-r--r--  src/library/scala/Predef.scala | 2
-rw-r--r--  src/library/scala/beans/BeanInfo.scala | 1
-rw-r--r--  src/library/scala/collection/IterableViewLike.scala | 5
-rw-r--r--  src/library/scala/collection/Iterator.scala | 84
-rw-r--r--  src/library/scala/collection/MapLike.scala | 21
-rw-r--r--  src/library/scala/collection/SeqViewLike.scala | 22
-rw-r--r--  src/library/scala/collection/SetLike.scala | 15
-rw-r--r--  src/library/scala/collection/TraversableLike.scala | 2
-rw-r--r--  src/library/scala/collection/TraversableViewLike.scala | 22
-rw-r--r--  src/library/scala/collection/concurrent/Map.scala | 11
-rw-r--r--  src/library/scala/collection/convert/WrapAsJava.scala | 56
-rw-r--r--  src/library/scala/collection/convert/WrapAsScala.scala | 35
-rw-r--r--  src/library/scala/collection/immutable/HashSet.scala | 30
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/ListMap.scala | 19
-rw-r--r--  src/library/scala/collection/immutable/Map.scala | 22
-rw-r--r--  src/library/scala/collection/immutable/PagedSeq.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/Range.scala | 19
-rw-r--r--  src/library/scala/collection/immutable/Stream.scala | 103
-rw-r--r--  src/library/scala/collection/immutable/StreamViewLike.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/AnyRefMap.scala | 18
-rw-r--r--  src/library/scala/collection/mutable/ArrayOps.scala | 5
-rw-r--r--  src/library/scala/collection/mutable/ArraySeq.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/BufferLike.scala | 7
-rw-r--r--  src/library/scala/collection/mutable/BufferProxy.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ListBuffer.scala | 7
-rw-r--r--  src/library/scala/collection/mutable/LongMap.scala | 20
-rw-r--r--  src/library/scala/collection/mutable/MapLike.scala | 12
-rw-r--r--  src/library/scala/collection/mutable/ResizableArray.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/SetLike.scala | 11
-rw-r--r--  src/library/scala/collection/parallel/RemainsIterator.scala | 9
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParHashSet.scala | 2
-rw-r--r--  src/library/scala/concurrent/BlockContext.scala | 15
-rw-r--r--  src/library/scala/concurrent/ExecutionContext.scala | 36
-rw-r--r--  src/library/scala/concurrent/Future.scala | 469
-rw-r--r--  src/library/scala/concurrent/Promise.scala | 12
-rw-r--r--  src/library/scala/concurrent/duration/Duration.scala | 2
-rw-r--r--  src/library/scala/concurrent/impl/AbstractPromise.java | 40
-rw-r--r--  src/library/scala/concurrent/impl/ExecutionContextImpl.scala | 208
-rw-r--r--  src/library/scala/concurrent/impl/Future.scala | 34
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 167
-rw-r--r--  src/library/scala/deprecatedName.scala | 4
-rw-r--r--  src/library/scala/io/Source.scala | 4
-rw-r--r--  src/library/scala/math/package.scala | 2
-rw-r--r--  src/library/scala/runtime/BoxesRunTime.java | 2
-rw-r--r--  src/library/scala/runtime/ScalaRunTime.scala | 2
-rw-r--r--  src/library/scala/sys/SystemProperties.scala | 11
-rw-r--r--  src/library/scala/sys/process/BasicIO.scala | 2
-rw-r--r--  src/library/scala/sys/process/ProcessImpl.scala | 134
-rw-r--r--  src/library/scala/sys/process/package.scala | 30
-rw-r--r--  src/library/scala/util/Either.scala | 2
-rw-r--r--  src/library/scala/util/Try.scala | 118
-rw-r--r--  src/reflect/scala/reflect/internal/Mirrors.scala | 11
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala | 1
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolTable.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 18
-rw-r--r--  src/reflect/scala/reflect/internal/TreeGen.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/TreeInfo.scala | 1
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 20
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeMaps.scala | 9
-rw-r--r--  src/reflect/scala/reflect/internal/transform/Erasure.scala | 11
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala | 8
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala | 47
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala | 30
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala | 10
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css | 2
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js | 22
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala | 2
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala | 10
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala | 2
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala | 2
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala | 2
-rw-r--r--  test/files/jvm/future-spec.check | 2
-rw-r--r--  test/files/jvm/future-spec/FutureTests.scala | 273
-rw-r--r--  test/files/jvm/scala-concurrent-tck.check | 1
-rw-r--r--  test/files/jvm/scala-concurrent-tck.scala | 124
-rw-r--r--  test/files/jvm/t7146.scala | 2
-rw-r--r--  test/files/jvm/t8582.check | 3
-rw-r--r--  test/files/jvm/t8582.flags | 1
-rw-r--r--  test/files/neg/beanInfoDeprecation.check | 6
-rw-r--r--  test/files/neg/beanInfoDeprecation.flags | 1
-rw-r--r--  test/files/neg/beanInfoDeprecation.scala | 2
-rw-r--r--  test/files/neg/logImplicits.check | 2
-rw-r--r--  test/files/neg/names-defaults-neg.check | 48
-rw-r--r--  test/files/neg/names-defaults-neg.scala | 4
-rw-r--r--  test/files/neg/t8764.check | 6
-rw-r--r--  test/files/neg/t8764.flags | 1
-rw-r--r--  test/files/neg/t8764.scala | 9
-rw-r--r--  test/files/neg/t8849.check | 7
-rw-r--r--  test/files/neg/t8849.scala | 10
-rw-r--r--  test/files/pos/alladin763.scala | 37
-rw-r--r--  test/files/pos/t6778.scala | 5
-rw-r--r--  test/files/pos/t7784.scala | 13
-rw-r--r--  test/files/pos/t8462.scala | 11
-rw-r--r--  test/files/pos/t8862a.scala | 47
-rw-r--r--  test/files/pos/t8862b.scala | 12
-rw-r--r--  test/files/pos/t9074.scala | 24
-rw-r--r--  test/files/pos/t9074b.scala | 15
-rw-r--r--  test/files/pos/t9131.scala | 12
-rw-r--r--  test/files/run/analyzerPlugins.check | 8
-rw-r--r--  test/files/run/classfile-format-51.scala | 1
-rw-r--r--  test/files/run/classfile-format-52.scala | 1
-rw-r--r--  test/files/run/duration-coarsest.scala | 5
-rw-r--r--  test/files/run/future-flatmap-exec-count.check | 1
-rw-r--r--  test/files/run/inline-ex-handlers.check | 24
-rw-r--r--  test/files/run/lub-visibility.check | 2
-rw-r--r--  test/files/run/t2251b.check | 4
-rw-r--r--  test/files/run/t4332.scala | 2
-rw-r--r--  test/files/run/t6827.check | 12
-rw-r--r--  test/files/run/t6827.scala | 20
-rw-r--r--  test/files/run/t7521/Test.scala | 5
-rw-r--r--  test/files/run/t7521/Wrapper.scala | 1
-rw-r--r--  test/files/run/t7521b.check | 7
-rw-r--r--  test/files/run/t7521b.scala | 20
-rw-r--r--  test/files/run/t7775.scala | 43
-rw-r--r--  test/files/run/t8575.scala | 32
-rw-r--r--  test/files/run/t8575b.scala | 17
-rw-r--r--  test/files/run/t8575c.scala | 23
-rw-r--r--  test/files/run/t8710.scala | 17
-rw-r--r--  test/files/run/t8764.check | 5
-rw-r--r--  test/files/run/t8764.flags | 1
-rw-r--r--  test/files/run/t8764.scala | 16
-rw-r--r--  test/files/run/t8918-unary-ids.scala | 49
-rw-r--r--  test/files/run/t8944/A_1.scala | 1
-rw-r--r--  test/files/run/t8944/A_2.scala | 6
-rw-r--r--  test/files/run/t8944/Test_1.scala | 3
-rw-r--r--  test/files/run/t8944b.scala | 9
-rw-r--r--  test/files/run/t8944c.check | 5
-rw-r--r--  test/files/run/t8944c.scala | 8
-rw-r--r--  test/files/run/t8955.scala | 12
-rw-r--r--  test/files/run/t9174.check | 19
-rw-r--r--  test/files/run/t9174.scala | 11
-rw-r--r--  test/files/run/t9200/Test.java | 6
-rw-r--r--  test/files/run/t9200/test.scala | 12
-rwxr-xr-x  test/files/scalacheck/concurrent-map.scala | 76
-rw-r--r--  test/junit/scala/collection/SeqViewTest.scala | 16
-rw-r--r--  test/junit/scala/collection/SetMapConsistencyTest.scala | 11
-rw-r--r--  test/junit/scala/collection/convert/NullSafetyTest.scala | 279
-rw-r--r--  test/junit/scala/collection/immutable/StreamTest.scala | 108
-rw-r--r--  test/junit/scala/sys/process/t7350.scala | 298
-rw-r--r--  test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala | 44
-rw-r--r--  test/scaladoc/run/t7905.check | 1
-rw-r--r--  test/scaladoc/run/t7905.scala | 36
-rw-r--r--  test/scaladoc/scalacheck/HtmlFactoryTest.scala | 13
-rw-r--r--  versions.properties | 2
182 files changed, 3482 insertions, 1120 deletions
diff --git a/bincompat-backward.whitelist.conf b/bincompat-backward.whitelist.conf
index a1706d103d..637bd586e0 100644
--- a/bincompat-backward.whitelist.conf
+++ b/bincompat-backward.whitelist.conf
@@ -186,6 +186,15 @@ filter {
matchName="scala.reflect.runtime.SynchronizedOps.newNestedScope"
problemName=MissingMethodProblem
},
+ // see github.com/scala/scala/pull/3925, SI-8627, SI-6440
+ {
+ matchName="scala.collection.TraversableLike.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.Stream.filteredTail"
+ problemName=MissingMethodProblem
+ },
// https://github.com/scala/scala/pull/3848 -- SI-8680
{
matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$6"
diff --git a/bincompat-forward.whitelist.conf b/bincompat-forward.whitelist.conf
index 3808083dd3..80fe31ea22 100644
--- a/bincompat-forward.whitelist.conf
+++ b/bincompat-forward.whitelist.conf
@@ -272,6 +272,103 @@ filter {
matchName="scala.reflect.api.PredefTypeCreator"
problemName=MissingClassProblem
},
+ // see github.com/scala/scala/pull/3925, SI-8627, SI-6440
+ {
+ matchName="scala.collection.IterableViewLike#AbstractTransformed.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.AbstractTraversable.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.TraversableViewLike#AbstractTransformed.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.TraversableLike.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.SeqViewLike#AbstractTransformed.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.TreeSet.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.Stream.filteredTail"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.Stream.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.Stream.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.StringOps.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.TreeMap.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.concurrent.TrieMap.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofByte.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofLong.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofUnit.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofInt.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofChar.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofRef.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofDouble.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofFloat.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofBoolean.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofShort.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.TreeSet.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.io.AbstractFile.filterImpl"
+ problemName=MissingMethodProblem
+ },
// https://github.com/scala/scala/pull/3848 -- SI-8680
{
matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$6"
diff --git a/build.number b/build.number
index 5f8ed6d6b6..78e98dffb3 100644
--- a/build.number
+++ b/build.number
@@ -1,7 +1,7 @@
#Tue Sep 11 19:21:09 CEST 2007
version.major=2
-version.minor=11
-version.patch=6
+version.minor=12
+version.patch=0
# This is the -N part of a version. if it's 0, it's dropped from maven versions.
version.bnum=0
diff --git a/build.xml b/build.xml
index ee6a045bda..34b42270ed 100755
--- a/build.xml
+++ b/build.xml
@@ -1570,8 +1570,9 @@ TODO:
<target name="test.bc-opt" description="Optimized version of test.bc."> <optimized name="test.bc"/></target>
<target name="test.bc" depends="bc.prepare, pack.lib, pack.reflect" unless="test.bc.skip">
- <bc.check project="library"/>
- <bc.check project="reflect"/>
+ <echo message="binary compatibility testing disabled in the 2.12.x branch"/>
+ <!-- <bc.check project="library"/> -->
+ <!-- <bc.check project="reflect"/> -->
</target>
<!-- ===========================================================================
diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap
index 46d610018c..e2ddaeda55 100755
--- a/scripts/jobs/integrate/bootstrap
+++ b/scripts/jobs/integrate/bootstrap
@@ -33,7 +33,7 @@
# $SCALA_VER_BASE$SCALA_VER_SUFFIX (if former variable is set)
# By parsing the tag (if HEAD is tagged as v$base$suffix)
# By parsing build.number for the base version, suffixing with -$sha-nightly
-# Serialize these versions to jenkins.properties, which are passed downstream to scala-release-2.11.x-dist.
+# Serialize these versions to jenkins.properties, which are passed downstream to scala-release-2.12.x-dist.
# This also removes the need to tag scala/scala-dist (not possible for nightlies, still encouraged for releases, but not a hard requirement).
#
# Determine Module Versions
@@ -320,7 +320,7 @@ determineScalaVersion() {
# TODO: publish nightly snapshot using this script
publishToSonatype="no"
- echo "repo_ref=2.11.x" >> $baseDir/jenkins.properties # for the -dist downstream jobs that build the actual archives
+ echo "repo_ref=2.12.x" >> $baseDir/jenkins.properties # for the -dist downstream jobs that build the actual archives
else
echo "HEAD is tagged as $scalaTag."
# borrowed from https://github.com/cloudflare/semver_bash/blob/master/semver.sh
diff --git a/scripts/jobs/integrate/ide b/scripts/jobs/integrate/ide
index 5c1e6199e3..447f34f937 100755
--- a/scripts/jobs/integrate/ide
+++ b/scripts/jobs/integrate/ide
@@ -3,6 +3,9 @@
# requires env: scalaVersion (specifies binary already built from above checkout), WORKSPACE (provided by jenkins), repo_ref (HEAD of the scala checkout),
# requires files: $baseDir/versions.properties (from checkout -- defines version numbers for modules used to build scala for dbuild...)
+echo "IDE integration not yet available on 2.12.x. Punting."
+exit 0
+
# TODO: remove when integration is up and running
if [ "woele$_scabot_last" != "woele1" ]; then echo "Scabot didn't mark this as last commit -- skipping."; exit 0; fi
diff --git a/src/compiler/scala/tools/cmd/Property.scala b/src/compiler/scala/tools/cmd/Property.scala
index b1d951a5c4..e6262a7e40 100644
--- a/src/compiler/scala/tools/cmd/Property.scala
+++ b/src/compiler/scala/tools/cmd/Property.scala
@@ -9,6 +9,7 @@ package cmd
import nsc.io._
import java.util.Properties
import java.io.FileInputStream
+import scala.sys.SystemProperties
/** Contains logic for translating a property key/value pair into
* equivalent command line arguments. The default settings will
@@ -58,7 +59,7 @@ trait Property extends Reference {
returning(new Properties)(_ load new FileInputStream(file.path))
def systemPropertiesToOptions: List[String] =
- propertiesToOptions(System.getProperties)
+ propertiesToOptions(new SystemProperties().toList)
def propertiesToOptions(file: File): List[String] =
propertiesToOptions(loadProperties(file))
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 4663810003..627a181793 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -665,6 +665,15 @@ self =>
}
def isLiteral = isLiteralToken(in.token)
+ def isSimpleExprIntroToken(token: Token): Boolean = isLiteralToken(token) || (token match {
+ case IDENTIFIER | BACKQUOTED_IDENT |
+ THIS | SUPER | NEW | USCORE |
+ LPAREN | LBRACE | XMLSTART => true
+ case _ => false
+ })
+
+ def isSimpleExprIntro: Boolean = isExprIntroToken(in.token)
+
def isExprIntroToken(token: Token): Boolean = isLiteralToken(token) || (token match {
case IDENTIFIER | BACKQUOTED_IDENT |
THIS | SUPER | IF | FOR | NEW | USCORE | TRY | WHILE |
@@ -1565,11 +1574,14 @@ self =>
def prefixExpr(): Tree = {
if (isUnaryOp) {
atPos(in.offset) {
- val name = nme.toUnaryName(rawIdent().toTermName)
- if (name == nme.UNARY_- && isNumericLit)
- simpleExprRest(literal(isNegated = true), canApply = true)
- else
- Select(stripParens(simpleExpr()), name)
+ if (lookingAhead(isSimpleExprIntro)) {
+ val uname = nme.toUnaryName(rawIdent().toTermName)
+ if (uname == nme.UNARY_- && isNumericLit)
+ simpleExprRest(literal(isNegated = true), canApply = true)
+ else
+ Select(stripParens(simpleExpr()), uname)
+ }
+ else simpleExpr()
}
}
else simpleExpr()
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index cf52ad6636..72aa44d8d9 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -1138,7 +1138,7 @@ abstract class GenICode extends SubComponent {
// a package here, check if there's a package object.
val sym = (
if (!tree.symbol.isPackageClass) tree.symbol
- else tree.symbol.info.member(nme.PACKAGE) match {
+ else tree.symbol.info.packageObject match {
case NoSymbol => abort("Cannot use package as value: " + tree)
case s =>
devWarning(s"Found ${tree.symbol} where a package object is required. Converting to ${s.moduleClass}")
@@ -1495,7 +1495,7 @@ abstract class GenICode extends SubComponent {
if (!settings.optimise) {
if (l.tpe <:< BoxedNumberClass.tpe) {
if (r.tpe <:< BoxedNumberClass.tpe) platform.externalEqualsNumNum
- else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumObject // will be externalEqualsNumChar in 2.12, SI-9030
+ else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumChar
else platform.externalEqualsNumObject
} else platform.externalEquals
} else {
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
index 15b014bdd3..5b53ac7bc6 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
@@ -917,7 +917,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
def genLoadModule(tree: Tree): BType = {
val module = (
if (!tree.symbol.isPackageClass) tree.symbol
- else tree.symbol.info.member(nme.PACKAGE) match {
+ else tree.symbol.info.packageObject match {
case NoSymbol => abort(s"SI-5604: Cannot use package as value: $tree")
case s => abort(s"SI-5604: found package class where package object expected: $tree")
}
@@ -1227,7 +1227,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
val equalsMethod: Symbol = {
if (l.tpe <:< BoxedNumberClass.tpe) {
if (r.tpe <:< BoxedNumberClass.tpe) platform.externalEqualsNumNum
- else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumObject // will be externalEqualsNumChar in 2.12, SI-9030
+ else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumChar
else platform.externalEqualsNumObject
} else platform.externalEquals
}
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
index 43bdad5882..7e67b7bec6 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
@@ -68,45 +68,39 @@ case object AnyScalaVersion extends ScalaVersion {
* Factory methods for producing ScalaVersions
*/
object ScalaVersion {
- private val dot = "\\."
- private val dash = "\\-"
- private def not(s:String) = s"[^${s}]"
- private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r
-
- def apply(versionString : String, errorHandler: String => Unit): ScalaVersion = {
- def errorAndValue() = {
- errorHandler(
- s"There was a problem parsing ${versionString}. " +
- "Versions should be in the form major[.minor[.revision]] " +
- "where each part is a positive number, as in 2.10.1. " +
- "The minor and revision parts are optional."
- )
- AnyScalaVersion
- }
+ private val dot = """\."""
+ private val dash = "-"
+ private val vchar = """\d""" //"[^-+.]"
+ private val vpat = s"(?s)($vchar+)(?:$dot($vchar+)(?:$dot($vchar+)(?:$dash(.*))?)?)?".r
+ private val rcpat = """(?i)rc(\d*)""".r
+ private val mspat = """(?i)m(\d*)""".r
+
+ def apply(versionString: String, errorHandler: String => Unit): ScalaVersion = {
+ def error() = errorHandler(
+ s"There was a problem parsing ${versionString}. " +
+ "Versions should be in the form major[.minor[.revision]] " +
+ "where each part is a positive number, as in 2.10.1. " +
+ "The minor and revision parts are optional."
+ )
def toInt(s: String) = s match {
case null | "" => 0
- case _ => s.toInt
+ case _ => s.toInt
}
- def isInt(s: String) = util.Try(toInt(s)).isSuccess
-
def toBuild(s: String) = s match {
case null | "FINAL" => Final
- case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2)))
- case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1)))
- case _ => Development(s)
+ case rcpat(i) => RC(toInt(i))
+ case mspat(i) => Milestone(toInt(i))
+ case _ /* | "" */ => Development(s)
}
- try versionString match {
+ versionString match {
case "none" => NoScalaVersion
- case "any" => AnyScalaVersion
- case R(_, majorS, _, minorS, _, revS, _, buildS) =>
+ case "any" => AnyScalaVersion
+ case vpat(majorS, minorS, revS, buildS) =>
SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))
- case _ =>
- errorAndValue()
- } catch {
- case e: NumberFormatException => errorAndValue()
+ case _ => error() ; AnyScalaVersion
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 5c72bb3258..f686df60fd 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -339,8 +339,8 @@ abstract class Erasure extends AddInterfaces
buf append (if (restpe.typeSymbol == UnitClass || sym0.isConstructor) VOID_TAG.toString else jsig(restpe))
buf.toString
- case RefinedType(parent :: _, decls) =>
- boxedSig(parent)
+ case RefinedType(parents, decls) =>
+ boxedSig(intersectionDominator(parents))
case ClassInfoType(parents, _, _) =>
superSig(parents)
case AnnotatedType(_, atp) =>
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 6225b486c2..f3cab8184c 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -446,8 +446,10 @@ abstract class ExplicitOuter extends InfoTransform
//
// See SI-6552 for an example of why `sym.owner.enclMethod hasAnnotation ScalaInlineClass`
// is not suitable; if we make a method-local class non-private, it mangles outer pointer names.
- if (currentClass != sym.owner ||
- (closestEnclMethod(currentOwner) hasAnnotation ScalaInlineClass))
+ def enclMethodIsInline = closestEnclMethod(currentOwner) hasAnnotation ScalaInlineClass
+ // SI-8710 The extension method condition reflects our knowledge that a call to `new Meter(12).privateMethod`
+ // will later be rewritten (in erasure) to `Meter.privateMethod$extension(12)`.
+ if ((currentClass != sym.owner || enclMethodIsInline) && !sym.isMethodWithExtension)
sym.makeNotPrivate(sym.owner)
val qsym = qual.tpe.widen.typeSymbol
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 116047a2ad..6349fc3fb9 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -208,7 +208,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
def makeExtensionMethodSymbol = {
val extensionName = extensionNames(origMeth).head.toTermName
val extensionMeth = (
- companion.moduleClass.newMethod(extensionName, tree.pos.focus, origMeth.flags & ~OVERRIDE & ~PROTECTED & ~LOCAL | FINAL)
+ companion.moduleClass.newMethod(extensionName, tree.pos.focus, origMeth.flags & ~OVERRIDE & ~PROTECTED & ~PRIVATE & ~LOCAL | FINAL)
setAnnotations origMeth.annotations
)
origMeth.removeAnnotation(TailrecClass) // it's on the extension method, now.
diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
index 56ed0ee16c..2f4771e9d4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
@@ -75,7 +75,7 @@ abstract class ConstantFolder {
case nme.AND => Constant(x.booleanValue & y.booleanValue)
case nme.EQ => Constant(x.booleanValue == y.booleanValue)
case nme.NE => Constant(x.booleanValue != y.booleanValue)
- case _ => null
+ case _ => null
}
private def foldSubrangeOp(op: Name, x: Constant, y: Constant): Constant = op match {
case nme.OR => Constant(x.intValue | y.intValue)
@@ -95,14 +95,20 @@ abstract class ConstantFolder {
case nme.MUL => Constant(x.intValue * y.intValue)
case nme.DIV => Constant(x.intValue / y.intValue)
case nme.MOD => Constant(x.intValue % y.intValue)
- case _ => null
+ case _ => null
}
private def foldLongOp(op: Name, x: Constant, y: Constant): Constant = op match {
case nme.OR => Constant(x.longValue | y.longValue)
case nme.XOR => Constant(x.longValue ^ y.longValue)
case nme.AND => Constant(x.longValue & y.longValue)
- case nme.LSL => Constant(x.longValue << y.longValue)
+ case nme.LSL if x.tag <= IntTag
+ => Constant(x.intValue << y.longValue)
+ case nme.LSL => Constant(x.longValue << y.longValue)
+ case nme.LSR if x.tag <= IntTag
+ => Constant(x.intValue >>> y.longValue)
case nme.LSR => Constant(x.longValue >>> y.longValue)
+ case nme.ASR if x.tag <= IntTag
+ => Constant(x.intValue >> y.longValue)
case nme.ASR => Constant(x.longValue >> y.longValue)
case nme.EQ => Constant(x.longValue == y.longValue)
case nme.NE => Constant(x.longValue != y.longValue)
@@ -115,7 +121,7 @@ abstract class ConstantFolder {
case nme.MUL => Constant(x.longValue * y.longValue)
case nme.DIV => Constant(x.longValue / y.longValue)
case nme.MOD => Constant(x.longValue % y.longValue)
- case _ => null
+ case _ => null
}
private def foldFloatOp(op: Name, x: Constant, y: Constant): Constant = op match {
case nme.EQ => Constant(x.floatValue == y.floatValue)
@@ -129,7 +135,7 @@ abstract class ConstantFolder {
case nme.MUL => Constant(x.floatValue * y.floatValue)
case nme.DIV => Constant(x.floatValue / y.floatValue)
case nme.MOD => Constant(x.floatValue % y.floatValue)
- case _ => null
+ case _ => null
}
private def foldDoubleOp(op: Name, x: Constant, y: Constant): Constant = op match {
case nme.EQ => Constant(x.doubleValue == y.doubleValue)
@@ -143,7 +149,7 @@ abstract class ConstantFolder {
case nme.MUL => Constant(x.doubleValue * y.doubleValue)
case nme.DIV => Constant(x.doubleValue / y.doubleValue)
case nme.MOD => Constant(x.doubleValue % y.doubleValue)
- case _ => null
+ case _ => null
}
private def foldBinop(op: Name, x: Constant, y: Constant): Constant = {
@@ -162,7 +168,7 @@ abstract class ConstantFolder {
case _ => null
}
catch {
- case ex: ArithmeticException => null
+ case _: ArithmeticException => null
}
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index ca25e59c4b..98289f6a3c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -809,12 +809,8 @@ trait Contexts { self: Analyzer =>
private def collectImplicitImports(imp: ImportInfo): List[ImplicitInfo] = {
val qual = imp.qual
- val pre =
- if (qual.tpe.typeSymbol.isPackageClass)
- // SI-6225 important if the imported symbol is inherited by the the package object.
- singleType(qual.tpe, qual.tpe member nme.PACKAGE)
- else
- qual.tpe
+ val qualSym = qual.tpe.typeSymbol
+ val pre = qual.tpe
def collect(sels: List[ImportSelector]): List[ImplicitInfo] = sels match {
case List() =>
List()
@@ -885,7 +881,8 @@ trait Contexts { self: Analyzer =>
Some(collectImplicitImports(imports.head))
} else if (owner.isPackageClass) {
// the corresponding package object may contain implicit members.
- Some(collectImplicits(owner.tpe.implicitMembers, owner.tpe))
+ val pre = owner.packageObject.typeOfThis
+ Some(collectImplicits(pre.implicitMembers, pre))
} else Some(Nil)
}
@@ -955,52 +952,11 @@ trait Contexts { self: Analyzer =>
private def importedAccessibleSymbol(imp: ImportInfo, name: Name, requireExplicit: Boolean): Symbol =
imp.importedSymbol(name, requireExplicit) filter (s => isAccessible(s, imp.qual.tpe, superAccess = false))
- /** Is `sym` defined in package object of package `pkg`?
- * Since sym may be defined in some parent of the package object,
- * we cannot inspect its owner only; we have to go through the
- * info of the package object. However to avoid cycles we'll check
- * what other ways we can before pushing that way.
+ /** Must `sym` be defined in the package object of package `pkg`, if
+ * it is selected from a prefix with `pkg` as its type symbol?
*/
- def isInPackageObject(sym: Symbol, pkg: Symbol): Boolean = {
- def uninitialized(what: String) = {
- log(s"Cannot look for $sym in package object of $pkg; $what is not initialized.")
- false
- }
- def pkgClass = if (pkg.isTerm) pkg.moduleClass else pkg
- def matchesInfo = (
- // need to be careful here to not get a cyclic reference during bootstrap
- if (pkg.isInitialized) {
- val module = pkg.info member nme.PACKAGEkw
- if (module.isInitialized)
- module.info.member(sym.name).alternatives contains sym
- else
- uninitialized("" + module)
- }
- else uninitialized("" + pkg)
- )
- def inPackageObject(sym: Symbol) = (
- // To be in the package object, one of these must be true:
- // 1) sym.owner is a package object class, and sym.owner.owner is the package class for `pkg`
- // 2) sym.owner is inherited by the correct package object class
- // We try to establish 1) by inspecting the owners directly, and then we try
- // to rule out 2), and only if both those fail do we resort to looking in the info.
- !sym.hasPackageFlag && sym.owner.exists && (
- if (sym.owner.isPackageObjectClass)
- sym.owner.owner == pkgClass
- else
- !sym.owner.isPackageClass && matchesInfo
- )
- )
-
- // An overloaded symbol might not have the expected owner!
- // The alternatives must be inspected directly.
- pkgClass.isPackageClass && (
- if (sym.isOverloaded)
- sym.alternatives forall (isInPackageObject(_, pkg))
- else
- inPackageObject(sym)
- )
- }
+ def isInPackageObject(sym: Symbol, pkg: Symbol): Boolean =
+ pkg.isPackage && sym.owner != pkg
def isNameInScope(name: Name) = lookupSymbol(name, _ => true).isSuccess
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index d3cd26f256..bf71ca5379 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -1012,15 +1012,12 @@ trait Implicits {
}
case None =>
if (pre.isStable && !pre.typeSymbol.isExistentiallyBound) {
- val companion = companionSymbolOf(sym, context)
- companion.moduleClass match {
- case mc: ModuleClassSymbol =>
- val infos =
- for (im <- mc.implicitMembers.toList) yield new ImplicitInfo(im.name, singleType(pre, companion), im)
- if (infos.nonEmpty)
- infoMap += (sym -> infos)
- case _ =>
- }
+ val pre1 =
+ if (sym.isPackageClass) sym.packageObject.typeOfThis
+ else singleType(pre, companionSymbolOf(sym, context))
+ val infos = pre1.implicitMembers.iterator.map(mem => new ImplicitInfo(mem.name, pre1, mem)).toList
+ if (infos.nonEmpty)
+ infoMap += (sym -> infos)
}
val bts = tp.baseTypeSeq
var i = 1
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index cf97474d9a..b9ccf0977c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -1447,7 +1447,7 @@ trait Infer extends Checkable {
log(s"Attaching AntiPolyType-carrying overloaded type to $sym")
// Multiple alternatives which are within bounds; spin up an
// overloaded type which carries an "AntiPolyType" as a prefix.
- val tparams = newAsSeenFromMap(pre, hd.owner) mapOver hd.typeParams
+ val tparams = new AsSeenFromMap(pre, hd.owner) mapOver hd.typeParams
val bounds = tparams map (_.tpeHK) // see e.g., #1236
val tpe = PolyType(tparams, OverloadedType(AntiPolyType(pre, bounds), alts))
finish(sym setInfo tpe, tpe)
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 39cd610b1c..1bc5daac65 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -9,6 +9,7 @@ package typechecker
import symtab.Flags._
import scala.collection.mutable
import scala.reflect.ClassTag
+import PartialFunction.{ cond => when }
/**
* @author Lukas Rytz
@@ -551,64 +552,73 @@ trait NamesDefaults { self: Analyzer =>
}
}
- /**
- * Removes name assignments from args. Additionally, returns an array mapping
- * argument indices from call-site-order to definition-site-order.
+ /** Removes name assignments from args. Additionally, returns an array mapping
+ * argument indices from call-site-order to definition-site-order.
*
- * Verifies that names are not specified twice, positional args don't appear
- * after named ones.
+ * Verifies that names are not specified twice, and positional args don't appear after named ones.
*/
def removeNames(typer: Typer)(args: List[Tree], params: List[Symbol]): (List[Tree], Array[Int]) = {
implicit val context0 = typer.context
- // maps indices from (order written by user) to (order of definition)
- val argPos = Array.fill(args.length)(-1)
- var positionalAllowed = true
- val namelessArgs = mapWithIndex(args) { (arg, argIndex) =>
- arg match {
- case arg @ AssignOrNamedArg(Ident(name), rhs) =>
- def matchesName(param: Symbol) = !param.isSynthetic && (
- (param.name == name) || (param.deprecatedParamName match {
- case Some(`name`) =>
- context0.deprecationWarning(arg.pos, param,
- s"the parameter name $name has been deprecated. Use ${param.name} instead.")
- true
- case _ => false
- })
- )
- val paramPos = params indexWhere matchesName
- if (paramPos == -1) {
- if (positionalAllowed) {
- argPos(argIndex) = argIndex
- // prevent isNamed from being true when calling doTypedApply recursively,
- // treat the arg as an assignment of type Unit
- Assign(arg.lhs, rhs) setPos arg.pos
- }
- else UnknownParameterNameNamesDefaultError(arg, name)
- }
- else if (argPos contains paramPos) {
+ def matchesName(param: Symbol, name: Name, argIndex: Int) = {
+ def warn(w: String) = context0.deprecationWarning(args(argIndex).pos, param, w)
+ def checkDeprecation(anonOK: Boolean) =
+ when (param.deprecatedParamName) {
+ case Some(`name`) => true
+ case Some(nme.NO_NAME) => anonOK
+ }
+ def checkName = {
+ val res = param.name == name
+ if (res && checkDeprecation(true)) warn(s"naming parameter $name has been deprecated.")
+ res
+ }
+ def checkAltName = {
+ val res = checkDeprecation(false)
+ if (res) warn(s"the parameter name $name has been deprecated. Use ${param.name} instead.")
+ res
+ }
+ !param.isSynthetic && (checkName || checkAltName)
+ }
+ // argPos maps indices from (order written by user) to (order of definition)
+ val argPos = Array.fill(args.length)(-1)
+ val namelessArgs = {
+ var positionalAllowed = true
+ def stripNamedArg(arg: AssignOrNamedArg, argIndex: Int): Tree = {
+ val AssignOrNamedArg(Ident(name), rhs) = arg
+ params indexWhere (p => matchesName(p, name, argIndex)) match {
+ case -1 if positionalAllowed =>
+ // prevent isNamed from being true when calling doTypedApply recursively,
+ // treat the arg as an assignment of type Unit
+ Assign(arg.lhs, rhs) setPos arg.pos
+ case -1 =>
+ UnknownParameterNameNamesDefaultError(arg, name)
+ case paramPos if argPos contains paramPos =>
val existingArgIndex = argPos.indexWhere(_ == paramPos)
- val otherName = args(paramPos) match {
- case AssignOrNamedArg(Ident(oName), rhs) if oName != name => Some(oName)
- case _ => None
+ val otherName = Some(args(paramPos)) collect {
+ case AssignOrNamedArg(Ident(oName), _) if oName != name => oName
}
DoubleParamNamesDefaultError(arg, name, existingArgIndex+1, otherName)
- } else if (isAmbiguousAssignment(typer, params(paramPos), arg))
+ case paramPos if isAmbiguousAssignment(typer, params(paramPos), arg) =>
AmbiguousReferenceInNamesDefaultError(arg, name)
- else {
- // if the named argument is on the original parameter
- // position, positional after named is allowed.
- if (argIndex != paramPos)
- positionalAllowed = false
- argPos(argIndex) = paramPos
+ case paramPos if paramPos != argIndex =>
+ positionalAllowed = false // named arg is not in original parameter order: require names after this
+ argPos(argIndex) = paramPos // fix up the arg position
rhs
- }
- case _ =>
- argPos(argIndex) = argIndex
- if (positionalAllowed) arg
- else PositionalAfterNamedNamesDefaultError(arg)
+ case _ => rhs
+ }
+ }
+ mapWithIndex(args) {
+ case (arg: AssignOrNamedArg, argIndex) =>
+ val t = stripNamedArg(arg, argIndex)
+ if (!t.isErroneous && argPos(argIndex) < 0) argPos(argIndex) = argIndex
+ t
+ case (arg, argIndex) =>
+ if (positionalAllowed) {
+ argPos(argIndex) = argIndex
+ arg
+ } else
+ PositionalAfterNamedNamesDefaultError(arg)
}
}
-
(namelessArgs, argPos)
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index d2046a158c..8f13507fa9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -87,24 +87,6 @@ trait SyntheticMethods extends ast.TreeDSL {
def accessors = clazz.caseFieldAccessors
val arity = accessors.size
- // If this is ProductN[T1, T2, ...], accessorLub is the lub of T1, T2, ..., .
- // !!! Hidden behind -Xexperimental due to bummer type inference bugs.
- // Refining from Iterator[Any] leads to types like
- //
- // Option[Int] { def productIterator: Iterator[String] }
- //
- // appearing legitimately, but this breaks invariant places
- // like Tags and Arrays which are not robust and infer things
- // which they shouldn't.
- val accessorLub = (
- if (settings.Xexperimental) {
- global.lub(accessors map (_.tpe.finalResultType)) match {
- case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents)
- case tp => tp
- }
- }
- else AnyTpe
- )
def forwardToRuntime(method: Symbol): Tree =
forwardMethod(method, getMember(ScalaRunTimeModule, (method.name prepend "_")))(mkThis :: _)
@@ -125,8 +107,8 @@ trait SyntheticMethods extends ast.TreeDSL {
}
}
def productIteratorMethod = {
- createMethod(nme.productIterator, iteratorOfType(accessorLub))(_ =>
- gen.mkMethodCall(ScalaRunTimeModule, nme.typedProductIterator, List(accessorLub), List(mkThis))
+ createMethod(nme.productIterator, iteratorOfType(AnyTpe))(_ =>
+ gen.mkMethodCall(ScalaRunTimeModule, nme.typedProductIterator, List(AnyTpe), List(mkThis))
)
}
@@ -246,7 +228,7 @@ trait SyntheticMethods extends ast.TreeDSL {
List(
Product_productPrefix -> (() => constantNullary(nme.productPrefix, clazz.name.decode)),
Product_productArity -> (() => constantNullary(nme.productArity, arity)),
- Product_productElement -> (() => perElementMethod(nme.productElement, accessorLub)(mkThisSelect)),
+ Product_productElement -> (() => perElementMethod(nme.productElement, AnyTpe)(mkThisSelect)),
Product_iterator -> (() => productIteratorMethod),
Product_canEqual -> (() => canEqualMethod)
// This is disabled pending a reimplementation which doesn't add any
@@ -380,7 +362,14 @@ trait SyntheticMethods extends ast.TreeDSL {
for (ddef @ DefDef(_, _, _, _, _, _) <- templ.body ; if isRewrite(ddef.symbol)) {
val original = ddef.symbol
- val newAcc = deriveMethod(ddef.symbol, name => context.unit.freshTermName(name + "$")) { newAcc =>
+ val i = original.owner.caseFieldAccessors.indexOf(original)
+ def freshAccessorName = {
+ devWarning(s"Unable to find $original among case accessors of ${original.owner}: ${original.owner.caseFieldAccessors}")
+ context.unit.freshTermName(original.name + "$")
+ }
+ def nameSuffixedByParamIndex = original.name.append(nme.CASE_ACCESSOR + "$" + i).toTermName
+ val newName = if (i < 0) freshAccessorName else nameSuffixedByParamIndex
+ val newAcc = deriveMethod(ddef.symbol, name => newName) { newAcc =>
newAcc.makePublic
newAcc resetFlag (ACCESSOR | PARAMACCESSOR | OVERRIDE)
ddef.rhs.duplicate
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 758425aad5..8cd6b30258 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -542,7 +542,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
val qual = typedQualifier { atPos(tree.pos.makeTransparent) {
tree match {
- case Ident(_) => Ident(rootMirror.getPackageObjectWithMember(pre, sym))
+ case Ident(_) =>
+ val packageObject =
+ if (sym.owner.isModuleClass) sym.owner.sourceModule // historical optimization, perhaps no longer needed
+ else pre.typeSymbol.packageObject
+ Ident(packageObject)
case Select(qual, _) => Select(qual, nme.PACKAGEkw)
case SelectFromTypeTree(qual, _) => Select(qual, nme.PACKAGEkw)
}
@@ -928,24 +932,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def insertApply(): Tree = {
assert(!context.inTypeConstructorAllowed, mode) //@M
val adapted = adaptToName(tree, nme.apply)
- def stabilize0(pre: Type): Tree = stabilize(adapted, pre, MonoQualifierModes, WildcardType)
-
- // TODO reconcile the overlap between Typers#stablize and TreeGen.stabilize
- val qual = adapted match {
- case This(_) =>
- gen.stabilize(adapted)
- case Ident(_) =>
- val owner = adapted.symbol.owner
- val pre =
- if (owner.isPackageClass) owner.thisType
- else if (owner.isClass) context.enclosingSubClassContext(owner).prefix
- else NoPrefix
- stabilize0(pre)
- case Select(qualqual, _) =>
- stabilize0(qualqual.tpe)
- case other =>
- other
- }
+ val qual = gen.stabilize(adapted)
typedPos(tree.pos, mode, pt) {
Select(qual setPos tree.pos.makeTransparent, nme.apply)
}
@@ -2228,7 +2215,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val allParams = meth.paramss.flatten
for (p <- allParams) {
for (n <- p.deprecatedParamName) {
- if (allParams.exists(p1 => p1.name == n || (p != p1 && p1.deprecatedParamName.exists(_ == n))))
+ if (allParams.exists(p1 => p != p1 && (p1.name == n || p1.deprecatedParamName.exists(_ == n))))
DeprecatedParamNameError(p, n)
}
}
@@ -5229,7 +5216,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if (refTyped.isErrorTyped) {
setError(tree)
} else {
- tree setType refTyped.tpe.resultType
+ tree setType refTyped.tpe.resultType.deconst
if (refTyped.isErrorTyped || treeInfo.admitsTypeSelection(refTyped)) tree
else UnstableTreeError(tree)
}
diff --git a/src/compiler/scala/tools/reflect/WrappedProperties.scala b/src/compiler/scala/tools/reflect/WrappedProperties.scala
index 523287fc66..348d000d15 100644
--- a/src/compiler/scala/tools/reflect/WrappedProperties.scala
+++ b/src/compiler/scala/tools/reflect/WrappedProperties.scala
@@ -30,9 +30,10 @@ trait WrappedProperties extends PropertiesTrait {
def systemProperties: List[(String, String)] = {
import scala.collection.JavaConverters._
wrap {
+ // SI-7269,7775 Avoid `ConcurrentModificationException` and nulls if another thread modifies properties
val props = System.getProperties
- // SI-7269 Be careful to avoid `ConcurrentModificationException` if another thread modifies the properties map
- props.stringPropertyNames().asScala.toList.map(k => (k, props.get(k).asInstanceOf[String]))
+ val it = props.stringPropertyNames().asScala.iterator map (k => (k, props getProperty k)) filter (_._2 ne null)
+ it.toList
} getOrElse Nil
}
}
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
index 8e5b1e0a5c..f122437b63 100644
--- a/src/compiler/scala/tools/util/PathResolver.scala
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -52,7 +52,7 @@ object PathResolver {
*/
object Environment {
private def searchForBootClasspath =
- systemProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse ""
+ systemProperties collectFirst { case (k, v) if k endsWith ".boot.class.path" => v } getOrElse ""
/** Environment variables which java pays attention to so it
* seems we do as well.
diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath
index 7f28868d95..7e2f119193 100644
--- a/src/eclipse/partest/.classpath
+++ b/src/eclipse/partest/.classpath
@@ -5,7 +5,7 @@
<classpathentry combineaccessrules="false" kind="src" path="/repl"/>
<classpathentry kind="var" path="M2_REPO/com/googlecode/java-diff-utils/diffutils/1.3.0/diffutils-1.3.0.jar"/>
<classpathentry kind="var" path="M2_REPO/org/scala-sbt/test-interface/1.0/test-interface-1.0.jar"/>
- <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11.0-M7/1.0.0-RC8/scala-partest_2.11.0-M7-1.0.0-RC8.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11/1.0.0/scala-partest_2.11-1.0.0.jar"/>
<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
diff --git a/src/eclipse/repl/.classpath b/src/eclipse/repl/.classpath
index 8ff9aabfbf..cbaabb9af1 100644
--- a/src/eclipse/repl/.classpath
+++ b/src/eclipse/repl/.classpath
@@ -3,7 +3,6 @@
<classpathentry kind="src" path="repl"/>
<classpathentry combineaccessrules="false" kind="src" path="/asm"/>
<classpathentry kind="var" path="M2_REPO/jline/jline/2.12/jline-2.12.jar"/>
- <!-- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/repl/jline-2.12.jar"/> -->
<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
diff --git a/src/eclipse/scaladoc/.classpath b/src/eclipse/scaladoc/.classpath
index c8f0e89b8a..ee6427176a 100644
--- a/src/eclipse/scaladoc/.classpath
+++ b/src/eclipse/scaladoc/.classpath
@@ -6,8 +6,8 @@
<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
<classpathentry combineaccessrules="false" kind="src" path="/partest-extras"/>
- <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-xml_2.11.0-M7/1.0.0-RC7/scala-xml_2.11.0-M7-1.0.0-RC7.jar"/>
- <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-parser-combinators_2.11.0-M7/1.0.0-RC5/scala-parser-combinators_2.11.0-M7-1.0.0-RC5.jar"/>
- <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11.0-M7/1.0.0-RC8/scala-partest_2.11.0-M7-1.0.0-RC8.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-xml_2.11/1.0.2/scala-xml_2.11-1.0.2.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.1/scala-parser-combinators_2.11-1.0.1.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11/1.0.0/scala-partest_2.11-1.0.0.jar"/>
<classpathentry kind="output" path="build-quick-scaladoc"/>
</classpath>
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
index 6578504155..9bd378c61c 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
@@ -23,6 +23,7 @@ import java.util.concurrent.TimeUnit;
* @since 1.8
* @author Doug Lea
*/
+@Deprecated
/*public*/ abstract class CountedCompleter<T> extends ForkJoinTask<T> {
private static final long serialVersionUID = 5232453752276485070L;
@@ -471,6 +472,7 @@ import java.util.concurrent.TimeUnit;
* @since 1.7
* @author Doug Lea
*/
+@Deprecated
public class ForkJoinPool extends AbstractExecutorService {
/*
@@ -3578,6 +3580,7 @@ public class ForkJoinPool extends AbstractExecutorService {
* }
* }}</pre>
*/
+ @Deprecated
public static interface ManagedBlocker {
/**
* Possibly blocks the current thread, for example waiting for
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
index fd1e132b07..b4f5c24ca9 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
@@ -180,6 +180,7 @@ import java.lang.reflect.Constructor;
* @since 1.7
* @author Doug Lea
*/
+@Deprecated
public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
/*
@@ -391,6 +392,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* any ForkJoinPool will call helpExpungeStaleExceptions when its
* pool becomes isQuiescent.
*/
+ @Deprecated
static final class ExceptionNode extends WeakReference<ForkJoinTask<?>> {
final Throwable ex;
ExceptionNode next;
@@ -1330,6 +1332,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* to be compliant with AbstractExecutorService constraints
* when used in ForkJoinPool.
*/
+ @Deprecated
static final class AdaptedRunnable<T> extends ForkJoinTask<T>
implements RunnableFuture<T> {
final Runnable runnable;
@@ -1349,6 +1352,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
/**
* Adaptor for Runnables without results
*/
+ @Deprecated
static final class AdaptedRunnableAction extends ForkJoinTask<Void>
implements RunnableFuture<Void> {
final Runnable runnable;
@@ -1366,6 +1370,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
/**
* Adaptor for Callables
*/
+ @Deprecated
static final class AdaptedCallable<T> extends ForkJoinTask<T>
implements RunnableFuture<T> {
final Callable<? extends T> callable;
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
index e62fc6eb71..e00fb5cc43 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
@@ -20,6 +20,7 @@ package scala.concurrent.forkjoin;
* @since 1.7
* @author Doug Lea
*/
+@Deprecated
public class ForkJoinWorkerThread extends Thread {
/*
* ForkJoinWorkerThreads are managed by ForkJoinPools and perform
diff --git a/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java b/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java
index 07e81b395d..47d52af895 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java
@@ -53,6 +53,7 @@ import java.util.concurrent.locks.LockSupport;
* @author Doug Lea
* @param <E> the type of elements held in this collection
*/
+@Deprecated
public class LinkedTransferQueue<E> extends AbstractQueue<E>
implements TransferQueue<E>, java.io.Serializable {
private static final long serialVersionUID = -3223113410248163686L;
@@ -416,6 +417,7 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
* unnecessary ordering constraints: Writes that are intrinsically
* ordered wrt other accesses or CASes use simple relaxed forms.
*/
+ @Deprecated
static final class Node {
final boolean isData; // false if this is a request node
volatile Object item; // initially non-null if isData; CASed to match
@@ -789,6 +791,7 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
return count;
}
+ @Deprecated
final class Itr implements Iterator<E> {
private Node nextNode; // next node to return item for
private E nextItem; // the corresponding item
diff --git a/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java b/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java
index 1e7cdd952d..f4a77f0f61 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java
@@ -133,6 +133,7 @@ package scala.concurrent.forkjoin;
* @since 1.7
* @author Doug Lea
*/
+@Deprecated
public abstract class RecursiveAction extends ForkJoinTask<Void> {
private static final long serialVersionUID = 5232453952276485070L;
diff --git a/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java b/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
index d1e1547143..097b7cda1f 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
@@ -36,6 +36,7 @@ package scala.concurrent.forkjoin;
* @since 1.7
* @author Doug Lea
*/
+@Deprecated
public abstract class RecursiveTask<V> extends ForkJoinTask<V> {
private static final long serialVersionUID = 5232453952276485270L;
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java b/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java
index 19237c9092..3ea1af66bc 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java
@@ -32,6 +32,7 @@ import java.util.Random;
* @since 1.7
* @author Doug Lea
*/
+@Deprecated
public class ThreadLocalRandom extends Random {
// same constants as Random, but must be redeclared because private
private static final long multiplier = 0x5DEECE66DL;
@@ -80,6 +81,7 @@ public class ThreadLocalRandom extends Random {
*
* @return the current thread's {@code ThreadLocalRandom}
*/
+ @Deprecated
public static ThreadLocalRandom current() {
return localRandom.get();
}
diff --git a/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java b/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java
index 7d149c7ae5..4fcd8ea601 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java
@@ -37,6 +37,7 @@ import java.util.concurrent.*;
* @author Doug Lea
* @param <E> the type of elements held in this collection
*/
+@Deprecated
public interface TransferQueue<E> extends BlockingQueue<E> {
/**
* Transfers the element to a waiting consumer immediately, if possible.
diff --git a/src/forkjoin/scala/concurrent/util/Unsafe.java b/src/forkjoin/scala/concurrent/util/Unsafe.java
index ef893c94d9..d82e4bbdd5 100644
--- a/src/forkjoin/scala/concurrent/util/Unsafe.java
+++ b/src/forkjoin/scala/concurrent/util/Unsafe.java
@@ -7,14 +7,12 @@
\* */
package scala.concurrent.util;
-
-
-
import java.lang.reflect.Field;
-
+@Deprecated
public final class Unsafe {
+ @Deprecated
public final static sun.misc.Unsafe instance;
static {
try {
diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala
index 2d09435f60..b6290eb3d3 100644
--- a/src/interactive/scala/tools/nsc/interactive/Global.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -19,6 +19,8 @@ import scala.annotation.{ elidable, tailrec }
import scala.language.implicitConversions
import scala.tools.nsc.typechecker.Typers
import scala.util.control.Breaks._
+import java.util.concurrent.ConcurrentHashMap
+import scala.collection.JavaConverters.mapAsScalaMapConverter
/**
* This trait allows the IDE to have an instance of the PC that
@@ -157,33 +159,20 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
override def forInteractive = true
override protected def synchronizeNames = true
- override def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap =
- new InteractiveAsSeenFromMap(pre, clazz)
-
- class InteractiveAsSeenFromMap(pre: Type, clazz: Symbol) extends AsSeenFromMap(pre, clazz) {
- /** The method formerly known as 'instParamsRelaxed' goes here if it's still necessary,
- * which it is currently supposed it is not.
- *
- * If it is, change AsSeenFromMap method correspondingTypeArgument to call an overridable
- * method rather than aborting in the failure case.
- */
- }
-
/** A map of all loaded files to the rich compilation units that correspond to them.
*/
- val unitOfFile = new LinkedHashMap[AbstractFile, RichCompilationUnit] with
- SynchronizedMap[AbstractFile, RichCompilationUnit] {
+ val unitOfFile = mapAsScalaMapConverter(new ConcurrentHashMap[AbstractFile, RichCompilationUnit] {
override def put(key: AbstractFile, value: RichCompilationUnit) = {
val r = super.put(key, value)
- if (r.isEmpty) debugLog("added unit for "+key)
+ if (r == null) debugLog("added unit for "+key)
r
}
- override def remove(key: AbstractFile) = {
+ override def remove(key: Any) = {
val r = super.remove(key)
- if (r.nonEmpty) debugLog("removed unit for "+key)
+ if (r != null) debugLog("removed unit for "+key)
r
}
- }
+ }).asScala
/** A set containing all those files that need to be removed
* Units are removed by getUnit, typically once a unit is finished compiled.
@@ -1101,7 +1090,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
val implicitlyAdded = viaView != NoSymbol
members.add(sym, pre, implicitlyAdded) { (s, st) =>
new TypeMember(s, st,
- context.isAccessible(if (s.hasGetter) s.getter(s.owner) else s, pre, superAccess && !implicitlyAdded),
+ context.isAccessible(if (s.hasGetter) s.getterIn(s.owner) else s, pre, superAccess && !implicitlyAdded),
inherited,
viaView)
}
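
The hunk above swaps the old `LinkedHashMap with SynchronizedMap` backing `unitOfFile` for a `ConcurrentHashMap` exposed through `asScala`; the overridden `put`/`remove` keep the debug logging, and `remove` now takes `Any` because it overrides `java.util.Map#remove(Object)`. A minimal sketch of the same wrapping pattern outside the compiler (all names below, such as SourceFile and CompilationUnit, are illustrative stand-ins, not compiler types):

    import java.util.concurrent.ConcurrentHashMap
    import scala.collection.JavaConverters.mapAsScalaMapConverter

    object UnitMapSketch {
      // Hypothetical stand-ins for AbstractFile and RichCompilationUnit.
      final case class SourceFile(name: String)
      final case class CompilationUnit(id: Int)

      // Subclass the Java map to keep the put/remove logging hooks,
      // then expose it to Scala code through asScala.
      val unitOfFile = mapAsScalaMapConverter(new ConcurrentHashMap[SourceFile, CompilationUnit] {
        override def put(key: SourceFile, value: CompilationUnit) = {
          val r = super.put(key, value)
          if (r == null) println(s"added unit for $key")   // ConcurrentHashMap reports "was absent" as null, not None
          r
        }
        override def remove(key: Any) = {                  // overrides java.util.Map#remove(Object)
          val r = super.remove(key)
          if (r != null) println(s"removed unit for $key")
          r
        }
      }).asScala

      def main(args: Array[String]): Unit = {
        unitOfFile(SourceFile("A.scala")) = CompilationUnit(1)  // prints: added unit for SourceFile(A.scala)
        unitOfFile -= SourceFile("A.scala")                     // prints: removed unit for SourceFile(A.scala)
      }
    }
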
diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala
index fba759eb32..98dd35d306 100644
--- a/src/library/scala/PartialFunction.scala
+++ b/src/library/scala/PartialFunction.scala
@@ -161,10 +161,10 @@ trait PartialFunction[-A, +B] extends (A => B) { self =>
object PartialFunction {
/** Composite function produced by `PartialFunction#orElse` method
*/
- private class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B]) extends PartialFunction[A, B] {
+ private class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B]) extends scala.runtime.AbstractPartialFunction[A, B] {
def isDefinedAt(x: A) = f1.isDefinedAt(x) || f2.isDefinedAt(x)
- def apply(x: A): B = f1.applyOrElse(x, f2)
+ override def apply(x: A): B = f1.applyOrElse(x, f2)
override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = {
val z = f1.applyOrElse(x, checkFallback[B])
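
`OrElse` now extends `scala.runtime.AbstractPartialFunction`, so `apply` becomes an override of the `applyOrElse`-based default there; the observable behaviour of `orElse` composition is unchanged. A short usage sketch (names and values are illustrative):

    object OrElseSketch extends App {
      val even:  PartialFunction[Int, String] = { case n if n % 2 == 0 => s"$n is even" }
      val small: PartialFunction[Int, String] = { case n if n < 10     => s"$n is small" }

      val combined = even orElse small        // an OrElse under the hood

      println(combined(4))                    // "4 is even"   (first function wins)
      println(combined(7))                    // "7 is small"  (falls through to the second)
      println(combined.isDefinedAt(11))       // false
      // applyOrElse lets callers supply a default without a second isDefinedAt/apply pass
      println(combined.applyOrElse(11, (n: Int) => s"$n is neither"))  // "11 is neither"
    }
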
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index bf7739345e..060ecbfead 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -126,7 +126,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
def optManifest[T](implicit m: OptManifest[T]) = m
// Minor variations on identity functions
- def identity[A](x: A): A = x // @see `conforms` for the implicit version
+ @inline def identity[A](x: A): A = x // @see `conforms` for the implicit version
@inline def implicitly[T](implicit e: T) = e // for summoning implicit values from the nether world -- TODO: when dependent method types are on by default, give this result type `e.type`, so that inliner has better chance of knowing which method to inline in calls like `implicitly[MatchingStrategy[Option]].zero`
@inline def locally[T](x: T): T = x // to communicate intent and avoid unmoored statements
diff --git a/src/library/scala/beans/BeanInfo.scala b/src/library/scala/beans/BeanInfo.scala
index 799e93e71a..d7f0a1618b 100644
--- a/src/library/scala/beans/BeanInfo.scala
+++ b/src/library/scala/beans/BeanInfo.scala
@@ -17,4 +17,5 @@ package scala.beans
*
* @author Ross Judson (rjudson@managedobjects.com)
*/
+@deprecated(message = "the generation of BeanInfo classes is no longer supported", since = "2.12.0")
class BeanInfo extends scala.annotation.Annotation
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index b84d90c51b..c254ed7480 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -69,6 +69,10 @@ trait IterableViewLike[+A,
trait Appended[B >: A] extends super.Appended[B] with Transformed[B] {
def iterator = self.iterator ++ rest
}
+
+ trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B] {
+ def iterator = fst.toIterator ++ self
+ }
trait Filtered extends super.Filtered with Transformed[A] {
def iterator = self.iterator filter pred
@@ -110,6 +114,7 @@ trait IterableViewLike[+A,
} with AbstractTransformed[(A1, B)] with ZippedAll[A1, B]
protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val fst = that } with AbstractTransformed[B] with Prepended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 0783beac0f..34a025e5b8 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -182,7 +182,7 @@ object Iterator {
}
}
def hasNext = (current ne null) && (current.hasNext || advance())
- def next() = if (hasNext) current.next else Iterator.empty.next
+ def next() = if (hasNext) current.next() else Iterator.empty.next()
override def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] =
new ConcatIterator(current, queue :+ (() => that.toIterator))
@@ -191,11 +191,55 @@ object Iterator {
private[scala] final class JoinIterator[+A](lhs: Iterator[A], that: => GenTraversableOnce[A]) extends Iterator[A] {
private[this] lazy val rhs: Iterator[A] = that.toIterator
def hasNext = lhs.hasNext || rhs.hasNext
- def next = if (lhs.hasNext) lhs.next else rhs.next
+ def next() = if (lhs.hasNext) lhs.next() else rhs.next()
override def ++[B >: A](that: => GenTraversableOnce[B]) =
new ConcatIterator(this, Vector(() => that.toIterator))
}
+
+ /** Creates a delegating iterator capped by a limit count. Negative limit means unbounded.
+ * Lazily skip to start on first evaluation. Avoids daisy-chained iterators due to slicing.
+ */
+ private[scala] final class SliceIterator[A](val underlying: Iterator[A], start: Int, limit: Int) extends AbstractIterator[A] {
+ private var remaining = limit
+ private var dropping = start
+ @inline private def unbounded = remaining < 0
+ private def skip(): Unit =
+ while (dropping > 0) {
+ if (underlying.hasNext) {
+ underlying.next()
+ dropping -= 1
+ } else
+ dropping = 0
+ }
+ def hasNext = { skip(); remaining != 0 && underlying.hasNext }
+ def next() = {
+ skip()
+ if (remaining > 0) {
+ remaining -= 1
+ underlying.next()
+ }
+ else if (unbounded) underlying.next()
+ else empty.next()
+ }
+ override protected def sliceIterator(from: Int, until: Int): Iterator[A] = {
+ val lo = from max 0
+ def adjustedBound =
+ if (unbounded) -1
+ else 0 max (remaining - lo)
+ val rest =
+ if (until < 0) adjustedBound // respect current bound, if any
+ else if (until <= lo) 0 // empty
+ else if (unbounded) until - lo // now finite
+ else adjustedBound min (until - lo) // keep lesser bound
+ if (rest == 0) empty
+ else {
+ dropping += lo
+ remaining = rest
+ this
+ }
+ }
+ }
}
import Iterator.empty
@@ -307,11 +351,11 @@ trait Iterator[+A] extends TraversableOnce[A] {
/** Selects first ''n'' values of this iterator.
*
* @param n the number of values to take
- * @return an iterator producing only of the first `n` values of this iterator, or else the
+ * @return an iterator producing only the first `n` values of this iterator, or else the
* whole iterator, if it produces fewer than `n` values.
* @note Reuse: $consumesAndProducesIterator
*/
- def take(n: Int): Iterator[A] = slice(0, n)
+ def take(n: Int): Iterator[A] = sliceIterator(0, n max 0)
/** Advances this iterator past the first ''n'' elements, or the length of the iterator, whichever is smaller.
*
@@ -332,29 +376,24 @@ trait Iterator[+A] extends TraversableOnce[A] {
/** Creates an iterator returning an interval of the values produced by this iterator.
*
* @param from the index of the first element in this iterator which forms part of the slice.
- * @param until the index of the first element following the slice.
+ * If negative, the slice starts at zero.
+ * @param until the index of the first element following the slice. If negative, the slice is empty.
* @return an iterator which advances this iterator past the first `from` elements using `drop`,
* and then takes `until - from` elements, using `take`.
* @note Reuse: $consumesAndProducesIterator
*/
- def slice(from: Int, until: Int): Iterator[A] = {
+ def slice(from: Int, until: Int): Iterator[A] = sliceIterator(from, until max 0)
+
+ /** Creates an optionally bounded slice, unbounded if `until` is negative. */
+ protected def sliceIterator(from: Int, until: Int): Iterator[A] = {
val lo = from max 0
- var toDrop = lo
- while (toDrop > 0 && self.hasNext) {
- self.next()
- toDrop -= 1
- }
+ val rest =
+ if (until < 0) -1 // unbounded
+ else if (until <= lo) 0 // empty
+ else until - lo // finite
- new AbstractIterator[A] {
- private var remaining = until - lo
- def hasNext = remaining > 0 && self.hasNext
- def next(): A =
- if (remaining > 0) {
- remaining -= 1
- self.next()
- }
- else empty.next()
- }
+ if (rest == 0) empty
+ else new Iterator.SliceIterator(this, lo, rest)
}
/** Creates a new iterator that maps all produced values of this iterator
@@ -1155,9 +1194,8 @@ trait Iterator[+A] extends TraversableOnce[A] {
* $willNotTerminateInf
*/
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit = {
- require(start >= 0 && (start < xs.length || xs.length == 0), s"start $start out of range ${xs.length}")
var i = start
- val end = start + math.min(len, xs.length - start)
+ val end = start + math.min(len, xs.length - start)
while (i < end && hasNext) {
xs(i) = next()
i += 1
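
With the new `SliceIterator`, `take(n)` is equivalent to `slice(0, n)`, and slicing an already-sliced iterator adjusts the bounds of the single delegating iterator in place instead of stacking anonymous wrappers; the skip to the start index happens lazily on first access, and a negative internal bound means "unbounded". A quick behavioural sketch (object name is illustrative):

    object SliceSketch extends App {
      // Finite slice: elements at indices 2, 3, 4.
      println(Iterator.from(1).slice(2, 5).toList)        // List(3, 4, 5)

      // take is now slice(0, n); a negative count yields an empty iterator.
      println(Iterator(1, 2, 3).take(-1).toList)          // List()

      // Chained slices adjust the bounds of one SliceIterator rather than nesting wrappers,
      // so long chains of take/slice no longer build deep delegation stacks.
      println((1 to 100).iterator.slice(10, 90).slice(5, 10).toList)  // List(16, 17, 18, 19, 20)
    }
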
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index d133400570..b474abc12a 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -230,11 +230,15 @@ self =>
protected class FilteredKeys(p: A => Boolean) extends AbstractMap[A, B] with DefaultMap[A, B] {
override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
def iterator = self.iterator.filter(kv => p(kv._1))
- override def contains(key: A) = self.contains(key) && p(key)
+ override def contains(key: A) = p(key) && self.contains(key)
def get(key: A) = if (!p(key)) None else self.get(key)
}
/** Filters this map by retaining only keys satisfying a predicate.
+ *
+ * '''Note''': the predicate must accept any key of type `A`, not just those already
+ * present in the map, as the predicate is tested before the underlying map is queried.
+ *
* @param p the predicate used to test keys
* @return an immutable map consisting only of those key value pairs of this map where the key satisfies
* the predicate `p`. The resulting map wraps the original map without copying any elements.
@@ -319,11 +323,20 @@ self =>
res
}
- /* Overridden for efficiency. */
- override def toSeq: Seq[(A, B)] = toBuffer[(A, B)]
+ override def toSeq: Seq[(A, B)] = {
+ if (isEmpty) Vector.empty[(A, B)]
+ else {
+ // Default appropriate for immutable collections; mutable collections override this
+ val vb = Vector.newBuilder[(A, B)]
+ foreach(vb += _)
+ vb.result
+ }
+ }
+
override def toBuffer[C >: (A, B)]: mutable.Buffer[C] = {
val result = new mutable.ArrayBuffer[C](size)
- copyToBuffer(result)
+ // Faster to let the map iterate itself than to defer through copyToBuffer
+ foreach(result += _)
result
}
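
Two things change in this file: `FilteredKeys.contains` now tests the predicate before consulting the underlying map (hence the new scaladoc note that the predicate must tolerate arbitrary keys), and `toSeq`/`toBuffer` build their results directly instead of deferring to `toBuffer`/`copyToBuffer`. A small illustration of the `filterKeys` point (values are illustrative):

    object FilterKeysSketch extends App {
      val m = Map("a" -> 1, "bb" -> 2)

      // The predicate sees every queried key, including ones absent from the map,
      // so it must not assume membership.
      val filtered = m.filterKeys(_.length > 1)

      println(filtered.contains("bb"))   // true:  predicate passes, key present
      println(filtered.contains("a"))    // false: predicate fails before the map is consulted
      println(filtered.contains("zzz"))  // false: predicate passes, but the key is absent
      println(filtered.get("a"))         // None
    }
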
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index 3473c8aff1..1fbcb6531e 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -96,6 +96,14 @@ trait SeqViewLike[+A,
if (idx < self.length) self(idx) else restSeq(idx - self.length)
}
+ trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B] {
+ protected[this] lazy val fstSeq = fst.toSeq
+ def length: Int = fstSeq.length + self.length
+ def apply(idx: Int): B =
+ if (idx < fstSeq.length) fstSeq(idx)
+ else self.apply(idx - fstSeq.length)
+ }
+
trait Filtered extends super.Filtered with Transformed[A] {
protected[this] lazy val index = {
var len = 0
@@ -179,21 +187,12 @@ trait SeqViewLike[+A,
final override protected[this] def viewIdentifier = "P"
}
- trait Prepended[B >: A] extends Transformed[B] {
- protected[this] val fst: B
- override def iterator: Iterator[B] = Iterator.single(fst) ++ self.iterator
- def length: Int = 1 + self.length
- def apply(idx: Int): B =
- if (idx == 0) fst
- else self.apply(idx - 1)
- final override protected[this] def viewIdentifier = "A"
- }
-
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { protected[this] val fst = that } with AbstractTransformed[B] with Prepended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
@@ -212,7 +211,6 @@ trait SeqViewLike[+A,
val patch = _patch
val replaced = _replaced
} with AbstractTransformed[B] with Patched[B]
- protected def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with AbstractTransformed[B] with Prepended[B]
// see comment in IterableViewLike.
protected override def newTaken(n: Int): Transformed[A] = newSliced(SliceInterval(0, n))
@@ -242,7 +240,7 @@ trait SeqViewLike[+A,
}
override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
- newPrepended(elem).asInstanceOf[That]
+ newPrepended(elem :: Nil).asInstanceOf[That]
override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
++(Iterator.single(elem))(bf)
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index f8ac1d754d..d03c808c2c 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -77,11 +77,20 @@ self =>
protected[this] override def parCombiner = ParSet.newCombiner[A]
- /* Overridden for efficiency. */
- override def toSeq: Seq[A] = toBuffer[A]
+ // Default collection type appropriate for immutable collections; mutable collections override this
+ override def toSeq: Seq[A] = {
+ if (isEmpty) Vector.empty[A]
+ else {
+ val vb = Vector.newBuilder[A]
+ foreach(vb += _)
+ vb.result
+ }
+ }
+
override def toBuffer[A1 >: A]: mutable.Buffer[A1] = {
val result = new mutable.ArrayBuffer[A1](size)
- copyToBuffer(result)
+ // Faster to let the set iterate itself than to defer through copyToBuffer
+ foreach(result += _)
result
}
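
As in `MapLike` above, `SetLike.toSeq` now builds a `Vector` for the default (immutable) case, while mutable sets override it with an `ArrayBuffer`-based version later in this diff; so `toSeq` on an immutable set yields an immutable `Seq`. A tiny check, assuming the receiver is an immutable `Set`:

    object SetToSeqSketch extends App {
      val s: Set[Int] = Set(3, 1, 2)
      val seq = s.toSeq
      println(seq.isInstanceOf[Vector[_]])   // true: non-empty immutable sets now build a Vector
      println(seq.sorted)                    // Vector(1, 2, 3)
    }
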
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 32d31f0be8..5a07874fd6 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -253,7 +253,7 @@ trait TraversableLike[+A, +Repr] extends Any
b.result
}
- private def filterImpl(p: A => Boolean, isFlipped: Boolean): Repr = {
+ private[scala] def filterImpl(p: A => Boolean, isFlipped: Boolean): Repr = {
val b = newBuilder
for (x <- this)
if (p(x) != isFlipped) b += x
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index 5926c69ebf..0901d749c3 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -189,6 +189,15 @@ trait TraversableViewLike[+A,
}
final override protected[this] def viewIdentifier = "A"
}
+
+ trait Prepended[B >: A] extends Transformed[B] {
+ protected[this] val fst: GenTraversable[B]
+ def foreach[U](f: B => U) {
+ fst foreach f
+ self foreach f
+ }
+ final override protected[this] def viewIdentifier = "A"
+ }
trait Filtered extends Transformed[A] {
protected[this] val pred: A => Boolean
@@ -222,11 +231,15 @@ trait TraversableViewLike[+A,
final override protected[this] def viewIdentifier = "D"
}
- override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
+ override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That =
newAppended(xs.seq.toTraversable).asInstanceOf[That]
-// was: if (bf.isInstanceOf[ByPassCanBuildFrom]) newAppended(that).asInstanceOf[That]
-// else super.++[B, That](that)(bf)
- }
+
+ override def ++:[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That =
+ newPrepended(xs.seq.toTraversable).asInstanceOf[That]
+
+ // Need second one because of optimization in TraversableLike
+ override def ++:[B >: A, That](xs: Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That =
+ newPrepended(xs).asInstanceOf[That]
override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = {
newMapped(f).asInstanceOf[That]
@@ -253,6 +266,7 @@ trait TraversableViewLike[+A,
*/
protected def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val fst = that } with AbstractTransformed[B] with Prepended[B]
protected def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
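
With the `Prepended` transformer and the two `++:` overrides (the second is needed because `TraversableLike` special-cases a strict `Traversable` argument), prepending to a view now stays lazy instead of forcing the collection, mirroring what `++`/`Appended` already did; `SeqViewLike`'s `+:` earlier in this diff routes through the same transformer via `newPrepended(elem :: Nil)`. A small sketch (names are illustrative):

    object PrependViewSketch extends App {
      val view = (1 to 3).view

      // ++: is right-associative: this calls view.++:(List(-1, 0)) and yields a lazy Prepended view.
      println((List(-1, 0) ++: view).toList)   // List(-1, 0, 1, 2, 3)

      // +: on a SeqView now goes through the same machinery with a one-element prefix.
      println((0 +: view).toList)              // List(0, 1, 2, 3)
    }
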
diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala
index 2eea15b8dc..f0a5f57225 100644
--- a/src/library/scala/collection/concurrent/Map.scala
+++ b/src/library/scala/collection/concurrent/Map.scala
@@ -86,4 +86,15 @@ trait Map[A, B] extends scala.collection.mutable.Map[A, B] {
* @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise
*/
def replace(k: A, v: B): Option[B]
+
+ override def getOrElseUpdate(key: A, op: =>B): B = get(key) match {
+ case Some(v) => v
+ case None =>
+ val v = op
+ putIfAbsent(key, v) match {
+ case Some(nv) => nv
+ case None => v
+ }
+ }
+
}
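
The new default `getOrElseUpdate` is built from `get` and `putIfAbsent`, so on a concurrent map the update is atomic at the map level: if another thread wins the `putIfAbsent` race, its value is returned and the locally computed one is discarded (note that `op` may still be evaluated even when its result ends up unused). A usage sketch with `TrieMap`, which mixes in `scala.collection.concurrent.Map` (names and values are illustrative):

    import scala.collection.concurrent.TrieMap

    object GetOrElseUpdateSketch extends App {
      val cache = TrieMap.empty[String, Int]

      def expensive(s: String): Int = { println(s"computing $s"); s.length }

      println(cache.getOrElseUpdate("scala", expensive("scala")))  // computes, returns 5
      println(cache.getOrElseUpdate("scala", expensive("scala")))  // cached, returns 5 without recomputing
    }
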
diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala
index 9916fe9843..e97a2ff1fc 100644
--- a/src/library/scala/collection/convert/WrapAsJava.scala
+++ b/src/library/scala/collection/convert/WrapAsJava.scala
@@ -30,8 +30,9 @@ trait WrapAsJava {
* @return A Java Iterator view of the argument.
*/
implicit def asJavaIterator[A](it: Iterator[A]): ju.Iterator[A] = it match {
- case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]]
- case _ => IteratorWrapper(it)
+ case null => null
+ case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]]
+ case _ => IteratorWrapper(it)
}
/**
@@ -48,8 +49,9 @@ trait WrapAsJava {
* @return A Java Enumeration view of the argument.
*/
implicit def asJavaEnumeration[A](it: Iterator[A]): ju.Enumeration[A] = it match {
+ case null => null
case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]]
- case _ => IteratorWrapper(it)
+ case _ => IteratorWrapper(it)
}
/**
@@ -66,8 +68,9 @@ trait WrapAsJava {
* @return A Java Iterable view of the argument.
*/
implicit def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match {
- case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]]
- case _ => IterableWrapper(i)
+ case null => null
+ case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]]
+ case _ => IterableWrapper(i)
}
/**
@@ -82,8 +85,9 @@ trait WrapAsJava {
* @return A Java Collection view of the argument.
*/
implicit def asJavaCollection[A](it: Iterable[A]): ju.Collection[A] = it match {
- case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]]
- case _ => new IterableWrapper(it)
+ case null => null
+ case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]]
+ case _ => new IterableWrapper(it)
}
/**
@@ -100,8 +104,9 @@ trait WrapAsJava {
* @return A Java List view of the argument.
*/
implicit def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = b match {
- case JListWrapper(wrapped) => wrapped
- case _ => new MutableBufferWrapper(b)
+ case null => null
+ case JListWrapper(wrapped) => wrapped
+ case _ => new MutableBufferWrapper(b)
}
/**
@@ -118,8 +123,9 @@ trait WrapAsJava {
* @return A Java List view of the argument.
*/
implicit def mutableSeqAsJavaList[A](seq: mutable.Seq[A]): ju.List[A] = seq match {
- case JListWrapper(wrapped) => wrapped
- case _ => new MutableSeqWrapper(seq)
+ case null => null
+ case JListWrapper(wrapped) => wrapped
+ case _ => new MutableSeqWrapper(seq)
}
/**
@@ -136,8 +142,9 @@ trait WrapAsJava {
* @return A Java List view of the argument.
*/
implicit def seqAsJavaList[A](seq: Seq[A]): ju.List[A] = seq match {
- case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]]
- case _ => new SeqWrapper(seq)
+ case null => null
+ case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]]
+ case _ => new SeqWrapper(seq)
}
/**
@@ -154,8 +161,9 @@ trait WrapAsJava {
* @return A Java Set view of the argument.
*/
implicit def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = s match {
+ case null => null
case JSetWrapper(wrapped) => wrapped
- case _ => new MutableSetWrapper(s)
+ case _ => new MutableSetWrapper(s)
}
/**
@@ -172,8 +180,9 @@ trait WrapAsJava {
* @return A Java Set view of the argument.
*/
implicit def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match {
+ case null => null
case JSetWrapper(wrapped) => wrapped
- case _ => new SetWrapper(s)
+ case _ => new SetWrapper(s)
}
/**
@@ -190,9 +199,9 @@ trait WrapAsJava {
* @return A Java Map view of the argument.
*/
implicit def mutableMapAsJavaMap[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
+ case null => null
case JMapWrapper(wrapped) => wrapped
- case _ => new MutableMapWrapper(m)
+ case _ => new MutableMapWrapper(m)
}
/**
@@ -210,9 +219,9 @@ trait WrapAsJava {
* @return A Java `Dictionary` view of the argument.
*/
implicit def asJavaDictionary[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
- case JDictionaryWrapper(wrapped) => wrapped
- case _ => new DictionaryWrapper(m)
+ case null => null
+ case JDictionaryWrapper(wrapped) => wrapped
+ case _ => new DictionaryWrapper(m)
}
/**
@@ -230,9 +239,9 @@ trait WrapAsJava {
* @return A Java `Map` view of the argument.
*/
implicit def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
+ case null => null
case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]]
- case _ => new MapWrapper(m)
+ case _ => new MapWrapper(m)
}
/**
@@ -251,8 +260,9 @@ trait WrapAsJava {
* @return A Java `ConcurrentMap` view of the argument.
*/
implicit def mapAsJavaConcurrentMap[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = m match {
+ case null => null
case JConcurrentMapWrapper(wrapped) => wrapped
- case _ => new ConcurrentMapWrapper(m)
+ case _ => new ConcurrentMapWrapper(m)
}
}
diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala
index ab151a6778..7332b71af1 100644
--- a/src/library/scala/collection/convert/WrapAsScala.scala
+++ b/src/library/scala/collection/convert/WrapAsScala.scala
@@ -30,8 +30,9 @@ trait WrapAsScala {
* @return A Scala `Iterator` view of the argument.
*/
implicit def asScalaIterator[A](it: ju.Iterator[A]): Iterator[A] = it match {
+ case null => null
case IteratorWrapper(wrapped) => wrapped
- case _ => JIteratorWrapper(it)
+ case _ => JIteratorWrapper(it)
}
/**
@@ -48,8 +49,9 @@ trait WrapAsScala {
* @return A Scala Iterator view of the argument.
*/
implicit def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = i match {
+ case null => null
case IteratorWrapper(wrapped) => wrapped
- case _ => JEnumerationWrapper(i)
+ case _ => JEnumerationWrapper(i)
}
/**
@@ -67,8 +69,9 @@ trait WrapAsScala {
* @return A Scala Iterable view of the argument.
*/
implicit def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = i match {
+ case null => null
case IterableWrapper(wrapped) => wrapped
- case _ => JIterableWrapper(i)
+ case _ => JIterableWrapper(i)
}
/**
@@ -82,8 +85,9 @@ trait WrapAsScala {
* @return A Scala Iterable view of the argument.
*/
implicit def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = i match {
+ case null => null
case IterableWrapper(wrapped) => wrapped
- case _ => JCollectionWrapper(i)
+ case _ => JCollectionWrapper(i)
}
/**
@@ -101,8 +105,9 @@ trait WrapAsScala {
* @return A Scala mutable `Buffer` view of the argument.
*/
implicit def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = l match {
- case MutableBufferWrapper(wrapped) => wrapped
- case _ =>new JListWrapper(l)
+ case null => null
+ case MutableBufferWrapper(wrapped) => wrapped
+ case _ => new JListWrapper(l)
}
/**
@@ -119,8 +124,9 @@ trait WrapAsScala {
* @return A Scala mutable Set view of the argument.
*/
implicit def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = s match {
+ case null => null
case MutableSetWrapper(wrapped) => wrapped
- case _ =>new JSetWrapper(s)
+ case _ => new JSetWrapper(s)
}
/**
@@ -144,9 +150,9 @@ trait WrapAsScala {
* @return A Scala mutable Map view of the argument.
*/
implicit def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = m match {
- //case ConcurrentMapWrapper(wrapped) => wrapped
+ case null => null
case MutableMapWrapper(wrapped) => wrapped
- case _ => new JMapWrapper(m)
+ case _ => new JMapWrapper(m)
}
/**
@@ -163,8 +169,9 @@ trait WrapAsScala {
* @return A Scala mutable ConcurrentMap view of the argument.
*/
implicit def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = m match {
- case cmw: ConcurrentMapWrapper[a, b] => cmw.underlying
- case _ => new JConcurrentMapWrapper(m)
+ case null => null
+ case cmw: ConcurrentMapWrapper[A, B] => cmw.underlying
+ case _ => new JConcurrentMapWrapper(m)
}
/**
@@ -179,8 +186,9 @@ trait WrapAsScala {
* @return A Scala mutable Map[String, String] view of the argument.
*/
implicit def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match {
+ case null => null
case DictionaryWrapper(wrapped) => wrapped
- case _ => new JDictionaryWrapper(p)
+ case _ => new JDictionaryWrapper(p)
}
/**
@@ -194,7 +202,8 @@ trait WrapAsScala {
* @return A Scala mutable Map[String, String] view of the argument.
*/
implicit def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = p match {
- case _ => new JPropertiesWrapper(p)
+ case null => null
+ case _ => new JPropertiesWrapper(p)
}
}
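
Every converter in `WrapAsJava` and `WrapAsScala` now starts with `case null => null`, so a null reference passes through the implicit conversions unchanged rather than being wrapped in a view that fails later. A minimal check, using the `JavaConversions` object which mixes in both traits (explicit calls shown so the conversion site is visible):

    import scala.collection.JavaConversions

    object NullConversionSketch extends App {
      val jm: java.util.Map[String, Int] = null
      println(JavaConversions.mapAsScalaMap(jm) == null)   // true: null in, null out

      val ss: Seq[String] = null
      println(JavaConversions.seqAsJavaList(ss) == null)   // true as well
    }
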
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index f548eac88d..49b4397cf2 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -194,7 +194,7 @@ class HashSet[A] extends AbstractSet[A]
protected def get0(key: A, hash: Int, level: Int): Boolean = false
- def updated0(key: A, hash: Int, level: Int): HashSet[A] =
+ private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] =
new HashSet.HashSet1(key, hash)
protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = this
@@ -256,10 +256,10 @@ object HashSet extends ImmutableSetFactory[HashSet] {
class HashSet1[A](private[HashSet] val key: A, private[HashSet] val hash: Int) extends LeafHashSet[A] {
override def size = 1
- override def get0(key: A, hash: Int, level: Int): Boolean =
+ override protected def get0(key: A, hash: Int, level: Int): Boolean =
(hash == this.hash && key == this.key)
- override def subsetOf0(that: HashSet[A], level: Int) = {
+ override protected def subsetOf0(that: HashSet[A], level: Int) = {
// check if that contains this.key
// we use get0 with our key and hash at the correct level instead of calling contains,
// which would not work since that might not be a top-level HashSet
@@ -267,7 +267,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
that.get0(key, hash, level)
}
- override def updated0(key: A, hash: Int, level: Int): HashSet[A] =
+ override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash && key == this.key) this
else {
if (hash != this.hash) {
@@ -312,7 +312,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
override private[immutable] def diff0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] =
if (that.get0(key, hash, level)) null else this
- override def removed0(key: A, hash: Int, level: Int): HashSet[A] =
+ override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash && key == this.key) null else this
override protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] =
@@ -326,10 +326,10 @@ object HashSet extends ImmutableSetFactory[HashSet] {
override def size = ks.size
- override def get0(key: A, hash: Int, level: Int): Boolean =
+ override protected def get0(key: A, hash: Int, level: Int): Boolean =
if (hash == this.hash) ks.contains(key) else false
- override def subsetOf0(that: HashSet[A], level: Int) = {
+ override protected def subsetOf0(that: HashSet[A], level: Int) = {
// we have to check each element
// we use get0 with our hash at the correct level instead of calling contains,
// which would not work since that might not be a top-level HashSet
@@ -337,11 +337,11 @@ object HashSet extends ImmutableSetFactory[HashSet] {
ks.forall(key => that.get0(key, hash, level))
}
- override def updated0(key: A, hash: Int, level: Int): HashSet[A] =
+ override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash) new HashSetCollision1(hash, ks + key)
else makeHashTrieSet(this.hash, this, hash, new HashSet1(key, hash), level)
- override def union0(that: LeafHashSet[A], level: Int): HashSet[A] = that match {
+ override private[immutable] def union0(that: LeafHashSet[A], level: Int): HashSet[A] = that match {
case that if that.hash != this.hash =>
// different hash code, so there is no need to investigate further.
// Just create a branch node containing the two.
@@ -374,7 +374,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
}
}
- override def union0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match {
+ override private[immutable] def union0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match {
case that: LeafHashSet[A] =>
// switch to the simpler Tree/Leaf implementation
this.union0(that, level)
@@ -431,7 +431,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
}
}
- override def removed0(key: A, hash: Int, level: Int): HashSet[A] =
+ override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash) {
val ks1 = ks - key
ks1.size match {
@@ -528,7 +528,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
override def size = size0
- override def get0(key: A, hash: Int, level: Int): Boolean = {
+ override protected def get0(key: A, hash: Int, level: Int): Boolean = {
val index = (hash >>> level) & 0x1f
val mask = (1 << index)
if (bitmap == - 1) {
@@ -540,7 +540,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
false
}
- override def updated0(key: A, hash: Int, level: Int): HashSet[A] = {
+ override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] = {
val index = (hash >>> level) & 0x1f
val mask = (1 << index)
val offset = Integer.bitCount(bitmap & (mask-1))
@@ -842,7 +842,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
case _ => this
}
- override def removed0(key: A, hash: Int, level: Int): HashSet[A] = {
+ override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = {
val index = (hash >>> level) & 0x1f
val mask = (1 << index)
val offset = Integer.bitCount(bitmap & (mask-1))
@@ -879,7 +879,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
}
}
- override def subsetOf0(that: HashSet[A], level: Int): Boolean = if (that eq this) true else that match {
+ override protected def subsetOf0(that: HashSet[A], level: Int): Boolean = if (that eq this) true else that match {
case that: HashTrieSet[A] if this.size0 <= that.size0 =>
// create local mutable copies of members
var abm = this.bitmap
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 254f14f13c..48a6ca5699 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -13,7 +13,7 @@ package immutable
import generic._
import mutable.{Builder, ListBuffer}
import scala.annotation.tailrec
-import java.io._
+import java.io.{ObjectOutputStream, ObjectInputStream}
/** A class for immutable linked lists representing ordered collections
* of elements of type.
@@ -86,7 +86,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
with Product
with GenericTraversableTemplate[A, List]
with LinearSeqOptimized[A, List[A]]
- with Serializable {
+ with scala.Serializable {
override def companion: GenericCompanion[List] = List
import scala.collection.{Iterable, Traversable, Seq, IndexedSeq}
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index 7c40e84280..c5773338f5 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -29,7 +29,11 @@ object ListMap extends ImmutableMapFactory[ListMap] {
new MapCanBuildFrom[A, B]
def empty[A, B]: ListMap[A, B] = EmptyListMap.asInstanceOf[ListMap[A, B]]
- private object EmptyListMap extends ListMap[Any, Nothing] { }
+ @SerialVersionUID(-8256686706655863282L)
+ private object EmptyListMap extends ListMap[Any, Nothing] {
+ override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: Any) = false
+ }
}
/** This class implements immutable maps using a list-based data structure.
@@ -159,7 +163,6 @@ extends AbstractMap[A, B]
*/
override def apply(k: A): B1 = apply0(this, k)
-
@tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 =
if (cur.isEmpty) throw new NoSuchElementException("key not found: "+k)
else if (k == cur.key) cur.value
@@ -176,7 +179,16 @@ extends AbstractMap[A, B]
@tailrec private def get0(cur: ListMap[A, B1], k: A): Option[B1] =
if (k == cur.key) Some(cur.value)
else if (cur.next.nonEmpty) get0(cur.next, k) else None
-
+
+
+ override def contains(key: A): Boolean = contains0(this, key)
+
+ @tailrec private def contains0(cur: ListMap[A, B1], k: A): Boolean =
+ if (k == cur.key) true
+ else if (cur.next.nonEmpty) contains0(cur.next, k)
+ else false
+
+
/** This method allows one to create a new map with an additional mapping
* from `key` to `value`. If the map contains already a mapping for `key`,
* it will be overridden by this function.
@@ -186,6 +198,7 @@ extends AbstractMap[A, B]
new m.Node[B2](k, v)
}
+
/** Creates a new mapping without the given `key`.
* If the map does not contain a mapping for the given key, the
* method returns the same map.
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index 5178d5a862..63ddcb18cf 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -94,6 +94,8 @@ object Map extends ImmutableMapFactory[Map] {
private object EmptyMap extends AbstractMap[Any, Nothing] with Map[Any, Nothing] with Serializable {
override def size: Int = 0
+ override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: Any) = false
def get(key: Any): Option[Nothing] = None
def iterator: Iterator[(Any, Nothing)] = Iterator.empty
override def updated [B1] (key: Any, value: B1): Map[Any, B1] = new Map1(key, value)
@@ -103,6 +105,8 @@ object Map extends ImmutableMapFactory[Map] {
class Map1[A, +B](key1: A, value1: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 1
+ override def apply(key: A) = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: A) = key == key1
def get(key: A): Option[B] =
if (key == key1) Some(value1) else None
def iterator = Iterator((key1, value1))
@@ -119,6 +123,11 @@ object Map extends ImmutableMapFactory[Map] {
class Map2[A, +B](key1: A, value1: B, key2: A, value2: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 2
+ override def apply(key: A) =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: A) = (key == key1) || (key == key2)
def get(key: A): Option[B] =
if (key == key1) Some(value1)
else if (key == key2) Some(value2)
@@ -140,6 +149,12 @@ object Map extends ImmutableMapFactory[Map] {
class Map3[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 3
+ override def apply(key: A) =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else if (key == key3) value3
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: A) = (key == key1) || (key == key2) || (key == key3)
def get(key: A): Option[B] =
if (key == key1) Some(value1)
else if (key == key2) Some(value2)
@@ -164,6 +179,13 @@ object Map extends ImmutableMapFactory[Map] {
class Map4[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B, key4: A, value4: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 4
+ override def apply(key: A) =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else if (key == key3) value3
+ else if (key == key4) value4
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: A) = (key == key1) || (key == key2) || (key == key3) || (key == key4)
def get(key: A): Option[B] =
if (key == key1) Some(value1)
else if (key == key2) Some(value2)
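
The small immutable maps (`EmptyMap`, `Map1` through `Map4`) gain direct `apply` and `contains` implementations, so lookups on these very common map sizes no longer go through `get` and an `Option` allocation; observable behaviour is unchanged. A brief check (values are illustrative):

    object SmallMapSketch extends App {
      val m = Map("a" -> 1, "b" -> 2)   // a Map.Map2 under the hood

      println(m("a"))                   // 1, answered without allocating Some(1)
      println(m.contains("c"))          // false
      println(scala.util.Try(m("c")))   // Failure(java.util.NoSuchElementException: key not found: c)
    }
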
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index f11217d26a..8910ee16b9 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -12,7 +12,7 @@ package scala
package collection
package immutable
-import java.io._
+import java.io.{File, FileReader, Reader}
import scala.util.matching.Regex
import scala.reflect.ClassTag
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 3ae8a2c342..0b380517f8 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -202,7 +202,24 @@ extends scala.collection.AbstractSeq[Int]
copy(locationAfterN(n), end, step)
}
)
-
+
+ /** Creates a new range containing the elements starting at `from` up to but not including `until`.
+ *
+ * $doesNotUseBuilders
+ *
+ * @param from the element at which to start
+ * @param until the element at which to end (not included in the range)
+ * @return a new range consisting of a contiguous interval of values in the old range
+ */
+ override def slice(from: Int, until: Int): Range =
+ if (from <= 0) take(until)
+ else if (until >= numRangeElements && numRangeElements >= 0) drop(from)
+ else {
+ val fromValue = locationAfterN(from)
+ if (from >= until) newEmptyRange(fromValue)
+ else new Range.Inclusive(fromValue, locationAfterN(until-1), step)
+ }
+
/** Creates a new range containing all the elements of this range except the last one.
*
* $doesNotUseBuilders
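
`Range` now overrides `slice` to compute another `Range` arithmetically from `locationAfterN`, rather than falling back to the generic `IndexedSeq` slice; no builder is involved and the result is still a `Range`. A quick sketch (values are illustrative):

    object RangeSliceSketch extends App {
      val r = 1 to 10

      println(r.slice(3, 6))                   // Range(4, 5, 6): still a Range, not a generic IndexedSeq
      println((0 until 20 by 3).slice(1, 4))   // Range(3, 6, 9)
      println(r.slice(7, 3))                   // empty Range
    }
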
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index f303e79bb3..5fff727c36 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -499,6 +499,16 @@ self =>
)
else super.flatMap(f)(bf)
+ override private[scala] def filterImpl(p: A => Boolean, isFlipped: Boolean): Stream[A] = {
+ // optimization: drop leading prefix of elems for which f returns false
+ // var rest = this dropWhile (!p(_)) - forget DRY principle - GC can't collect otherwise
+ var rest = this
+ while (!rest.isEmpty && p(rest.head) == isFlipped) rest = rest.tail
+ // private utility func to avoid `this` on stack (would be needed for the lazy arg)
+ if (rest.nonEmpty) Stream.filteredTail(rest, p, isFlipped)
+ else Stream.Empty
+ }
+
/** Returns all the elements of this `Stream` that satisfy the predicate `p`
* in a new `Stream` - i.e., it is still a lazy data structure. The order of
* the elements is preserved
@@ -512,67 +522,11 @@ self =>
* // produces
* }}}
*/
- override def filter(p: A => Boolean): Stream[A] = {
- // optimization: drop leading prefix of elems for which f returns false
- // var rest = this dropWhile (!p(_)) - forget DRY principle - GC can't collect otherwise
- var rest = this
- while (!rest.isEmpty && !p(rest.head)) rest = rest.tail
- // private utility func to avoid `this` on stack (would be needed for the lazy arg)
- if (rest.nonEmpty) Stream.filteredTail(rest, p)
- else Stream.Empty
- }
-
- override final def withFilter(p: A => Boolean): StreamWithFilter = new StreamWithFilter(p)
-
- /** A lazier implementation of WithFilter than TraversableLike's.
- */
- final class StreamWithFilter(p: A => Boolean) extends WithFilter(p) {
-
- override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
- def tailMap(coll: Stream[A]): Stream[B] = {
- var head: A = null.asInstanceOf[A]
- var tail: Stream[A] = coll
- while (true) {
- if (tail.isEmpty)
- return Stream.Empty
- head = tail.head
- tail = tail.tail
- if (p(head))
- return cons(f(head), tailMap(tail))
- }
- throw new RuntimeException()
- }
-
- if (isStreamBuilder(bf)) asThat(tailMap(Stream.this))
- else super.map(f)(bf)
- }
+ override def filter(p: A => Boolean): Stream[A] = filterImpl(p, isFlipped = false) // This override is only left in 2.11 because of binary compatibility, see PR #3925
- override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
- def tailFlatMap(coll: Stream[A]): Stream[B] = {
- var head: A = null.asInstanceOf[A]
- var tail: Stream[A] = coll
- while (true) {
- if (tail.isEmpty)
- return Stream.Empty
- head = tail.head
- tail = tail.tail
- if (p(head))
- return f(head).toStream append tailFlatMap(tail)
- }
- throw new RuntimeException()
- }
-
- if (isStreamBuilder(bf)) asThat(tailFlatMap(Stream.this))
- else super.flatMap(f)(bf)
- }
-
- override def foreach[B](f: A => B) =
- for (x <- self)
- if (p(x)) f(x)
-
- override def withFilter(q: A => Boolean): StreamWithFilter =
- new StreamWithFilter(x => p(x) && q(x))
- }
+ /** A FilterMonadic which allows GC of the head of stream during processing */
+ @noinline // Workaround SI-9137, see https://github.com/scala/scala/pull/4284#issuecomment-73180791
+ override final def withFilter(p: A => Boolean): FilterMonadic[A, Stream[A]] = new Stream.StreamWithFilter(this, p)
/** A lazier Iterator than LinearSeqLike's. */
override def iterator: Iterator[A] = new StreamIterator(self)
@@ -1289,13 +1243,36 @@ object Stream extends SeqFactory[Stream] {
else cons(start, range(start + step, end, step))
}
- private[immutable] def filteredTail[A](stream: Stream[A], p: A => Boolean) = {
- cons(stream.head, stream.tail filter p)
+ private[immutable] def filteredTail[A](stream: Stream[A], p: A => Boolean, isFlipped: Boolean) = {
+ cons(stream.head, stream.tail.filterImpl(p, isFlipped))
}
private[immutable] def collectedTail[A, B, That](head: B, stream: Stream[A], pf: PartialFunction[A, B], bf: CanBuildFrom[Stream[A], B, That]) = {
cons(head, stream.tail.collect(pf)(bf).asInstanceOf[Stream[B]])
}
-}
+ /** An implementation of `FilterMonadic` allowing GC of the filtered-out elements of
+ * the `Stream` as it is processed.
+ *
+ * Because this is not an inner class of `Stream` with a reference to the original
+ * head, it is now possible for GC to collect any leading and filtered-out elements
+ * which do not satisfy the filter, while the tail is still processing (see SI-8990).
+ */
+ private[immutable] final class StreamWithFilter[A](sl: => Stream[A], p: A => Boolean) extends FilterMonadic[A, Stream[A]] {
+ private var s = sl // set to null to allow GC after filtered
+ private lazy val filtered = { val f = s filter p; s = null; f } // don't set to null if throw during filter
+
+ def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
+ filtered map f
+
+ def flatMap[B, That](f: A => scala.collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
+ filtered flatMap f
+ def foreach[U](f: A => U): Unit =
+ filtered foreach f
+
+ def withFilter(q: A => Boolean): FilterMonadic[A, Stream[A]] =
+ new StreamWithFilter[A](filtered, q)
+ }
+
+}
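
`filter` and `withFilter` on `Stream` are now routed through `filterImpl`, and the new top-level `Stream.StreamWithFilter` drops its reference to the original stream head once the filtered stream has been computed, so a long filtered-out prefix can be collected while a for-comprehension is still walking the tail (SI-8990). The visible semantics are the ordinary lazy ones, as in this sketch (names are illustrative):

    object StreamFilterSketch extends App {
      // Only as many elements are examined as the downstream consumer demands.
      val evensTimesTen =
        for (n <- Stream.from(1) if n % 2 == 0) yield n * 10   // desugars to withFilter + map

      println(evensTimesTen.take(3).toList)                    // List(20, 40, 60)

      // filterNot benefits too: it shares filterImpl, with isFlipped = true.
      println(Stream.from(1).filterNot(_ % 2 == 0).take(3).toList)  // List(1, 3, 5)
    }
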
diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala
index c2eb85815d..4d7eaeff2a 100644
--- a/src/library/scala/collection/immutable/StreamViewLike.scala
+++ b/src/library/scala/collection/immutable/StreamViewLike.scala
@@ -53,6 +53,7 @@ extends SeqView[A, Coll]
/** boilerplate */
protected override def newForced[B](xs: => scala.collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected override def newAppended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newPrepended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { protected[this] val fst = that } with AbstractTransformed[B] with Prepended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected override def newFlatMapped[B](f: A => scala.collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
@@ -67,7 +68,6 @@ extends SeqView[A, Coll]
protected override def newPatched[B >: A](_from: Int, _patch: scala.collection.GenSeq[B], _replaced: Int): Transformed[B] = {
new { val from = _from; val patch = _patch; val replaced = _replaced } with AbstractTransformed[B] with Patched[B]
}
- protected override def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with AbstractTransformed[B] with Prepended[B]
override def stringPrefix = "StreamView"
}
diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala
index fccc9d83e6..ed6ca1939d 100644
--- a/src/library/scala/collection/mutable/AnyRefMap.scala
+++ b/src/library/scala/collection/mutable/AnyRefMap.scala
@@ -335,6 +335,24 @@ extends AbstractMap[K, V]
arm
}
+ override def +[V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = {
+ val arm = clone().asInstanceOf[AnyRefMap[K, V1]]
+ arm += kv
+ arm
+ }
+
+ override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): AnyRefMap[K, V1] = {
+ val arm = clone().asInstanceOf[AnyRefMap[K, V1]]
+ xs.foreach(kv => arm += kv)
+ arm
+ }
+
+ override def updated[V1 >: V](key: K, value: V1): AnyRefMap[K, V1] = {
+ val arm = clone().asInstanceOf[AnyRefMap[K, V1]]
+ arm += (key, value)
+ arm
+ }
+
private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B) {
var i,j = 0
while (i < _hashes.length & j < _size) {
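
`+`, `++` and `updated` are overridden so that non-destructive additions on an `AnyRefMap` hand back an `AnyRefMap` (a clone updated in place) instead of widening to a generic `mutable.Map`. A brief sketch (values are illustrative):

    import scala.collection.mutable.AnyRefMap

    object AnyRefMapSketch extends App {
      val m = AnyRefMap("a" -> 1, "b" -> 2)

      val m2 = m + ("c" -> 3)               // still an AnyRefMap; m itself is untouched
      println(m2.getClass.getSimpleName)    // AnyRefMap
      println(m.contains("c"))              // false
      println(m2("c"))                      // 3

      val m3 = m ++ List("d" -> 4, "e" -> 5)
      println(m3.size)                      // 4
    }
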
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 00491ef20e..2bc41b5802 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -40,9 +40,8 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
arrayElementClass(repr.getClass)
override def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) {
- var l = math.min(len, repr.length)
- if (xs.length - start < l) l = xs.length - start max 0
- Array.copy(repr, 0, xs, start, l)
+ val l = len min repr.length min (xs.length - start)
+ if (l > 0) Array.copy(repr, 0, xs, start, l)
}
override def toArray[U >: T : ClassTag]: Array[U] = {
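
`copyToArray` now computes the copy length with a single `min` chain and only calls `Array.copy` when that length is positive, so degenerate calls (a start index at or past the end of the destination, or a request larger than what fits) are quietly truncated or skipped rather than possibly reaching `Array.copy` with out-of-range arguments. A sketch of the clamping, under the usual semantics (values are illustrative):

    object CopyToArraySketch extends App {
      val src = Array(1, 2, 3, 4, 5)

      val dst = new Array[Int](3)
      src.copyToArray(dst, 1, 10)   // clamps to the 2 slots remaining after index 1
      println(dst.toList)           // List(0, 1, 2)

      val past = new Array[Int](3)
      src.copyToArray(past, 4, 5)   // start is beyond the destination: nothing fits, nothing is copied
      println(past.toList)          // List(0, 0, 0)
    }
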
diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala
index 577a838315..5a50f4fb27 100644
--- a/src/library/scala/collection/mutable/ArraySeq.scala
+++ b/src/library/scala/collection/mutable/ArraySeq.scala
@@ -87,7 +87,7 @@ extends AbstractSeq[A]
*/
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
val len1 = len min (xs.length - start) min length
- Array.copy(array, 0, xs, start, len1)
+ if (len1 > 0) Array.copy(array, 0, xs, start, len1)
}
override def clone(): ArraySeq[A] = {
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index 3c57387c03..8d24538620 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -211,13 +211,6 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
*/
override def stringPrefix: String = "Buffer"
- /** Returns the current evolving(!) state of this buffer as a read-only sequence.
- *
- * @return A sequence that forwards to this buffer for all its operations.
- */
- @deprecated("The returned sequence changes as this buffer is mutated. For an immutable copy, use, e.g., toList.", "2.11.0")
- def readOnly: scala.collection.Seq[A] = toSeq
-
/** Creates a new collection containing both the elements of this collection and the provided
* traversable object.
*
diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala
index d9632cce91..2d52831d37 100644
--- a/src/library/scala/collection/mutable/BufferProxy.scala
+++ b/src/library/scala/collection/mutable/BufferProxy.scala
@@ -43,8 +43,6 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
*/
def +=(elem: A): this.type = { self.+=(elem); this }
- override def readOnly = self.readOnly
-
/** Appends a number of elements provided by a traversable object.
*
* @param xs the traversable object.
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index 8faaf97741..1906c47f61 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -12,7 +12,7 @@ package mutable
import generic._
import immutable.{List, Nil, ::}
-import java.io._
+import java.io.{ObjectOutputStream, ObjectInputStream}
import scala.annotation.migration
/** A `Buffer` implementation back up by a list. It provides constant time
@@ -408,9 +408,6 @@ final class ListBuffer[A]
}
}
- @deprecated("The result of this method will change along with this buffer, which is often not what's expected.", "2.11.0")
- override def readOnly: List[A] = start
-
// Private methods
/** Copy contents of this buffer */
@@ -426,7 +423,7 @@ final class ListBuffer[A]
}
override def equals(that: Any): Boolean = that match {
- case that: ListBuffer[_] => this.readOnly equals that.readOnly
+ case that: ListBuffer[_] => this.start equals that.start
case _ => super.equals(that)
}
diff --git a/src/library/scala/collection/mutable/LongMap.scala b/src/library/scala/collection/mutable/LongMap.scala
index c124f35cd7..1eb12d817c 100644
--- a/src/library/scala/collection/mutable/LongMap.scala
+++ b/src/library/scala/collection/mutable/LongMap.scala
@@ -415,6 +415,24 @@ extends AbstractMap[Long, V]
lm
}
+ override def +[V1 >: V](kv: (Long, V1)): LongMap[V1] = {
+ val lm = clone().asInstanceOf[LongMap[V1]]
+ lm += kv
+ lm
+ }
+
+ override def ++[V1 >: V](xs: GenTraversableOnce[(Long, V1)]): LongMap[V1] = {
+ val lm = clone().asInstanceOf[LongMap[V1]]
+ xs.foreach(kv => lm += kv)
+ lm
+ }
+
+ override def updated[V1 >: V](key: Long, value: V1): LongMap[V1] = {
+ val lm = clone().asInstanceOf[LongMap[V1]]
+ lm += (key, value)
+ lm
+ }
+
/** Applies a function to all keys of this map. */
def foreachKey[A](f: Long => A) {
if ((extraKeys & 1) == 1) f(0L)
@@ -541,7 +559,7 @@ object LongMap {
/** Creates a new `LongMap` from keys and values.
* Equivalent to but more efficient than `LongMap((keys zip values): _*)`.
*/
- def fromZip[V](keys: Iterable[Long], values: Iterable[V]): LongMap[V] = {
+ def fromZip[V](keys: collection.Iterable[Long], values: collection.Iterable[V]): LongMap[V] = {
val sz = math.min(keys.size, values.size)
val lm = new LongMap[V](sz * 2)
val ki = keys.iterator
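A minimal usage sketch of the non-destructive `LongMap` operations added above, assuming the 2.12 mutable collections API; the value names are illustrative only:

{{{
import scala.collection.mutable.LongMap

val base = LongMap(1L -> "one", 2L -> "two")

val withThree: LongMap[String] = base + (3L -> "three")                    // receiver is cloned, not mutated
val merged:    LongMap[String] = base ++ List(4L -> "four", 5L -> "five")  // keeps the LongMap type
val replaced:  LongMap[String] = base.updated(1L, "uno")

assert(base.size == 2)            // original map is untouched
assert(withThree.contains(3L))
}}}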
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index 44af886cf5..949e5e3536 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -61,6 +61,18 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
override protected[this] def newBuilder: Builder[(A, B), This] = empty
protected[this] override def parCombiner = ParMap.newCombiner[A, B]
+
+ /** Converts this $coll to a sequence.
+ *
+ * '''Note''': assumes a fast `size` method. Subclasses should override if this is not true.
+ */
+ override def toSeq: collection.Seq[(A, B)] = {
+ // ArrayBuffer for efficiency, preallocated to the right size.
+ val result = new ArrayBuffer[(A, B)](size)
+ foreach(result += _)
+ result
+ }
+
/** Adds a new key/value pair to this map and optionally returns previously bound value.
* If the map already contains a
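With the `toSeq` override above, a mutable map is copied into a pre-sized `ArrayBuffer` instead of being wrapped, so later mutation of the map no longer leaks into the returned sequence (the same pattern is applied to `mutable.SetLike` below). A small sketch of the resulting behaviour:

{{{
import scala.collection.mutable

val m = mutable.Map(1 -> "a", 2 -> "b")
val snapshot = m.toSeq        // copied into an ArrayBuffer of size m.size
m += (3 -> "c")               // mutating the map afterwards...
assert(snapshot.size == 2)    // ...does not change the already-built sequence
}}}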
diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala
index c3047522e2..85a299216e 100644
--- a/src/library/scala/collection/mutable/ResizableArray.scala
+++ b/src/library/scala/collection/mutable/ResizableArray.scala
@@ -74,7 +74,7 @@ trait ResizableArray[A] extends IndexedSeq[A]
*/
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
val len1 = len min (xs.length - start) min length
- Array.copy(array, 0, xs, start, len1)
+ if (len1 > 0) Array.copy(array, 0, xs, start, len1)
}
//##########################################################################
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index 81a71adc91..40a5c93064 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -72,6 +72,17 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
protected[this] override def parCombiner = ParSet.newCombiner[A]
+ /** Converts this $coll to a sequence.
+ *
+ * '''Note''': assumes a fast `size` method. Subclasses should override if this is not true.
+ */
+ override def toSeq: collection.Seq[A] = {
+ // ArrayBuffer for efficiency, preallocated to the right size.
+ val result = new ArrayBuffer[A](size)
+ foreach(result += _)
+ result
+ }
+
/** Adds an element to this $coll.
*
* @param elem the element to be added
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index 5f2ceac0e0..7bb278b038 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -456,6 +456,15 @@ self =>
}
it
}
+ /** Drop implemented as simple eager consumption. */
+ override def drop(n: Int): IterableSplitter[T] = {
+ var i = 0
+ while (i < n && hasNext) {
+ next()
+ i += 1
+ }
+ this
+ }
override def take(n: Int): IterableSplitter[T] = newTaken(n)
override def slice(from1: Int, until1: Int): IterableSplitter[T] = newSliceInternal(newTaken(until1), from1)
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index 65a632470e..3a1ec7fff8 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -197,7 +197,7 @@ extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetC
while (i < chunksz) {
val v = chunkarr(i).asInstanceOf[T]
val hc = trie.computeHash(v)
- trie = trie.updated0(v, hc, rootbits)
+ trie = trie.updated0(v, hc, rootbits) // internal API, private[collection]
i += 1
}
i = 0
diff --git a/src/library/scala/concurrent/BlockContext.scala b/src/library/scala/concurrent/BlockContext.scala
index 747cc393c3..2b8ed4c7ca 100644
--- a/src/library/scala/concurrent/BlockContext.scala
+++ b/src/library/scala/concurrent/BlockContext.scala
@@ -41,7 +41,7 @@ package scala.concurrent
trait BlockContext {
/** Used internally by the framework;
- * Designates (and eventually executes) a thunk which potentially blocks the calling `Thread`.
+ * Designates (and eventually executes) a thunk which potentially blocks the calling `java.lang.Thread`.
*
* Clients must use `scala.concurrent.blocking` or `scala.concurrent.Await` instead.
*/
@@ -53,9 +53,16 @@ object BlockContext {
override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = thunk
}
+ /**
+ * @return the `BlockContext` that will be used if no other is found.
+ **/
+ def defaultBlockContext: BlockContext = DefaultBlockContext
+
private val contextLocal = new ThreadLocal[BlockContext]()
- /** Obtain the current thread's current `BlockContext`. */
+ /**
+ * @return the `BlockContext` that would be used for the current `java.lang.Thread` at this point
+ **/
def current: BlockContext = contextLocal.get match {
case null => Thread.currentThread match {
case ctx: BlockContext => ctx
@@ -64,7 +71,9 @@ object BlockContext {
case some => some
}
- /** Pushes a current `BlockContext` while executing `body`. */
+ /**
+ * Installs a current `BlockContext` around executing `body`.
+ **/
def withBlockContext[T](blockContext: BlockContext)(body: => T): T = {
val old = contextLocal.get // can be null
try {
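`defaultBlockContext`, `current` and `withBlockContext` together let a library install its own blocking strategy for the duration of a computation. A hedged sketch of the intended usage; the logging context here is purely illustrative:

{{{
import scala.concurrent.{ BlockContext, CanAwait, blocking }

val loggingContext = new BlockContext {
  override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = {
    println("about to block on the current thread")
    // delegate the actual blocking to whatever would have been used otherwise
    BlockContext.defaultBlockContext.blockOn(thunk)
  }
}

BlockContext.withBlockContext(loggingContext) {
  blocking { Thread.sleep(10) }   // routed through loggingContext.blockOn
}
}}}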
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index e380c55880..df2d68c9c6 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -72,22 +72,24 @@ trait ExecutionContext {
*/
def reportFailure(@deprecatedName('t) cause: Throwable): Unit
- /** Prepares for the execution of a task. Returns the prepared execution context.
- *
- * `prepare` should be called at the site where an `ExecutionContext` is received (for
- * example, through an implicit method parameter). The returned execution context may
- * then be used to execute tasks. The role of `prepare` is to save any context relevant
- * to an execution's ''call site'', so that this context may be restored at the
- * ''execution site''. (These are often different: for example, execution may be
- * suspended through a `Promise`'s future until the `Promise` is completed, which may
- * be done in another thread, on another stack.)
- *
- * Note: a valid implementation of `prepare` is one that simply returns `this`.
- *
- * @return the prepared execution context
- */
+ /** Prepares for the execution of a task. Returns the prepared
+ * execution context. The recommended implementation of
+ * `prepare` is to return `this`.
+ *
+ * This method should no longer be overridden or called. It was
+ * originally expected that `prepare` would be called by
+ * all libraries that consume ExecutionContexts, in order to
+ * capture thread local context. However, this usage has proven
+ * difficult to implement in practice and instead it is
+ * now better to avoid using `prepare` entirely.
+ *
+ * Instead, if an `ExecutionContext` needs to capture thread
+ * local context, it should capture that context when it is
+ * constructed, so that it doesn't need any additional
+ * preparation later.
+ */
+ @deprecated("Preparation of ExecutionContexts will be removed.", "2.12")
def prepare(): ExecutionContext = this
-
}
/**
@@ -116,7 +118,7 @@ object ExecutionContext {
*
* @return the global `ExecutionContext`
*/
- def global: ExecutionContextExecutor = Implicits.global
+ def global: ExecutionContextExecutor = Implicits.global.asInstanceOf[ExecutionContextExecutor]
object Implicits {
/**
@@ -127,7 +129,7 @@ object ExecutionContext {
* the thread pool uses a target number of worker threads equal to the number of
* [[https://docs.oracle.com/javase/8/docs/api/java/lang/Runtime.html#availableProcessors-- available processors]].
*/
- implicit lazy val global: ExecutionContextExecutor = impl.ExecutionContextImpl.fromExecutor(null: Executor)
+ implicit lazy val global: ExecutionContext = impl.ExecutionContextImpl.fromExecutor(null: Executor)
}
/** Creates an `ExecutionContext` from the given `ExecutorService`.
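The deprecation note above recommends capturing any thread-local state when an `ExecutionContext` is constructed rather than in `prepare()`. A sketch of that pattern; `RequestId` and `RequestAwareEC` are hypothetical names used only for illustration:

{{{
import scala.concurrent.ExecutionContext

// Hypothetical request-scoped value that should be visible inside scheduled tasks.
object RequestId extends ThreadLocal[String]

class RequestAwareEC(underlying: ExecutionContext) extends ExecutionContext {
  // captured once, at construction time, on the calling thread
  private[this] val capturedId = RequestId.get()

  override def execute(runnable: Runnable): Unit = underlying.execute(new Runnable {
    override def run(): Unit = {
      val previous = RequestId.get()
      RequestId.set(capturedId)                  // restore the captured context on the worker thread
      try runnable.run() finally RequestId.set(previous)
    }
  })
  override def reportFailure(cause: Throwable): Unit = underlying.reportFailure(cause)
}
}}}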
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 914646320c..6304f35da9 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -10,26 +10,22 @@ package scala.concurrent
import scala.language.higherKinds
-import java.util.concurrent.{ ConcurrentLinkedQueue, TimeUnit, Callable }
+import java.util.concurrent.{ CountDownLatch, TimeUnit, Callable }
import java.util.concurrent.TimeUnit.{ NANOSECONDS => NANOS, MILLISECONDS ⇒ MILLIS }
-import java.lang.{ Iterable => JIterable }
-import java.util.{ LinkedList => JLinkedList }
-import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicLong, AtomicBoolean }
+import java.util.concurrent.atomic.AtomicInteger
import scala.util.control.NonFatal
-import scala.Option
import scala.util.{Try, Success, Failure}
-
+import scala.concurrent.duration._
import scala.annotation.tailrec
import scala.collection.mutable.Builder
import scala.collection.generic.CanBuildFrom
import scala.reflect.ClassTag
-
/** The trait that represents futures.
*
- * Asynchronous computations that yield futures are created with the `Future` call:
+ * Asynchronous computations that yield futures are created with the `Future.apply` call:
*
* {{{
* val s = "Hello"
@@ -60,6 +56,10 @@ import scala.reflect.ClassTag
* If a future is failed with a `scala.runtime.NonLocalReturnControl`,
* it is completed with a value from that throwable instead.
*
+ * @define swallowsExceptions
+ * Since this method executes asynchronously and does not produce a return value,
+ * any non-fatal exceptions thrown will be reported to the `ExecutionContext`.
+ *
* @define nonDeterministic
* Note: using this method yields nondeterministic dataflow programs.
*
@@ -91,14 +91,7 @@ import scala.reflect.ClassTag
* `execute()` either immediately or asynchronously.
*/
trait Future[+T] extends Awaitable[T] {
-
- // The executor within the lexical scope
- // of the Future trait. Note that this will
- // (modulo bugs) _never_ execute a callback
- // other than those below in this same file.
- //
- // See the documentation on `InternalCallbackExecutor` for more details.
- private def internalExecutor = Future.InternalCallbackExecutor
+ import Future.{ InternalCallbackExecutor => internalExecutor }
/* Callbacks */
@@ -109,9 +102,11 @@ trait Future[+T] extends Awaitable[T] {
* If the future has already been completed with a value,
* this will either be applied immediately or be scheduled asynchronously.
*
+ * $swallowsExceptions
* $multipleCallbacks
* $callbackInContext
*/
+ @deprecated("use `foreach` or `onComplete` instead (keep in mind that they take total rather than partial functions)", "2.12")
def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = onComplete {
case Success(v) =>
pf.applyOrElse[T, Any](v, Predef.conforms[T]) // Exploiting the cached function to avoid MatchError
@@ -128,9 +123,11 @@ trait Future[+T] extends Awaitable[T] {
*
* Will not be called in case that the future is completed with a value.
*
+ * $swallowsExceptions
* $multipleCallbacks
* $callbackInContext
*/
+ @deprecated("use `onComplete` or `failed.foreach` instead (keep in mind that they take total rather than partial functions)", "2.12")
def onFailure[U](@deprecatedName('callback) pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete {
case Failure(t) =>
pf.applyOrElse[Throwable, Any](t, Predef.conforms[Throwable]) // Exploiting the cached function to avoid MatchError
@@ -143,8 +140,12 @@ trait Future[+T] extends Awaitable[T] {
* If the future has already been completed,
* this will either be applied immediately or be scheduled asynchronously.
*
+ * $swallowsExceptions
* $multipleCallbacks
* $callbackInContext
+ *
+ * @tparam U only used to accept any return type of the given callback function
+ * @param f the function to be executed when this `Future` completes
*/
def onComplete[U](@deprecatedName('func) f: Try[T] => U)(implicit executor: ExecutionContext): Unit
@@ -160,46 +161,47 @@ trait Future[+T] extends Awaitable[T] {
*/
def isCompleted: Boolean
- /** The value of this `Future`.
+ /** The current value of this `Future`.
+ *
+ * $nonDeterministic
*
* If the future is not completed the returned value will be `None`.
* If the future is completed the value will be `Some(Success(t))`
* if it contains a valid result, or `Some(Failure(error))` if it contains
* an exception.
+ *
+ * @return `None` if the `Future` wasn't completed, `Some` if it was.
*/
def value: Option[Try[T]]
/* Projections */
- /** Returns a failed projection of this future.
- *
- * The failed projection is a future holding a value of type `Throwable`.
+ /** The returned `Future` will be successfully completed with the `Throwable` of the original `Future`
+ * if the original `Future` fails.
*
- * It is completed with a value which is the throwable of the original future
- * in case the original future is failed.
+ * If the original `Future` is successful, the returned `Future` is failed with a `NoSuchElementException`.
*
- * It is failed with a `NoSuchElementException` if the original future is completed successfully.
- *
- * Blocking on this future returns a value if the original future is completed with an exception
- * and throws a corresponding exception if the original future fails.
+ * @return a failed projection of this `Future`.
*/
- def failed: Future[Throwable] = {
- implicit val ec = internalExecutor
- val p = Promise[Throwable]()
- onComplete {
- case Failure(t) => p success t
- case Success(v) => p failure (new NoSuchElementException("Future.failed not completed with a throwable."))
- }
- p.future
- }
+ def failed: Future[Throwable] =
+ transform({
+ case Failure(t) => Success(t)
+ case Success(v) => Failure(new NoSuchElementException("Future.failed not completed with a throwable."))
+ })(internalExecutor)
/* Monadic operations */
/** Asynchronously processes the value in the future once the value becomes available.
*
- * Will not be called if the future fails.
+ * WARNING: Will not be called if this future is never completed or if it is completed with a failure.
+ *
+ * $swallowsExceptions
+ *
+ * @tparam U only used to accept any return type of the given callback function
+ * @param f the function which will be executed if this `Future` completes with a result,
+ * the return value of `f` will be discarded.
*/
def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = onComplete { _ foreach f }
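The `failed` projection is now built on `transform`, but its observable behaviour is unchanged; together with `foreach` it can be used as below (a small sketch using the global execution context):

{{{
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

val boom: Future[Int] = Future { throw new IllegalStateException("boom") }

boom.failed.foreach(t => println(s"failed with: ${t.getMessage}"))  // eventually prints "failed with: boom"
boom.foreach(v => println(v))                                       // never runs: the future failed
}}}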
@@ -208,33 +210,49 @@ trait Future[+T] extends Awaitable[T] {
* exception thrown when 's' or 'f' is applied, that exception will be propagated
* to the resulting future.
*
- * @param s function that transforms a successful result of the receiver into a
- * successful result of the returned future
- * @param f function that transforms a failure of the receiver into a failure of
- * the returned future
- * @return a future that will be completed with the transformed value
- */
- def transform[S](s: T => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = {
- val p = Promise[S]()
- // transform on Try has the wrong shape for us here
- onComplete {
- case Success(r) => p complete Try(s(r))
- case Failure(t) => p complete Try(throw f(t)) // will throw fatal errors!
+ * @tparam S the type of the returned `Future`
+ * @param s function that transforms a successful result of the receiver into a successful result of the returned future
+ * @param f function that transforms a failure of the receiver into a failure of the returned future
+ * @return a `Future` that will be completed with the transformed value
+ */
+ def transform[S](s: T => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] =
+ transform {
+ case Success(r) => Try(s(r))
+ case Failure(t) => Try(throw f(t)) // will throw fatal errors!
}
- p.future
- }
+
+ /** Creates a new Future by applying the specified function to the result
+ * of this Future. If there is any non-fatal exception thrown when 'f'
+ * is applied then that exception will be propagated to the resulting future.
+ *
+ * @tparam S the type of the returned `Future`
+ * @param f function that transforms the result of this future
+ * @return a `Future` that will be completed with the transformed value
+ */
+ def transform[S](f: Try[T] => Try[S])(implicit executor: ExecutionContext): Future[S]
+
+ /** Creates a new Future by applying the specified function, which produces a Future, to the result
+ * of this Future. If there is any non-fatal exception thrown when 'f'
+ * is applied then that exception will be propagated to the resulting future.
+ *
+ * @tparam S the type of the returned `Future`
+ * @param f function that transforms the result of this future
+ * @return a `Future` that will be completed with the transformed value
+ */
+ def transformWith[S](f: Try[T] => Future[S])(implicit executor: ExecutionContext): Future[S]
+
/** Creates a new future by applying a function to the successful result of
* this future. If this future is completed with an exception then the new
* future will also contain this exception.
*
* $forComprehensionExamples
+ *
+ * @tparam S the type of the returned `Future`
+ * @param f the function which will be applied to the successful result of this `Future`
+ * @return a `Future` which will be completed with the result of the application of the function
*/
- def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = { // transform(f, identity)
- val p = Promise[S]()
- onComplete { v => p complete (v map f) }
- p.future
- }
+ def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = transform(_.map(f))
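The new `transform(Try[T] => Try[S])` and `transformWith(Try[T] => Future[S])` primitives underlie `map`, `flatMap`, `recover` and friends. A brief sketch of using them directly:

{{{
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{ Success, Failure }

val f: Future[Int] = Future(6 / 0)

// turn any outcome into a Success, keeping the error message
val described: Future[String] = f.transform {
  case Success(n) => Success(s"got $n")
  case Failure(e) => Success(s"failed: ${e.getMessage}")
}

// fall back to another Future only on failure
val retried: Future[Int] = f.transformWith {
  case Success(n) => Future.successful(n)
  case Failure(_) => Future(42)
}
}}}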
/** Creates a new future by applying a function to the successful result of
* this future, and returns the result of the function as the new future.
@@ -242,21 +260,23 @@ trait Future[+T] extends Awaitable[T] {
* also contain this exception.
*
* $forComprehensionExamples
+ *
+ * @tparam S the type of the returned `Future`
+ * @param f the function which will be applied to the successful result of this `Future`
+ * @return a `Future` which will be completed with the result of the application of the function
*/
- def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = {
- import impl.Promise.DefaultPromise
- val p = new DefaultPromise[S]()
- onComplete {
- case f: Failure[_] => p complete f.asInstanceOf[Failure[S]]
- case Success(v) => try f(v) match {
- // If possible, link DefaultPromises to avoid space leaks
- case dp: DefaultPromise[_] => dp.asInstanceOf[DefaultPromise[S]].linkRootOf(p)
- case fut => fut.onComplete(p.complete)(internalExecutor)
- } catch { case NonFatal(t) => p failure t }
- }
- p.future
+ def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = transformWith {
+ case Success(s) => f(s)
+ case Failure(_) => this.asInstanceOf[Future[S]]
}
+ /** Creates a new future with one level of nesting flattened, this method is equivalent
+ * to `flatMap(identity)`.
+ *
+ * @tparam S the type of the returned `Future`
+ */
+ def flatten[S](implicit ev: T <:< Future[S]): Future[S] = flatMap(ev)(internalExecutor)
+
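`map` and `flatMap` are now thin wrappers over `transform`/`transformWith`, and the new `flatten` collapses one level of nesting. A quick sketch:

{{{
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

val n: Future[Int]              = Future(21)
val doubled: Future[Int]        = n.map(_ * 2)
val chained: Future[Int]        = n.flatMap(x => Future(x + 1))
val nested: Future[Future[Int]] = Future(Future(42))
val flat: Future[Int]           = nested.flatten            // same as nested.flatMap(identity)
}}}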
/** Creates a new future by filtering the value of the current future with a predicate.
*
* If the current future contains a value which satisfies the predicate, the new future will also hold that value.
@@ -269,14 +289,15 @@ trait Future[+T] extends Awaitable[T] {
* val f = Future { 5 }
* val g = f filter { _ % 2 == 1 }
* val h = f filter { _ % 2 == 0 }
- * Await.result(g, Duration.Zero) // evaluates to 5
+ * g foreach println // Eventually prints 5
* Await.result(h, Duration.Zero) // throw a NoSuchElementException
* }}}
+ *
+ * @param p the predicate to apply to the successful result of this `Future`
+ * @return a `Future` which will hold the successful result of this `Future` if it matches the predicate or a `NoSuchElementException`
*/
def filter(@deprecatedName('pred) p: T => Boolean)(implicit executor: ExecutionContext): Future[T] =
- map {
- r => if (p(r)) r else throw new NoSuchElementException("Future.filter predicate is not satisfied")
- }
+ map { r => if (p(r)) r else throw new NoSuchElementException("Future.filter predicate is not satisfied") }
/** Used by for-comprehensions.
*/
@@ -298,9 +319,13 @@ trait Future[+T] extends Awaitable[T] {
* val h = f collect {
* case x if x > 0 => x * 2
* }
- * Await.result(g, Duration.Zero) // evaluates to 5
+ * g foreach println // Eventually prints 5
* Await.result(h, Duration.Zero) // throw a NoSuchElementException
* }}}
+ *
+ * @tparam S the type of the returned `Future`
+ * @param pf the `PartialFunction` to apply to the successful result of this `Future`
+ * @return a `Future` holding the result of application of the `PartialFunction` or a `NoSuchElementException`
*/
def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] =
map {
@@ -318,12 +343,13 @@ trait Future[+T] extends Awaitable[T] {
* Future (6 / 0) recover { case e: NotFoundException => 0 } // result: exception
* Future (6 / 2) recover { case e: ArithmeticException => 0 } // result: 3
* }}}
+ *
+ * @tparam U the type of the returned `Future`
+ * @param pf the `PartialFunction` to apply if this `Future` fails
+ * @return a `Future` with the successful value of this `Future` or the result of the `PartialFunction`
*/
- def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = {
- val p = Promise[U]()
- onComplete { v => p complete (v recover pf) }
- p.future
- }
+ def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] =
+ transform { _ recover pf }
/** Creates a new future that will handle any matching throwable that this
* future might contain by assigning it a value of another future.
@@ -337,15 +363,16 @@ trait Future[+T] extends Awaitable[T] {
* val f = Future { Int.MaxValue }
* Future (6 / 0) recoverWith { case e: ArithmeticException => f } // result: Int.MaxValue
* }}}
+ *
+ * @tparam U the type of the returned `Future`
+ * @param pf the `PartialFunction` to apply if this `Future` fails
+ * @return a `Future` with the successful value of this `Future` or the outcome of the `Future` returned by the `PartialFunction`
*/
- def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = {
- val p = Promise[U]()
- onComplete {
- case Failure(t) => try pf.applyOrElse(t, (_: Throwable) => this).onComplete(p.complete)(internalExecutor) catch { case NonFatal(t) => p failure t }
- case other => p complete other
+ def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] =
+ transformWith {
+ case Failure(t) => pf.applyOrElse(t, (_: Throwable) => this)
+ case Success(_) => this
}
- p.future
- }
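`recover` and `recoverWith` keep their documented semantics after the rewrite above; the sketch below mirrors the Scaladoc examples, with a division by zero as the failing computation:

{{{
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

val failing = Future(6 / 0)

val recovered: Future[Int] = failing.recover { case _: ArithmeticException => 0 }
val rerouted:  Future[Int] = failing.recoverWith { case _: ArithmeticException => Future(Int.MaxValue) }
}}}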
/** Zips the values of `this` and `that` future, and creates
* a new future holding the tuple of their results.
@@ -354,17 +381,35 @@ trait Future[+T] extends Awaitable[T] {
* with the throwable stored in `this`.
* Otherwise, if `that` future fails, the resulting future is failed
* with the throwable stored in `that`.
+ *
+ * @tparam U the type of the other `Future`
+ * @param that the other `Future`
+ * @return a `Future` with the results of both futures or the failure of the first of them that failed
*/
def zip[U](that: Future[U]): Future[(T, U)] = {
implicit val ec = internalExecutor
- val p = Promise[(T, U)]()
- onComplete {
- case f: Failure[_] => p complete f.asInstanceOf[Failure[(T, U)]]
- case Success(s) => that onComplete { c => p.complete(c map { s2 => (s, s2) }) }
- }
- p.future
+ flatMap { r1 => that.map(r2 => (r1, r2)) }
}
+ /** Zips the values of `this` and `that` future using a function `f`,
+ * and creates a new future holding the result.
+ *
+ * If `this` future fails, the resulting future is failed
+ * with the throwable stored in `this`.
+ * Otherwise, if `that` future fails, the resulting future is failed
+ * with the throwable stored in `that`.
+ * If the application of `f` throws a throwable, the resulting future
+ * is failed with that throwable if it is non-fatal.
+ *
+ * @tparam U the type of the other `Future`
+ * @tparam R the type of the resulting `Future`
+ * @param that the other `Future`
+ * @param f the function to apply to the results of `this` and `that`
+ * @return a `Future` with the result of the application of `f` to the results of `this` and `that`
+ */
+ def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] =
+ flatMap(r1 => that.map(r2 => f(r1, r2)))(internalExecutor)
+
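`zip` now goes through `flatMap`/`map`, and the new `zipWith` lets the pairing function be supplied directly. A short sketch:

{{{
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

val a = Future(2)
val b = Future(3)

val pair: Future[(Int, Int)] = a zip b                 // eventually (2, 3)
val sum:  Future[Int]        = a.zipWith(b)(_ + _)     // eventually 5; fails if either side fails
}}}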
/** Creates a new future which holds the result of this future if it was completed successfully, or, if not,
* the result of the `that` future if `that` is completed successfully.
* If both futures are failed, the resulting future holds the throwable object of the first future.
@@ -376,24 +421,26 @@ trait Future[+T] extends Awaitable[T] {
* val f = Future { sys.error("failed") }
* val g = Future { 5 }
* val h = f fallbackTo g
- * Await.result(h, Duration.Zero) // evaluates to 5
+ * h foreach println // Eventually prints 5
* }}}
+ *
+ * @tparam U the type of the other `Future` and the resulting `Future`
+ * @param that the `Future` whose result we want to use if this `Future` fails.
+ * @return a `Future` with the successful result of this or that `Future` or the failure of this `Future` if both fail
*/
- def fallbackTo[U >: T](that: Future[U]): Future[U] = {
- implicit val ec = internalExecutor
- val p = Promise[U]()
- onComplete {
- case s @ Success(_) => p complete s
- case f @ Failure(_) => that onComplete {
- case s2 @ Success(_) => p complete s2
- case _ => p complete f // Use the first failure as the failure
- }
+ def fallbackTo[U >: T](that: Future[U]): Future[U] =
+ if (this eq that) this
+ else {
+ implicit val ec = internalExecutor
+ recoverWith { case _ => that } recoverWith { case _ => this }
}
- p.future
- }
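`fallbackTo` keeps its behaviour (first success wins, and the first failure is reported if both fail), with a new fast path when the receiver and the argument are the same future. Sketch:

{{{
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

val primary  = Future[Int] { sys.error("primary failed") }
val fallback = Future(5)

val result: Future[Int] = primary fallbackTo fallback   // eventually succeeds with 5
}}}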
/** Creates a new `Future[S]` which is completed with this `Future`'s result if
* that conforms to `S`'s erased type or a `ClassCastException` otherwise.
+ *
+ * @tparam S the type of the returned `Future`
+ * @param tag the `ClassTag` which will be used to cast the result of this `Future`
+ * @return a `Future` holding the casted result of this `Future` or a `ClassCastException` otherwise
*/
def mapTo[S](implicit tag: ClassTag[S]): Future[S] = {
implicit val ec = internalExecutor
@@ -427,15 +474,19 @@ trait Future[+T] extends Awaitable[T] {
* case Success(v) => println(v)
* }
* }}}
+ *
+ * @tparam U only used to accept any return type of the given `PartialFunction`
+ * @param pf a `PartialFunction` which will be conditionally applied to the outcome of this `Future`
+ * @return a `Future` which will be completed with the exact same outcome as this `Future` but after the `PartialFunction` has been executed.
*/
- def andThen[U](pf: PartialFunction[Try[T], U])(implicit executor: ExecutionContext): Future[T] = {
- val p = Promise[T]()
- onComplete {
- case r => try pf.applyOrElse[Try[T], Any](r, Predef.conforms[Try[T]]) finally p complete r
- }
- p.future
- }
+ def andThen[U](pf: PartialFunction[Try[T], U])(implicit executor: ExecutionContext): Future[T] =
+ transform {
+ result =>
+ try pf.applyOrElse[Try[T], Any](result, Predef.conforms[Try[T]])
+ catch { case NonFatal(t) => executor reportFailure t }
+ result
+ }
}
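`andThen` still returns a future with exactly the original outcome; the side-effecting partial function now runs via `transform`, and any exception it throws is reported to the `ExecutionContext` rather than propagated. Sketch:

{{{
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{ Success, Failure }

val f = Future(42)
  .andThen { case Success(v) => println(s"side effect saw $v") }
  .andThen { case Failure(t) => println(s"never printed: $t") }
// f still completes with 42, regardless of what the callbacks do
}}}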
@@ -459,40 +510,102 @@ object Future {
classOf[Unit] -> classOf[scala.runtime.BoxedUnit]
)
+ /** A Future which is never completed.
+ */
+ final object never extends Future[Nothing] {
+
+ @throws(classOf[TimeoutException])
+ @throws(classOf[InterruptedException])
+ override def ready(atMost: Duration)(implicit permit: CanAwait): this.type = {
+ atMost match {
+ case e if e eq Duration.Undefined => throw new IllegalArgumentException("cannot wait for Undefined period")
+ case Duration.Inf => new CountDownLatch(1).await()
+ case Duration.MinusInf => // Drop out
+ case f: FiniteDuration =>
+ if (f > Duration.Zero) new CountDownLatch(1).await(f.toNanos, TimeUnit.NANOSECONDS)
+ }
+ throw new TimeoutException(s"Future timed out after [$atMost]")
+ }
+
+ @throws(classOf[Exception])
+ override def result(atMost: Duration)(implicit permit: CanAwait): Nothing = {
+ ready(atMost)
+ throw new TimeoutException(s"Future timed out after [$atMost]")
+ }
+
+ override def onSuccess[U](pf: PartialFunction[Nothing, U])(implicit executor: ExecutionContext): Unit = ()
+ override def onFailure[U](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = ()
+ override def onComplete[U](f: Try[Nothing] => U)(implicit executor: ExecutionContext): Unit = ()
+ override def isCompleted: Boolean = false
+ override def value: Option[Try[Nothing]] = None
+ override def failed: Future[Throwable] = this
+ override def foreach[U](f: Nothing => U)(implicit executor: ExecutionContext): Unit = ()
+ override def transform[S](s: Nothing => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = this
+ override def transform[S](f: Try[Nothing] => Try[S])(implicit executor: ExecutionContext): Future[S] = this
+ override def transformWith[S](f: Try[Nothing] => Future[S])(implicit executor: ExecutionContext): Future[S] = this
+ override def map[S](f: Nothing => S)(implicit executor: ExecutionContext): Future[S] = this
+ override def flatMap[S](f: Nothing => Future[S])(implicit executor: ExecutionContext): Future[S] = this
+ override def flatten[S](implicit ev: Nothing <:< Future[S]): Future[S] = this
+ override def filter(p: Nothing => Boolean)(implicit executor: ExecutionContext): Future[Nothing] = this
+ override def collect[S](pf: PartialFunction[Nothing, S])(implicit executor: ExecutionContext): Future[S] = this
+ override def recover[U >: Nothing](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = this
+ override def recoverWith[U >: Nothing](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = this
+ override def zip[U](that: Future[U]): Future[(Nothing, U)] = this
+ override def zipWith[U, R](that: Future[U])(f: (Nothing, U) => R)(implicit executor: ExecutionContext): Future[R] = this
+ override def fallbackTo[U >: Nothing](that: Future[U]): Future[U] = this
+ override def mapTo[S](implicit tag: ClassTag[S]): Future[S] = this
+ override def andThen[U](pf: PartialFunction[Try[Nothing], U])(implicit executor: ExecutionContext): Future[Nothing] = this
+
+ override def toString: String = "Future(<never>)"
+ }
+
+ /** A Future which is always completed with the Unit value.
+ */
+ val unit: Future[Unit] = successful(())
+
/** Creates an already completed Future with the specified exception.
*
- * @tparam T the type of the value in the future
- * @return the newly created `Future` object
+ * @tparam T the type of the value in the future
+ * @param exception the non-null instance of `Throwable`
+ * @return the newly created `Future` instance
*/
def failed[T](exception: Throwable): Future[T] = Promise.failed(exception).future
/** Creates an already completed Future with the specified result.
*
* @tparam T the type of the value in the future
- * @return the newly created `Future` object
+ * @param result the given successful value
+ * @return the newly created `Future` instance
*/
def successful[T](result: T): Future[T] = Promise.successful(result).future
/** Creates an already completed Future with the specified result or exception.
*
- * @tparam T the type of the value in the promise
- * @return the newly created `Future` object
+ * @tparam T the type of the value in the `Future`
+ * @param result the result of the returned `Future` instance
+ * @return the newly created `Future` instance
*/
def fromTry[T](result: Try[T]): Future[T] = Promise.fromTry(result).future
- /** Starts an asynchronous computation and returns a `Future` object with the result of that computation.
+ /** Starts an asynchronous computation and returns a `Future` instance with the result of that computation.
*
* The result becomes available once the asynchronous computation is completed.
*
- * @tparam T the type of the result
- * @param body the asynchronous computation
+ * @tparam T the type of the result
+ * @param body the asynchronous computation
* @param executor the execution context on which the future is run
- * @return the `Future` holding the result of the computation
+ * @return the `Future` holding the result of the computation
*/
- def apply[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = impl.Future(body)
+ def apply[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] =
+ unit.map(_ => body)
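`Future.never`, `Future.unit` and the `unit.map`-based `Future.apply` can be exercised as below; a sketch only, and the timeout value is arbitrary:

{{{
import scala.concurrent.{ Await, Future }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import java.util.concurrent.TimeoutException

val work: Future[Int]  = Future(1 + 1)       // now implemented as unit.map(_ => body)
val done: Future[Unit] = Future.unit         // already completed with ()

try Await.result(Future.never, 100.millis)   // never completes, so this times out
catch { case _: TimeoutException => println("as expected, Future.never timed out") }
}}}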
- /** Simple version of `Future.traverse`. Transforms a `TraversableOnce[Future[A]]` into a `Future[TraversableOnce[A]]`.
- * Useful for reducing many `Future`s into a single `Future`.
+ /** Simple version of `Future.traverse`. Asynchronously and non-blockingly transforms a `TraversableOnce[Future[A]]`
+ * into a `Future[TraversableOnce[A]]`. Useful for reducing many `Future`s into a single `Future`.
+ *
+ * @tparam A the type of the value inside the Futures
+ * @tparam M the type of the `TraversableOnce` of Futures
+ * @param in the `TraversableOnce` of Futures which will be sequenced
+ * @return the `Future` of the `TraversableOnce` of results
*/
def sequence[A, M[X] <: TraversableOnce[X]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = {
in.foldLeft(successful(cbf(in))) {
@@ -500,7 +613,12 @@ object Future {
} map (_.result())
}
- /** Returns a new `Future` to the result of the first future in the list that is completed.
+ /** Asynchronously and non-blockingly returns a new `Future` to the result of the first future
+ * in the list that is completed, regardless of whether it completed as a success or as a failure.
+ *
+ * @tparam T the type of the value in the future
+ * @param futures the `TraversableOnce` of Futures in which to find the first completed
+ * @return the `Future` holding the result of the future that is first to be completed
*/
def firstCompletedOf[T](futures: TraversableOnce[Future[T]])(implicit executor: ExecutionContext): Future[T] = {
val p = Promise[T]()
@@ -509,8 +627,15 @@ object Future {
p.future
}
- /** Returns a `Future` that will hold the optional result of the first `Future` with a result that matches the predicate.
+ /** Asynchronously and non-blockingly returns a `Future` that will hold the optional result
+ * of the first `Future` with a result that matches the predicate.
+ *
+ * @tparam T the type of the value in the future
+ * @param futures the `TraversableOnce` of Futures to search
+ * @param p the predicate which indicates if it's a match
+ * @return the `Future` holding the optional result of the search
*/
+ @deprecated("Use the overloaded version of this method that takes a scala.collection.immutable.Iterable instead", "2.12")
def find[T](@deprecatedName('futurestravonce) futures: TraversableOnce[Future[T]])(@deprecatedName('predicate) p: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = {
val futuresBuffer = futures.toBuffer
if (futuresBuffer.isEmpty) successful[Option[T]](None)
@@ -534,40 +659,127 @@ object Future {
}
}
- /** A non-blocking fold over the specified futures, with the start value of the given zero.
+
+ /** Asynchronously and non-blockingly returns a `Future` that will hold the optional result
+ * of the first `Future` with a result that matches the predicate, failed `Future`s will be ignored.
+ *
+ * @tparam T the type of the value in the future
+ * @param futures the `scala.collection.immutable.Iterable` of Futures to search
+ * @param p the predicate which indicates if it's a match
+ * @return the `Future` holding the optional result of the search
+ */
+ def find[T](futures: scala.collection.immutable.Iterable[Future[T]])(p: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = {
+ def searchNext(i: Iterator[Future[T]]): Future[Option[T]] =
+ if (!i.hasNext) successful[Option[T]](None)
+ else {
+ i.next().transformWith {
+ case Success(r) if p(r) => successful(Some(r))
+ case other => searchNext(i)
+ }
+ }
+ searchNext(futures.iterator)
+ }
+
+ /** A non-blocking, asynchronous left fold over the specified futures,
+ * with the start value of the given zero.
+ * The fold is performed asynchronously in left-to-right order as the futures become completed.
+ * The result will be the first failure of any of the futures, or any failure in the actual fold,
+ * or the result of the fold.
+ *
+ * Example:
+ * {{{
+ * val futureSum = Future.foldLeft(futures)(0)(_ + _)
+ * }}}
+ *
+ * @tparam T the type of the value of the input Futures
+ * @tparam R the type of the value of the returned `Future`
+ * @param futures the `scala.collection.immutable.Iterable` of Futures to be folded
+ * @param zero the start value of the fold
+ * @param op the fold operation to be applied to the zero and futures
+ * @return the `Future` holding the result of the fold
+ */
+ def foldLeft[T, R](futures: scala.collection.immutable.Iterable[Future[T]])(zero: R)(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] =
+ foldNext(futures.iterator, zero, op)
+
+ private[this] def foldNext[T, R](i: Iterator[Future[T]], prevValue: R, op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] =
+ if (!i.hasNext) successful(prevValue)
+ else i.next().flatMap { value => foldNext(i, op(prevValue, value), op) }
+
+ /** A non-blocking, asynchronous fold over the specified futures, with the start value of the given zero.
* The fold is performed on the thread where the last future is completed,
* the result will be the first failure of any of the futures, or any failure in the actual fold,
* or the result of the fold.
*
* Example:
* {{{
- * val result = Await.result(Future.fold(futures)(0)(_ + _), 5 seconds)
+ * val futureSum = Future.fold(futures)(0)(_ + _)
* }}}
+ *
+ * @tparam T the type of the value of the input Futures
+ * @tparam R the type of the value of the returned `Future`
+ * @param futures the `TraversableOnce` of Futures to be folded
+ * @param zero the start value of the fold
+ * @param op the fold operation to be applied to the zero and futures
+ * @return the `Future` holding the result of the fold
*/
+ @deprecated("Use Future.foldLeft instead", "2.12")
def fold[T, R](futures: TraversableOnce[Future[T]])(zero: R)(@deprecatedName('foldFun) op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
if (futures.isEmpty) successful(zero)
else sequence(futures).map(_.foldLeft(zero)(op))
}
- /** Initiates a fold over the supplied futures where the fold-zero is the result value of the `Future` that's completed first.
+ /** Initiates a non-blocking, asynchronous, fold over the supplied futures
+ * where the fold-zero is the result value of the `Future` that's completed first.
*
* Example:
* {{{
- * val result = Await.result(Future.reduce(futures)(_ + _), 5 seconds)
+ * val futureSum = Future.reduce(futures)(_ + _)
* }}}
+ * @tparam T the type of the value of the input Futures
+ * @tparam R the type of the value of the returned `Future`
+ * @param futures the `TraversableOnce` of Futures to be reduced
+ * @param op the reduce operation which is applied to the results of the futures
+ * @return the `Future` holding the result of the reduce
*/
+ @deprecated("Use Future.reduceLeft instead", "2.12")
def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
if (futures.isEmpty) failed(new NoSuchElementException("reduce attempted on empty collection"))
else sequence(futures).map(_ reduceLeft op)
}
- /** Transforms a `TraversableOnce[A]` into a `Future[TraversableOnce[B]]` using the provided function `A => Future[B]`.
+ /** Initiates a non-blocking, asynchronous, left reduction over the supplied futures
+ * where the zero is the result value of the first `Future`.
+ *
+ * Example:
+ * {{{
+ * val futureSum = Future.reduceLeft(futures)(_ + _)
+ * }}}
+ * @tparam T the type of the value of the input Futures
+ * @tparam R the type of the value of the returned `Future`
+ * @param futures the `scala.collection.immutable.Iterable` of Futures to be reduced
+ * @param op the reduce operation which is applied to the results of the futures
+ * @return the `Future` holding the result of the reduce
+ */
+ def reduceLeft[T, R >: T](futures: scala.collection.immutable.Iterable[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
+ val i = futures.iterator
+ if (!i.hasNext) failed(new NoSuchElementException("reduceLeft attempted on empty collection"))
+ else i.next() flatMap { v => foldNext(i, v, op) }
+ }
+
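The new `find`, `foldLeft` and `reduceLeft` overloads take an immutable `Iterable` and walk it left to right without blocking. A small combined sketch:

{{{
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

val futures = List(Future(1), Future(2), Future(3))

val firstEven: Future[Option[Int]] = Future.find(futures)(_ % 2 == 0)   // eventually Some(2)
val total:     Future[Int]         = Future.foldLeft(futures)(0)(_ + _) // eventually 6
val reduced:   Future[Int]         = Future.reduceLeft(futures)(_ + _)  // eventually 6
}}}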
+ /** Asynchronously and non-blockingly transforms a `TraversableOnce[A]` into a `Future[TraversableOnce[B]]`
+ * using the provided function `A => Future[B]`.
* This is useful for performing a parallel map. For example, to apply a function to all items of a list
* in parallel:
*
* {{{
* val myFutureList = Future.traverse(myList)(x => Future(myFunc(x)))
* }}}
+ * @tparam A the type of the value inside the Futures in the `TraversableOnce`
+ * @tparam B the type of the value of the returned `Future`
+ * @tparam M the type of the `TraversableOnce` of Futures
+ * @param in the `TraversableOnce` of Futures which will be sequenced
+ * @param fn the function to apply to the `TraversableOnce` of Futures to produce the results
+ * @return the `Future` of the `TraversableOnce` of results
*/
def traverse[A, B, M[X] <: TraversableOnce[X]](in: M[A])(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] =
in.foldLeft(successful(cbf(in))) { (fr, a) =>
@@ -575,6 +787,7 @@ object Future {
for (r <- fr; b <- fb) yield (r += b)
}.map(_.result())
+
// This is used to run callbacks which are internal
// to scala.concurrent; our own callbacks are only
// ever used to eventually run another callback,
diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala
index 0f4e98db57..894b134e83 100644
--- a/src/library/scala/concurrent/Promise.scala
+++ b/src/library/scala/concurrent/Promise.scala
@@ -26,12 +26,6 @@ import scala.util.{ Try, Success, Failure }
* Note: Using this method may result in non-deterministic concurrent programs.
*/
trait Promise[T] {
-
- // used for internal callbacks defined in
- // the lexical scope of this trait;
- // _never_ for application callbacks.
- private implicit def internalExecutor: ExecutionContext = Future.InternalCallbackExecutor
-
/** Future containing the value of this promise.
*/
def future: Future[T]
@@ -73,7 +67,9 @@ trait Promise[T] {
* @return This promise
*/
final def tryCompleteWith(other: Future[T]): this.type = {
- other onComplete { this tryComplete _ }
+ if (other ne this.future) { // this tryCompleteWith this doesn't make much sense
+ other.onComplete(this tryComplete _)(Future.InternalCallbackExecutor)
+ }
this
}
@@ -139,5 +135,5 @@ object Promise {
* @tparam T the type of the value in the promise
* @return the newly created `Promise` object
*/
- def fromTry[T](result: Try[T]): Promise[T] = new impl.Promise.KeptPromise[T](result)
+ def fromTry[T](result: Try[T]): Promise[T] = impl.Promise.KeptPromise[T](result)
}
diff --git a/src/library/scala/concurrent/duration/Duration.scala b/src/library/scala/concurrent/duration/Duration.scala
index 2eded9f060..3697950e2e 100644
--- a/src/library/scala/concurrent/duration/Duration.scala
+++ b/src/library/scala/concurrent/duration/Duration.scala
@@ -705,7 +705,7 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio
final def isFinite() = true
- final def toCoarsest: Duration = {
+ final override def toCoarsest: FiniteDuration = {
def loop(length: Long, unit: TimeUnit): FiniteDuration = {
def coarserOrThis(coarser: TimeUnit, divider: Int) =
if (length % divider == 0) loop(length / divider, coarser)
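With the signature change above, `toCoarsest` on a `FiniteDuration` statically returns a `FiniteDuration`, so no cast is needed to keep working with finite values. Sketch:

{{{
import scala.concurrent.duration._

val d: FiniteDuration      = 60000.millis
val coarse: FiniteDuration = d.toCoarsest   // 1 minute, still typed as FiniteDuration
}}}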
diff --git a/src/library/scala/concurrent/impl/AbstractPromise.java b/src/library/scala/concurrent/impl/AbstractPromise.java
deleted file mode 100644
index b8165b6cde..0000000000
--- a/src/library/scala/concurrent/impl/AbstractPromise.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent.impl;
-
-
-import scala.concurrent.util.Unsafe;
-import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
-
-
-
-abstract class AbstractPromise {
- private volatile Object _ref;
-
- final static long _refoffset;
-
- static {
- try {
- _refoffset = Unsafe.instance.objectFieldOffset(AbstractPromise.class.getDeclaredField("_ref"));
- } catch (Throwable t) {
- throw new ExceptionInInitializerError(t);
- }
- }
-
- protected final boolean updateState(Object oldState, Object newState) {
- return Unsafe.instance.compareAndSwapObject(this, _refoffset, oldState, newState);
- }
-
- protected final Object getState() {
- return _ref;
- }
-
- protected final static AtomicReferenceFieldUpdater<AbstractPromise, Object> updater =
- AtomicReferenceFieldUpdater.newUpdater(AbstractPromise.class, Object.class, "_ref");
-} \ No newline at end of file
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index 479720287c..0c7f98ce5a 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -11,52 +11,88 @@ package scala.concurrent.impl
import java.util.concurrent.{ LinkedBlockingQueue, Callable, Executor, ExecutorService, Executors, ThreadFactory, TimeUnit, ThreadPoolExecutor }
+import java.util.concurrent.atomic.AtomicInteger
import java.util.Collection
import scala.concurrent.forkjoin._
import scala.concurrent.{ BlockContext, ExecutionContext, Awaitable, CanAwait, ExecutionContextExecutor, ExecutionContextExecutorService }
import scala.util.control.NonFatal
+import scala.annotation.tailrec
+private[scala] class ExecutionContextImpl private[impl] (val executor: Executor, val reporter: Throwable => Unit) extends ExecutionContextExecutor {
+ require(executor ne null, "Executor must not be null")
+ override def execute(runnable: Runnable) = executor execute runnable
+ override def reportFailure(t: Throwable) = reporter(t)
+}
-private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter: Throwable => Unit) extends ExecutionContextExecutor {
- // Placed here since the creation of the executor needs to read this val
- private[this] val uncaughtExceptionHandler: Thread.UncaughtExceptionHandler = new Thread.UncaughtExceptionHandler {
- def uncaughtException(thread: Thread, cause: Throwable): Unit = reporter(cause)
- }
- val executor: Executor = es match {
- case null => createExecutorService
- case some => some
- }
+private[concurrent] object ExecutionContextImpl {
// Implement BlockContext on FJP threads
- class DefaultThreadFactory(daemonic: Boolean) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory {
+ final class DefaultThreadFactory(
+ daemonic: Boolean,
+ maxThreads: Int,
+ prefix: String,
+ uncaught: Thread.UncaughtExceptionHandler) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory {
+
+ require(prefix ne null, "DefaultThreadFactory.prefix must be non null")
+ require(maxThreads > 0, "DefaultThreadFactory.maxThreads must be greater than 0")
+
+ private final val currentNumberOfThreads = new AtomicInteger(0)
+
+ @tailrec private final def reserveThread(): Boolean = currentNumberOfThreads.get() match {
+ case `maxThreads` | Int.`MaxValue` => false
+ case other => currentNumberOfThreads.compareAndSet(other, other + 1) || reserveThread()
+ }
+
+ @tailrec private final def deregisterThread(): Boolean = currentNumberOfThreads.get() match {
+ case 0 => false
+ case other => currentNumberOfThreads.compareAndSet(other, other - 1) || deregisterThread()
+ }
+
def wire[T <: Thread](thread: T): T = {
thread.setDaemon(daemonic)
- thread.setUncaughtExceptionHandler(uncaughtExceptionHandler)
+ thread.setUncaughtExceptionHandler(uncaught)
+ thread.setName(prefix + "-" + thread.getId())
thread
}
- def newThread(runnable: Runnable): Thread = wire(new Thread(runnable))
-
- def newThread(fjp: ForkJoinPool): ForkJoinWorkerThread = wire(new ForkJoinWorkerThread(fjp) with BlockContext {
- override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = {
- var result: T = null.asInstanceOf[T]
- ForkJoinPool.managedBlock(new ForkJoinPool.ManagedBlocker {
- @volatile var isdone = false
- override def block(): Boolean = {
- result = try thunk finally { isdone = true }
- true
+ // As per ThreadFactory contract newThread should return `null` if cannot create new thread.
+ def newThread(runnable: Runnable): Thread =
+ if (reserveThread())
+ wire(new Thread(new Runnable {
+ // We have to decrement the current thread count when the thread exits
+ override def run() = try runnable.run() finally deregisterThread()
+ })) else null
+
+ def newThread(fjp: ForkJoinPool): ForkJoinWorkerThread =
+ if (reserveThread()) {
+ wire(new ForkJoinWorkerThread(fjp) with BlockContext {
+ // We have to decrement the current thread count when the thread exits
+ final override def onTermination(exception: Throwable): Unit = deregisterThread()
+ final override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = {
+ var result: T = null.asInstanceOf[T]
+ ForkJoinPool.managedBlock(new ForkJoinPool.ManagedBlocker {
+ @volatile var isdone = false
+ override def block(): Boolean = {
+ result = try {
+ // When we block, switch out the BlockContext temporarily so that nested blocking does not create N new Threads
+ BlockContext.withBlockContext(BlockContext.defaultBlockContext) { thunk }
+ } finally {
+ isdone = true
+ }
+
+ true
+ }
+ override def isReleasable = isdone
+ })
+ result
}
- override def isReleasable = isdone
})
- result
- }
- })
+ } else null
}
- def createExecutorService: ExecutorService = {
-
+ def createDefaultExecutorService(reporter: Throwable => Unit): ExecutorService = {
def getInt(name: String, default: String) = (try System.getProperty(name, default) catch {
case e: SecurityException => default
}) match {
@@ -65,20 +101,42 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
}
def range(floor: Int, desired: Int, ceiling: Int) = scala.math.min(scala.math.max(floor, desired), ceiling)
+ val numThreads = getInt("scala.concurrent.context.numThreads", "x1")
+ // The hard limit on the number of active threads that the thread factory will produce
+ // SI-8955 Deadlocks can happen if maxNoOfThreads is too low, although we're currently not sure
+ // about what the exact threshold is. numThreads + 256 is conservatively high.
+ val maxNoOfThreads = getInt("scala.concurrent.context.maxThreads", "x1")
val desiredParallelism = range(
getInt("scala.concurrent.context.minThreads", "1"),
- getInt("scala.concurrent.context.numThreads", "x1"),
- getInt("scala.concurrent.context.maxThreads", "x1"))
+ numThreads,
+ maxNoOfThreads)
+
+ // The thread factory must provide additional threads to support managed blocking.
+ val maxExtraThreads = getInt("scala.concurrent.context.maxExtraThreads", "256")
+
+ val uncaughtExceptionHandler: Thread.UncaughtExceptionHandler = new Thread.UncaughtExceptionHandler {
+ override def uncaughtException(thread: Thread, cause: Throwable): Unit = reporter(cause)
+ }
- val threadFactory = new DefaultThreadFactory(daemonic = true)
+ val threadFactory = new ExecutionContextImpl.DefaultThreadFactory(daemonic = true,
+ maxThreads = maxNoOfThreads + maxExtraThreads,
+ prefix = "scala-execution-context-global",
+ uncaught = uncaughtExceptionHandler)
try {
- new ForkJoinPool(
- desiredParallelism,
- threadFactory,
- uncaughtExceptionHandler,
- true) // Async all the way baby
+ new ForkJoinPool(desiredParallelism, threadFactory, uncaughtExceptionHandler, true) {
+ override def execute(runnable: Runnable): Unit = {
+ val fjt: ForkJoinTask[_] = runnable match {
+ case t: ForkJoinTask[_] => t
+ case r => new ExecutionContextImpl.AdaptedForkJoinTask(r)
+ }
+ Thread.currentThread match {
+ case fjw: ForkJoinWorkerThread if fjw.getPool eq this => fjt.fork()
+ case _ => super.execute(fjt)
+ }
+ }
+ }
} catch {
case NonFatal(t) =>
System.err.println("Failed to create ForkJoinPool for the default ExecutionContext, falling back to ThreadPoolExecutor")
@@ -96,56 +154,42 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
}
}
- def execute(runnable: Runnable): Unit = executor match {
- case fj: ForkJoinPool =>
- val fjt: ForkJoinTask[_] = runnable match {
- case t: ForkJoinTask[_] => t
- case r => new ExecutionContextImpl.AdaptedForkJoinTask(r)
- }
- Thread.currentThread match {
- case fjw: ForkJoinWorkerThread if fjw.getPool eq fj => fjt.fork()
- case _ => fj execute fjt
- }
- case generic => generic execute runnable
- }
-
- def reportFailure(t: Throwable) = reporter(t)
-}
-
-
-private[concurrent] object ExecutionContextImpl {
-
final class AdaptedForkJoinTask(runnable: Runnable) extends ForkJoinTask[Unit] {
- final override def setRawResult(u: Unit): Unit = ()
- final override def getRawResult(): Unit = ()
- final override def exec(): Boolean = try { runnable.run(); true } catch {
- case anything: Throwable ⇒
- val t = Thread.currentThread
- t.getUncaughtExceptionHandler match {
- case null ⇒
- case some ⇒ some.uncaughtException(t, anything)
- }
- throw anything
- }
+ final override def setRawResult(u: Unit): Unit = ()
+ final override def getRawResult(): Unit = ()
+ final override def exec(): Boolean = try { runnable.run(); true } catch {
+ case anything: Throwable =>
+ val t = Thread.currentThread
+ t.getUncaughtExceptionHandler match {
+ case null =>
+ case some => some.uncaughtException(t, anything)
}
+ throw anything
+ }
+ }
- def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl = new ExecutionContextImpl(e, reporter)
- def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl with ExecutionContextExecutorService =
- new ExecutionContextImpl(es, reporter) with ExecutionContextExecutorService {
- final def asExecutorService: ExecutorService = executor.asInstanceOf[ExecutorService]
- override def execute(command: Runnable) = executor.execute(command)
- override def shutdown() { asExecutorService.shutdown() }
- override def shutdownNow() = asExecutorService.shutdownNow()
- override def isShutdown = asExecutorService.isShutdown
- override def isTerminated = asExecutorService.isTerminated
- override def awaitTermination(l: Long, timeUnit: TimeUnit) = asExecutorService.awaitTermination(l, timeUnit)
- override def submit[T](callable: Callable[T]) = asExecutorService.submit(callable)
- override def submit[T](runnable: Runnable, t: T) = asExecutorService.submit(runnable, t)
- override def submit(runnable: Runnable) = asExecutorService.submit(runnable)
- override def invokeAll[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAll(callables)
- override def invokeAll[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAll(callables, l, timeUnit)
- override def invokeAny[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAny(callables)
- override def invokeAny[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAny(callables, l, timeUnit)
+ def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl =
+ new ExecutionContextImpl(Option(e).getOrElse(createDefaultExecutorService(reporter)), reporter)
+
+ def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter):
+ ExecutionContextImpl with ExecutionContextExecutorService = {
+ new ExecutionContextImpl(Option(es).getOrElse(createDefaultExecutorService(reporter)), reporter)
+ with ExecutionContextExecutorService {
+ final def asExecutorService: ExecutorService = executor.asInstanceOf[ExecutorService]
+ override def execute(command: Runnable) = executor.execute(command)
+ override def shutdown() { asExecutorService.shutdown() }
+ override def shutdownNow() = asExecutorService.shutdownNow()
+ override def isShutdown = asExecutorService.isShutdown
+ override def isTerminated = asExecutorService.isTerminated
+ override def awaitTermination(l: Long, timeUnit: TimeUnit) = asExecutorService.awaitTermination(l, timeUnit)
+ override def submit[T](callable: Callable[T]) = asExecutorService.submit(callable)
+ override def submit[T](runnable: Runnable, t: T) = asExecutorService.submit(runnable, t)
+ override def submit(runnable: Runnable) = asExecutorService.submit(runnable)
+ override def invokeAll[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAll(callables)
+ override def invokeAll[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAll(callables, l, timeUnit)
+ override def invokeAny[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAny(callables)
+ override def invokeAny[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAny(callables, l, timeUnit)
+ }
}
}
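
Editor's note: to illustrate the Option(...).getOrElse(createDefaultExecutorService(reporter)) fallback introduced above, here is a minimal sketch using the public ExecutionContext factories, assuming they delegate to these impl methods; a null executor now yields a context backed by a freshly created default pool rather than failing later.

    import java.util.concurrent.Executors
    import scala.concurrent.ExecutionContext

    val pooled   = ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(4))
    val fallback = ExecutionContext.fromExecutor(null) // falls back to a default executor service

    pooled.execute(new Runnable { def run(): Unit = println("runs on the fixed pool") })
    fallback.execute(new Runnable { def run(): Unit = println("runs on the default pool") })
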
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
deleted file mode 100644
index 042d32c234..0000000000
--- a/src/library/scala/concurrent/impl/Future.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent.impl
-
-
-
-import scala.concurrent.ExecutionContext
-import scala.util.control.NonFatal
-import scala.util.{ Success, Failure }
-
-
-private[concurrent] object Future {
- class PromiseCompletingRunnable[T](body: => T) extends Runnable {
- val promise = new Promise.DefaultPromise[T]()
-
- override def run() = {
- promise complete {
- try Success(body) catch { case NonFatal(e) => Failure(e) }
- }
- }
- }
-
- def apply[T](body: =>T)(implicit executor: ExecutionContext): scala.concurrent.Future[T] = {
- val runnable = new PromiseCompletingRunnable(body)
- executor.prepare.execute(runnable)
- runnable.promise.future
- }
-}
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index b15601058e..078ad45be9 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -16,14 +16,42 @@ import scala.util.control.NonFatal
import scala.util.{ Try, Success, Failure }
import java.io.ObjectInputStream
import java.util.concurrent.locks.AbstractQueuedSynchronizer
+import java.util.concurrent.atomic.AtomicReference
private[concurrent] trait Promise[T] extends scala.concurrent.Promise[T] with scala.concurrent.Future[T] {
def future: this.type = this
+
+ import scala.concurrent.Future
+ import scala.concurrent.impl.Promise.DefaultPromise
+
+ override def transform[S](f: Try[T] => Try[S])(implicit executor: ExecutionContext): Future[S] = {
+ val p = new DefaultPromise[S]()
+ onComplete { result => p.complete(try f(result) catch { case NonFatal(t) => Failure(t) }) }
+ p.future
+ }
+
+ // If possible, link DefaultPromises to avoid space leaks
+ override def transformWith[S](f: Try[T] => Future[S])(implicit executor: ExecutionContext): Future[S] = {
+ val p = new DefaultPromise[S]()
+ onComplete {
+ v => try f(v) match {
+ case fut if fut eq this => p complete v.asInstanceOf[Try[S]]
+ case dp: DefaultPromise[_] => dp.asInstanceOf[DefaultPromise[S]].linkRootOf(p)
+ case fut => p completeWith fut
+ } catch { case NonFatal(t) => p failure t }
+ }
+ p.future
+ }
+
+ override def toString: String = value match {
+ case Some(result) => "Future("+result+")"
+ case None => "Future(<not completed>)"
+ }
}
/* Precondition: `executor` is prepared, i.e., `executor` has been returned from invocation of `prepare` on some other `ExecutionContext`.
*/
-private class CallbackRunnable[T](val executor: ExecutionContext, val onComplete: Try[T] => Any) extends Runnable with OnCompleteRunnable {
+private final class CallbackRunnable[T](val executor: ExecutionContext, val onComplete: Try[T] => Any) extends Runnable with OnCompleteRunnable {
// must be filled in before running it
var value: Try[T] = null
@@ -89,7 +117,7 @@ private[concurrent] object Promise {
* incomplete, or as complete with the same result value.
*
* A DefaultPromise stores its state entirely in the AnyRef cell exposed by
- * AbstractPromise. The type of object stored in the cell fully describes the
+ * AtomicReference. The type of object stored in the cell fully describes the
* current state of the promise.
*
* 1. List[CallbackRunnable] - The promise is incomplete and has zero or more callbacks
@@ -150,8 +178,7 @@ private[concurrent] object Promise {
* DefaultPromises, and `linkedRootOf` is currently only designed to be called
* by Future.flatMap.
*/
- class DefaultPromise[T] extends AbstractPromise with Promise[T] { self =>
- updateState(null, Nil) // The promise is incomplete and has no callbacks
+ final class DefaultPromise[T] extends AtomicReference[AnyRef](Nil) with Promise[T] {
/** Get the root promise for this promise, compressing the link chain to that
* promise if necessary.
@@ -167,14 +194,23 @@ private[concurrent] object Promise {
* be garbage collected. Also, subsequent calls to this method should be
* faster as the link chain will be shorter.
*/
- @tailrec
- private def compressedRoot(): DefaultPromise[T] = {
- getState match {
- case linked: DefaultPromise[_] =>
- val target = linked.asInstanceOf[DefaultPromise[T]].root
- if (linked eq target) target else if (updateState(linked, target)) target else compressedRoot()
+ private def compressedRoot(): DefaultPromise[T] =
+ get() match {
+ case linked: DefaultPromise[_] => compressedRoot(linked)
case _ => this
}
+
+ @tailrec
+ private[this] final def compressedRoot(linked: DefaultPromise[_]): DefaultPromise[T] = {
+ val target = linked.asInstanceOf[DefaultPromise[T]].root
+ if (linked eq target) target
+ else if (compareAndSet(linked, target)) target
+ else {
+ get() match {
+ case newLinked: DefaultPromise[_] => compressedRoot(newLinked)
+ case _ => this
+ }
+ }
}
/** Get the promise at the root of the chain of linked promises. Used by `compressedRoot()`.
@@ -182,18 +218,16 @@ private[concurrent] object Promise {
* to compress the link chain whenever possible.
*/
@tailrec
- private def root: DefaultPromise[T] = {
- getState match {
+ private def root: DefaultPromise[T] =
+ get() match {
case linked: DefaultPromise[_] => linked.asInstanceOf[DefaultPromise[T]].root
case _ => this
}
- }
/** Try waiting for this promise to be completed.
*/
protected final def tryAwait(atMost: Duration): Boolean = if (!isCompleted) {
import Duration.Undefined
- import scala.concurrent.Future.InternalCallbackExecutor
atMost match {
case e if e eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period")
case Duration.Inf =>
@@ -225,18 +259,18 @@ private[concurrent] object Promise {
def value: Option[Try[T]] = value0
@tailrec
- private def value0: Option[Try[T]] = getState match {
+ private def value0: Option[Try[T]] = get() match {
case c: Try[_] => Some(c.asInstanceOf[Try[T]])
- case _: DefaultPromise[_] => compressedRoot().value0
+ case dp: DefaultPromise[_] => compressedRoot(dp).value0
case _ => None
}
override def isCompleted: Boolean = isCompleted0
@tailrec
- private def isCompleted0: Boolean = getState match {
+ private def isCompleted0: Boolean = get() match {
case _: Try[_] => true
- case _: DefaultPromise[_] => compressedRoot().isCompleted0
+ case dp: DefaultPromise[_] => compressedRoot(dp).isCompleted0
case _ => false
}
@@ -254,21 +288,17 @@ private[concurrent] object Promise {
*/
@tailrec
private def tryCompleteAndGetListeners(v: Try[T]): List[CallbackRunnable[T]] = {
- getState match {
+ get() match {
case raw: List[_] =>
val cur = raw.asInstanceOf[List[CallbackRunnable[T]]]
- if (updateState(cur, v)) cur else tryCompleteAndGetListeners(v)
- case _: DefaultPromise[_] =>
- compressedRoot().tryCompleteAndGetListeners(v)
+ if (compareAndSet(cur, v)) cur else tryCompleteAndGetListeners(v)
+ case dp: DefaultPromise[_] => compressedRoot(dp).tryCompleteAndGetListeners(v)
case _ => null
}
}
- def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = {
- val preparedEC = executor.prepare()
- val runnable = new CallbackRunnable[T](preparedEC, func)
- dispatchOrAddCallback(runnable)
- }
+ def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit =
+ dispatchOrAddCallback(new CallbackRunnable[T](executor.prepare(), func))
/** Tries to add the callback, if already completed, it dispatches the callback to be executed.
* Used by `onComplete()` to add callbacks to a promise and by `link()` to transfer callbacks
@@ -276,15 +306,16 @@ private[concurrent] object Promise {
*/
@tailrec
private def dispatchOrAddCallback(runnable: CallbackRunnable[T]): Unit = {
- getState match {
+ get() match {
case r: Try[_] => runnable.executeWithValue(r.asInstanceOf[Try[T]])
- case _: DefaultPromise[_] => compressedRoot().dispatchOrAddCallback(runnable)
- case listeners: List[_] => if (updateState(listeners, runnable :: listeners)) () else dispatchOrAddCallback(runnable)
+ case dp: DefaultPromise[_] => compressedRoot(dp).dispatchOrAddCallback(runnable)
+ case listeners: List[_] => if (compareAndSet(listeners, runnable :: listeners)) ()
+ else dispatchOrAddCallback(runnable)
}
}
/** Link this promise to the root of another promise using `link()`. Should only be
- * be called by Future.flatMap.
+ * be called by transformWith.
*/
protected[concurrent] final def linkRootOf(target: DefaultPromise[T]): Unit = link(target.compressedRoot())
@@ -299,18 +330,17 @@ private[concurrent] object Promise {
*/
@tailrec
private def link(target: DefaultPromise[T]): Unit = if (this ne target) {
- getState match {
+ get() match {
case r: Try[_] =>
- if (!target.tryComplete(r.asInstanceOf[Try[T]])) {
- // Currently linking is done from Future.flatMap, which should ensure only
- // one promise can be completed. Therefore this situation is unexpected.
+ if (!target.tryComplete(r.asInstanceOf[Try[T]]))
throw new IllegalStateException("Cannot link completed promises together")
- }
- case _: DefaultPromise[_] =>
- compressedRoot().link(target)
- case listeners: List[_] => if (updateState(listeners, target)) {
- if (!listeners.isEmpty) listeners.asInstanceOf[List[CallbackRunnable[T]]].foreach(target.dispatchOrAddCallback(_))
- } else link(target)
+ case dp: DefaultPromise[_] =>
+ compressedRoot(dp).link(target)
+ case listeners: List[_] if compareAndSet(listeners, target) =>
+ if (listeners.nonEmpty)
+ listeners.asInstanceOf[List[CallbackRunnable[T]]].foreach(target.dispatchOrAddCallback(_))
+ case _ =>
+ link(target)
}
}
}
@@ -319,23 +349,58 @@ private[concurrent] object Promise {
*
* Useful in Future-composition when a value to contribute is already available.
*/
- final class KeptPromise[T](suppliedValue: Try[T]) extends Promise[T] {
+ object KeptPromise {
+ import scala.concurrent.Future
+ import scala.reflect.ClassTag
+
+ private[this] sealed trait Kept[T] extends Promise[T] {
+ def result: Try[T]
+
+ override def value: Option[Try[T]] = Some(result)
- val value = Some(resolveTry(suppliedValue))
+ override def isCompleted: Boolean = true
- override def isCompleted: Boolean = true
+ override def tryComplete(value: Try[T]): Boolean = false
- def tryComplete(value: Try[T]): Boolean = false
+ override def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit =
+ (new CallbackRunnable(executor.prepare(), func)).executeWithValue(result)
- def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = {
- val completedAs = value.get
- val preparedEC = executor.prepare()
- (new CallbackRunnable(preparedEC, func)).executeWithValue(completedAs)
+ override def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this
+
+ override def result(atMost: Duration)(implicit permit: CanAwait): T = result.get
}
- def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this
+ private[this] final class Successful[T](val result: Success[T]) extends Kept[T] {
+ override def onFailure[U](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = ()
+ override def failed: Future[Throwable] = KeptPromise(Failure(new NoSuchElementException("Future.failed not completed with a throwable."))).future
+ override def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = this
+ override def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = this
+ override def fallbackTo[U >: T](that: Future[U]): Future[U] = this
+ }
- def result(atMost: Duration)(implicit permit: CanAwait): T = value.get.get
+ private[this] final class Failed[T](val result: Failure[T]) extends Kept[T] {
+ private[this] final def thisAs[S]: Future[S] = future.asInstanceOf[Future[S]]
+
+ override def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = ()
+ override def failed: Future[Throwable] = thisAs[Throwable]
+ override def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = ()
+ override def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = thisAs[S]
+ override def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = thisAs[S]
+ override def flatten[S](implicit ev: T <:< Future[S]): Future[S] = thisAs[S]
+ override def filter(p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = this
+ override def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = thisAs[S]
+ override def zip[U](that: Future[U]): Future[(T, U)] = thisAs[(T,U)]
+ override def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = thisAs[R]
+ override def fallbackTo[U >: T](that: Future[U]): Future[U] =
+ if (this eq that) this else that.recoverWith({ case _ => this })(InternalCallbackExecutor)
+ override def mapTo[S](implicit tag: ClassTag[S]): Future[S] = thisAs[S]
+ }
+
+ def apply[T](result: Try[T]): scala.concurrent.Promise[T] =
+ resolveTry(result) match {
+ case s @ Success(_) => new Successful(s)
+ case f @ Failure(_) => new Failed(f)
+ }
}
}
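
Editor's note: the linking performed in transformWith above is what keeps long flatMap chains from leaking; when the function returns another DefaultPromise, the promises are linked and the chain compressed instead of stacking callbacks. A minimal sketch of the kind of tail-recursive chain this targets, assuming Future.flatMap is implemented in terms of transformWith as the linking comment suggests:

    import scala.concurrent.{ExecutionContext, Future}
    import ExecutionContext.Implicits.global

    // Each step returns a fresh DefaultPromise, which gets linked to the root
    // promise rather than accumulating an ever-growing callback chain.
    def countdown(n: Int): Future[Int] =
      if (n == 0) Future.successful(0)
      else Future(n - 1).flatMap(countdown)
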
diff --git a/src/library/scala/deprecatedName.scala b/src/library/scala/deprecatedName.scala
index 07c5c8925c..a0d3aa829b 100644
--- a/src/library/scala/deprecatedName.scala
+++ b/src/library/scala/deprecatedName.scala
@@ -29,4 +29,6 @@ import scala.annotation.meta._
* @since 2.8.1
*/
@param
-class deprecatedName(name: Symbol) extends scala.annotation.StaticAnnotation
+class deprecatedName(name: Symbol) extends scala.annotation.StaticAnnotation {
+ def this() = this(Symbol("<none>"))
+}
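
Editor's note: a hedged sketch of what the new no-argument constructor enables. The annotation can now be applied bare, presumably to flag the parameter's current name as deprecated for named-argument call sites without nominating an old name (the Symbol("<none>") sentinel above); the class and method names below are hypothetical.

    import scala.deprecatedName

    class Config {
      // Hypothetical API: callers writing maxSize = ... by name would be warned,
      // while positional arguments stay clean.
      def resize(@deprecatedName() maxSize: Int): Unit = ()
    }
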
diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala
index 9f0b56b4fe..e38c197196 100644
--- a/src/library/scala/io/Source.scala
+++ b/src/library/scala/io/Source.scala
@@ -10,7 +10,7 @@ package scala
package io
import scala.collection.AbstractIterator
-import java.io.{ FileInputStream, InputStream, PrintStream, File => JFile }
+import java.io.{ FileInputStream, InputStream, PrintStream, File => JFile, Closeable }
import java.net.{ URI, URL }
/** This object provides convenience methods to create an iterable
@@ -187,7 +187,7 @@ object Source {
* @author Burak Emir
* @version 1.0
*/
-abstract class Source extends Iterator[Char] {
+abstract class Source extends Iterator[Char] with Closeable {
/** the actual iterator */
protected val iter: Iterator[Char]
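
Editor's note: since Source now advertises itself as a java.io.Closeable, it can be handed to generic resource-management helpers. A small sketch; withResource and the file path are illustrative assumptions, not part of the change.

    import java.io.Closeable
    import scala.io.Source

    def withResource[A <: Closeable, B](resource: A)(body: A => B): B =
      try body(resource) finally resource.close()

    // Hypothetical input file; any readable text file would do.
    val firstLine = withResource(Source.fromFile("notes.txt"))(_.getLines().next())
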
diff --git a/src/library/scala/math/package.scala b/src/library/scala/math/package.scala
index 58ece8a05b..b6593d6661 100644
--- a/src/library/scala/math/package.scala
+++ b/src/library/scala/math/package.scala
@@ -26,7 +26,7 @@ package object math {
/** Returns a `double` value with a positive sign, greater than or equal
* to `0.0` and less than `1.0`.
*/
- def random: Double = java.lang.Math.random()
+ def random(): Double = java.lang.Math.random()
def sin(x: Double): Double = java.lang.Math.sin(x)
def cos(x: Double): Double = java.lang.Math.cos(x)
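
Editor's note: a one-line sketch of the signature change above; random is now declared with an empty parameter list, so the conventional call shape carries parentheses.

    val r: Double = math.random()   // still a value in [0.0, 1.0)
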
diff --git a/src/library/scala/runtime/BoxesRunTime.java b/src/library/scala/runtime/BoxesRunTime.java
index 82a3b00ac4..a6df20165d 100644
--- a/src/library/scala/runtime/BoxesRunTime.java
+++ b/src/library/scala/runtime/BoxesRunTime.java
@@ -183,7 +183,7 @@ public final class BoxesRunTime
return xc.equals(y);
}
- private static boolean equalsNumChar(java.lang.Number xn, java.lang.Character yc) {
+ public static boolean equalsNumChar(java.lang.Number xn, java.lang.Character yc) {
if (yc == null)
return xn == null;
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index 18fcbf8276..a0d89fc0e1 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -13,6 +13,7 @@ import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator }
import scala.collection.mutable.WrappedArray
import scala.collection.immutable.{ StringLike, NumericRange, List, Stream, Nil, :: }
import scala.collection.generic.{ Sorted, IsTraversableLike }
+import scala.collection.parallel.ParIterable
import scala.reflect.{ ClassTag, classTag }
import scala.util.control.ControlThrowable
import java.lang.{ Class => jClass }
@@ -326,6 +327,7 @@ object ScalaRunTime {
case x: AnyRef if isArray(x) => arrayToString(x)
case x: scala.collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")")
case x: Iterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
+ case x: ParIterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma
case x: Product if isTuple(x) => x.productIterator map inner mkString ("(", ",", ")")
diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala
index d2ebf8c044..6f8b13a89b 100644
--- a/src/library/scala/sys/SystemProperties.scala
+++ b/src/library/scala/sys/SystemProperties.scala
@@ -35,8 +35,15 @@ extends mutable.AbstractMap[String, String]
override def empty = new SystemProperties
override def default(key: String): String = null
- def iterator: Iterator[(String, String)] =
- wrapAccess(System.getProperties().asScala.iterator) getOrElse Iterator.empty
+ def iterator: Iterator[(String, String)] = wrapAccess {
+ val ps = System.getProperties()
+ names map (k => (k, ps getProperty k)) filter (_._2 ne null)
+ } getOrElse Iterator.empty
+
+ def names: Iterator[String] = wrapAccess (
+ System.getProperties().stringPropertyNames().asScala.iterator
+ ) getOrElse Iterator.empty
+
def get(key: String) =
wrapAccess(Option(System.getProperty(key))) flatMap (x => x)
override def contains(key: String) =
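
Editor's note: a small sketch of the new names iterator; sys.props is the SystemProperties instance provided by the scala.sys package object, so the method is available there directly.

    // List the keys of all JVM properties in the java.* namespace.
    scala.sys.props.names.filter(_.startsWith("java.")).foreach(println)
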
diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala
index b31bbf0540..866dac4458 100644
--- a/src/library/scala/sys/process/BasicIO.scala
+++ b/src/library/scala/sys/process/BasicIO.scala
@@ -221,7 +221,7 @@ object BasicIO {
*/
def transferFully(in: InputStream, out: OutputStream): Unit =
try transferFullyImpl(in, out)
- catch onInterrupt(())
+ catch onIOInterrupt(())
private[this] def appendLine(buffer: Appendable): String => Unit = line => {
buffer append line
diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala
index 2b7fcdeb73..d15f1a2b3d 100644
--- a/src/library/scala/sys/process/ProcessImpl.scala
+++ b/src/library/scala/sys/process/ProcessImpl.scala
@@ -109,45 +109,46 @@ private[process] trait ProcessImpl {
}
private[process] class PipedProcesses(a: ProcessBuilder, b: ProcessBuilder, defaultIO: ProcessIO, toError: Boolean) extends CompoundProcess {
- protected[this] override def runAndExitValue() = {
- val currentSource = new SyncVar[Option[InputStream]]
- val pipeOut = new PipedOutputStream
- val source = new PipeSource(currentSource, pipeOut, a.toString)
+ protected[this] override def runAndExitValue() = runAndExitValue(new PipeSource(a.toString), new PipeSink(b.toString))
+ protected[this] def runAndExitValue(source: PipeSource, sink: PipeSink): Option[Int] = {
+ source connectOut sink
source.start()
-
- val pipeIn = new PipedInputStream(pipeOut)
- val currentSink = new SyncVar[Option[OutputStream]]
- val sink = new PipeSink(pipeIn, currentSink, b.toString)
sink.start()
- def handleOutOrError(fromOutput: InputStream) = currentSource put Some(fromOutput)
+ /** Release PipeSource, PipeSink and Process in the correct order.
+ * Once a Process has been connected to a Source or Sink, they must be released
+ * in the order Source -> Sink -> Process, otherwise an IOException will be thrown. */
+ def releaseResources(so: PipeSource, sk: PipeSink, p: Process *) = {
+ so.release()
+ sk.release()
+ p foreach( _.destroy() )
+ }
val firstIO =
- if (toError)
- defaultIO.withError(handleOutOrError)
- else
- defaultIO.withOutput(handleOutOrError)
- val secondIO = defaultIO.withInput(toInput => currentSink put Some(toInput))
-
- val second = b.run(secondIO)
- val first = a.run(firstIO)
- try {
- runInterruptible {
- val exit1 = first.exitValue()
- currentSource put None
- currentSink put None
- val exit2 = second.exitValue()
- // Since file redirection (e.g. #>) is implemented as a piped process,
- // we ignore its exit value so cmd #> file doesn't always return 0.
- if (b.hasExitValue) exit2 else exit1
- } {
- first.destroy()
- second.destroy()
+ if (toError) defaultIO.withError(source.connectIn)
+ else defaultIO.withOutput(source.connectIn)
+ val secondIO = defaultIO.withInput(sink.connectOut)
+
+ val second =
+ try b.run(secondIO)
+ catch onError { err =>
+ releaseResources(source, sink)
+ throw err
}
- }
- finally {
- BasicIO close pipeIn
- BasicIO close pipeOut
+ val first =
+ try a.run(firstIO)
+ catch onError { err =>
+ releaseResources(source, sink, second)
+ throw err
+ }
+ runInterruptible {
+ val exit1 = first.exitValue()
+ val exit2 = second.exitValue()
+ // Since file redirection (e.g. #>) is implemented as a piped process,
+ // we ignore its exit value so cmd #> file doesn't always return 0.
+ if (b.hasExitValue) exit2 else exit1
+ } {
+ releaseResources(source, sink, first, second)
}
}
}
@@ -168,37 +169,46 @@ private[process] trait ProcessImpl {
}
}
- private[process] class PipeSource(
- currentSource: SyncVar[Option[InputStream]],
- pipe: PipedOutputStream,
- label: => String
- ) extends PipeThread(false, () => label) {
-
- final override def run(): Unit = currentSource.get match {
- case Some(source) =>
- try runloop(source, pipe)
- finally currentSource.unset()
-
- run()
- case None =>
- currentSource.unset()
- BasicIO close pipe
+ private[process] class PipeSource(label: => String) extends PipeThread(false, () => label) {
+ protected[this] val pipe = new PipedOutputStream
+ protected[this] val source = new LinkedBlockingQueue[Option[InputStream]]
+ override def run(): Unit = {
+ try {
+ source.take match {
+ case Some(in) => runloop(in, pipe)
+ case None =>
+ }
+ }
+ catch onInterrupt(())
+ finally BasicIO close pipe
+ }
+ def connectIn(in: InputStream): Unit = source add Some(in)
+ def connectOut(sink: PipeSink): Unit = sink connectIn pipe
+ def release(): Unit = {
+ interrupt()
+ source add None
+ join()
}
}
- private[process] class PipeSink(
- pipe: PipedInputStream,
- currentSink: SyncVar[Option[OutputStream]],
- label: => String
- ) extends PipeThread(true, () => label) {
-
- final override def run(): Unit = currentSink.get match {
- case Some(sink) =>
- try runloop(pipe, sink)
- finally currentSink.unset()
-
- run()
- case None =>
- currentSink.unset()
+ private[process] class PipeSink(label: => String) extends PipeThread(true, () => label) {
+ protected[this] val pipe = new PipedInputStream
+ protected[this] val sink = new LinkedBlockingQueue[Option[OutputStream]]
+ override def run(): Unit = {
+ try {
+ sink.take match {
+ case Some(out) => runloop(pipe, out)
+ case None =>
+ }
+ }
+ catch onInterrupt(())
+ finally BasicIO close pipe
+ }
+ def connectOut(out: OutputStream): Unit = sink add Some(out)
+ def connectIn(pipeOut: PipedOutputStream): Unit = pipe connect pipeOut
+ def release(): Unit = {
+ interrupt()
+ sink add None
+ join()
}
}
diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala
index b1976ad4b6..5ec2e73cb9 100644
--- a/src/library/scala/sys/process/package.scala
+++ b/src/library/scala/sys/process/package.scala
@@ -224,16 +224,26 @@ package scala.sys {
final val processDebug = props contains "scala.process.debug"
dbg("Initializing process package.")
- type =?>[-A, +B] = PartialFunction[A, B]
- type Closeable = java.io.Closeable
- type File = java.io.File
- type IOException = java.io.IOException
- type InputStream = java.io.InputStream
- type JProcess = java.lang.Process
- type JProcessBuilder = java.lang.ProcessBuilder
- type OutputStream = java.io.OutputStream
- type SyncVar[T] = scala.concurrent.SyncVar[T]
- type URL = java.net.URL
+ type =?>[-A, +B] = PartialFunction[A, B]
+ type Closeable = java.io.Closeable
+ type File = java.io.File
+ type IOException = java.io.IOException
+ type InterruptedIOException = java.io.InterruptedIOException
+ type InputStream = java.io.InputStream
+ type JProcess = java.lang.Process
+ type JProcessBuilder = java.lang.ProcessBuilder
+ type LinkedBlockingQueue[T] = java.util.concurrent.LinkedBlockingQueue[T]
+ type OutputStream = java.io.OutputStream
+ type SyncVar[T] = scala.concurrent.SyncVar[T]
+ type URL = java.net.URL
+
+ def onError[T](handler: Throwable => T): Throwable =?> T = {
+ case e @ _ => handler(e)
+ }
+
+ def onIOInterrupt[T](handler: => T): Throwable =?> T = {
+ case _: InterruptedIOException => handler
+ }
def onInterrupt[T](handler: => T): Throwable =?> T = {
case _: InterruptedException => handler
diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala
index e196d403c2..6ea9da64f2 100644
--- a/src/library/scala/util/Either.scala
+++ b/src/library/scala/util/Either.scala
@@ -68,7 +68,7 @@ import scala.language.implicitConversions
* @version 1.0, 11/10/2008
* @since 2.7
*/
-sealed abstract class Either[+A, +B] {
+sealed abstract class Either[+A, +B] extends Product with Serializable {
/**
* Projects this `Either` as a `Left`.
*/
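
Editor's note: one visible consequence of mixing Product with Serializable into Either is that the least upper bound of Left and Right no longer drags those traits into inferred types. A minimal sketch:

    // Previously inferred as List[Product with Serializable with Either[String, Int]];
    // with this change the inferred type is simply List[Either[String, Int]].
    val xs = List(Right(1), Left("boom"))
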
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index b0cf122f2a..1e6ae6c591 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -60,7 +60,7 @@ import scala.language.implicitConversions
* @author based on Twitter's original implementation in com.twitter.util.
* @since 2.10
*/
-sealed abstract class Try[+T] {
+sealed abstract class Try[+T] extends Product with Serializable {
/** Returns `true` if the `Try` is a `Failure`, `false` otherwise.
*/
@@ -74,16 +74,11 @@ sealed abstract class Try[+T] {
*
* ''Note:'': This will throw an exception if it is not a success and default throws an exception.
*/
- def getOrElse[U >: T](default: => U): U =
- if (isSuccess) get else default
+ def getOrElse[U >: T](default: => U): U
/** Returns this `Try` if it's a `Success` or the given `default` argument if this is a `Failure`.
*/
- def orElse[U >: T](default: => Try[U]): Try[U] =
- try if (isSuccess) this else default
- catch {
- case NonFatal(e) => Failure(e)
- }
+ def orElse[U >: T](default: => Try[U]): Try[U]
/** Returns the value from this `Success` or throws the exception if this is a `Failure`.
*/
@@ -107,6 +102,11 @@ sealed abstract class Try[+T] {
def map[U](f: T => U): Try[U]
/**
+ * Applies the given partial function to the value from this `Success` or returns this if this is a `Failure`.
+ */
+ def collect[U](pf: PartialFunction[T, U]): Try[U]
+
+ /**
* Converts this to a `Failure` if the predicate is not satisfied.
*/
def filter(p: T => Boolean): Try[T]
@@ -133,6 +133,7 @@ sealed abstract class Try[+T] {
* collection" contract even though it seems unlikely to matter much in a
* collection with max size 1.
*/
+ @deprecatedInheritance("You were never supposed to be able to extend this class.", "2.12")
class WithFilter(p: T => Boolean) {
def map[U](f: T => U): Try[U] = Try.this filter p map f
def flatMap[U](f: T => Try[U]): Try[U] = Try.this filter p flatMap f
@@ -144,18 +145,18 @@ sealed abstract class Try[+T] {
* Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`.
* This is like `flatMap` for the exception.
*/
- def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U]
+ def recoverWith[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, Try[U]]): Try[U]
/**
* Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`.
* This is like map for the exception.
*/
- def recover[U >: T](f: PartialFunction[Throwable, U]): Try[U]
+ def recover[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, U]): Try[U]
/**
* Returns `None` if this is a `Failure` or a `Some` containing the value if this is a `Success`.
*/
- def toOption: Option[T] = if (isSuccess) Some(get) else None
+ def toOption: Option[T]
/**
* Transforms a nested `Try`, ie, a `Try` of type `Try[Try[T]]`,
@@ -172,14 +173,7 @@ sealed abstract class Try[+T] {
/** Completes this `Try` by applying the function `f` to this if this is of type `Failure`, or conversely, by applying
* `s` if this is a `Success`.
*/
- def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] =
- try this match {
- case Success(v) => s(v)
- case Failure(e) => f(e)
- } catch {
- case NonFatal(e) => Failure(e)
- }
-
+ def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U]
}
object Try {
@@ -191,57 +185,55 @@ object Try {
try Success(r) catch {
case NonFatal(e) => Failure(e)
}
-
}
final case class Failure[+T](exception: Throwable) extends Try[T] {
- def isFailure: Boolean = true
- def isSuccess: Boolean = false
- def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] =
- try {
- if (f isDefinedAt exception) f(exception) else this
- } catch {
- case NonFatal(e) => Failure(e)
- }
- def get: T = throw exception
- def flatMap[U](f: T => Try[U]): Try[U] = this.asInstanceOf[Try[U]]
- def flatten[U](implicit ev: T <:< Try[U]): Try[U] = this.asInstanceOf[Try[U]]
- def foreach[U](f: T => U): Unit = ()
- def map[U](f: T => U): Try[U] = this.asInstanceOf[Try[U]]
- def filter(p: T => Boolean): Try[T] = this
- def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] =
- try {
- if (rescueException isDefinedAt exception) {
- Try(rescueException(exception))
- } else this
- } catch {
- case NonFatal(e) => Failure(e)
- }
- def failed: Try[Throwable] = Success(exception)
+ override def isFailure: Boolean = true
+ override def isSuccess: Boolean = false
+ override def get: T = throw exception
+ override def getOrElse[U >: T](default: => U): U = default
+ override def orElse[U >: T](default: => Try[U]): Try[U] =
+ try default catch { case NonFatal(e) => Failure(e) }
+ override def flatMap[U](f: T => Try[U]): Try[U] = this.asInstanceOf[Try[U]]
+ override def flatten[U](implicit ev: T <:< Try[U]): Try[U] = this.asInstanceOf[Try[U]]
+ override def foreach[U](f: T => U): Unit = ()
+ override def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] =
+ try f(exception) catch { case NonFatal(e) => Failure(e) }
+ override def map[U](f: T => U): Try[U] = this.asInstanceOf[Try[U]]
+ override def collect[U](pf: PartialFunction[T, U]): Try[U] = this.asInstanceOf[Try[U]]
+ override def filter(p: T => Boolean): Try[T] = this
+ override def recover[U >: T](@deprecatedName('rescueException) pf: PartialFunction[Throwable, U]): Try[U] =
+ try { if (pf isDefinedAt exception) Success(pf(exception)) else this } catch { case NonFatal(e) => Failure(e) }
+ override def recoverWith[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, Try[U]]): Try[U] =
+ try { if (pf isDefinedAt exception) pf(exception) else this } catch { case NonFatal(e) => Failure(e) }
+ override def failed: Try[Throwable] = Success(exception)
+ override def toOption: Option[T] = None
}
final case class Success[+T](value: T) extends Try[T] {
- def isFailure: Boolean = false
- def isSuccess: Boolean = true
- def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] = this
- def get = value
- def flatMap[U](f: T => Try[U]): Try[U] =
- try f(value)
- catch {
- case NonFatal(e) => Failure(e)
- }
- def flatten[U](implicit ev: T <:< Try[U]): Try[U] = value
- def foreach[U](f: T => U): Unit = f(value)
- def map[U](f: T => U): Try[U] = Try[U](f(value))
- def filter(p: T => Boolean): Try[T] = {
+ override def isFailure: Boolean = false
+ override def isSuccess: Boolean = true
+ override def get = value
+ override def getOrElse[U >: T](default: => U): U = get
+ override def orElse[U >: T](default: => Try[U]): Try[U] = this
+ override def flatMap[U](f: T => Try[U]): Try[U] =
+ try f(value) catch { case NonFatal(e) => Failure(e) }
+ override def flatten[U](implicit ev: T <:< Try[U]): Try[U] = value
+ override def foreach[U](f: T => U): Unit = f(value)
+ override def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] = this flatMap s
+ override def map[U](f: T => U): Try[U] = Try[U](f(value))
+ override def collect[U](pf: PartialFunction[T, U]): Try[U] =
try {
- if (p(value)) this
+ if (pf isDefinedAt value) Success(pf(value))
else Failure(new NoSuchElementException("Predicate does not hold for " + value))
- } catch {
- case NonFatal(e) => Failure(e)
- }
- }
- def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] = this
- def failed: Try[Throwable] = Failure(new UnsupportedOperationException("Success.failed"))
+ } catch { case NonFatal(e) => Failure(e) }
+ override def filter(p: T => Boolean): Try[T] =
+ try {
+ if (p(value)) this else Failure(new NoSuchElementException("Predicate does not hold for " + value))
+ } catch { case NonFatal(e) => Failure(e) }
+ override def recover[U >: T](@deprecatedName('rescueException) pf: PartialFunction[Throwable, U]): Try[U] = this
+ override def recoverWith[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, Try[U]]): Try[U] = this
+ override def failed: Try[Throwable] = Failure(new UnsupportedOperationException("Success.failed"))
+ override def toOption: Option[T] = Some(value)
}
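
Editor's note: a short sketch of the collect method added to the Try API above. On a Success it applies the partial function, falling back to a Failure wrapping a NoSuchElementException when the function is not defined at the value; on a Failure it is a no-op.

    import scala.util.Try

    val doubledPositive = Try("42").map(_.toInt).collect { case n if n > 0 => n * 2 }
    // doubledPositive == Success(84)

    val rejected = Try("-1").map(_.toInt).collect { case n if n > 0 => n * 2 }
    // rejected is a Failure(NoSuchElementException: Predicate does not hold for -1)
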
diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala
index 0cbb976a98..0f0f16574e 100644
--- a/src/reflect/scala/reflect/internal/Mirrors.scala
+++ b/src/reflect/scala/reflect/internal/Mirrors.scala
@@ -180,7 +180,7 @@ trait Mirrors extends api.Mirrors {
def getPackageObject(fullname: String): ModuleSymbol = getPackageObject(newTermName(fullname))
def getPackageObject(fullname: TermName): ModuleSymbol =
- (getPackage(fullname).info member nme.PACKAGE) match {
+ (getPackage(fullname).packageObject) match {
case x: ModuleSymbol => x
case _ => MissingRequirementError.notFound("package object " + fullname)
}
@@ -191,15 +191,6 @@ trait Mirrors extends api.Mirrors {
def getPackageObjectIfDefined(fullname: TermName): Symbol =
wrapMissing(getPackageObject(fullname))
- final def getPackageObjectWithMember(pre: Type, sym: Symbol): Symbol = {
- // The owner of a symbol which requires package qualification may be the
- // package object iself, but it also could be any superclass of the package
- // object. In the latter case, we must go through the qualifier's info
- // to obtain the right symbol.
- if (sym.owner.isModuleClass) sym.owner.sourceModule // fast path, if the member is owned by a module class, that must be linked to the package object
- else pre member nme.PACKAGE // otherwise we have to findMember
- }
-
override def staticPackage(fullname: String): ModuleSymbol =
try ensurePackageSymbol(fullname.toString, getModuleOrClass(newTermNameCached(fullname)), allowModules = false)
catch { case mre: MissingRequirementError => throw new ScalaReflectionException(mre.msg) }
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index c0562b0679..15584c382c 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -111,6 +111,7 @@ trait StdNames {
val PACKAGE: NameType = "package"
val ROOT: NameType = "<root>"
val SPECIALIZED_SUFFIX: NameType = "$sp"
+ val CASE_ACCESSOR: NameType = "$access"
val NESTED_IN: String = "$nestedIn"
val NESTED_IN_ANON_CLASS: String = NESTED_IN + ANON_CLASS_NAME.toString.replace("$", "")
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index ef63078f90..b0145f8a89 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -332,7 +332,7 @@ abstract class SymbolTable extends macros.Universe
/** if there's a `package` member object in `pkgClass`, enter its members into it. */
def openPackageModule(pkgClass: Symbol) {
- val pkgModule = pkgClass.info.decl(nme.PACKAGEkw)
+ val pkgModule = pkgClass.packageObject
def fromSource = pkgModule.rawInfo match {
case ltp: SymLoader => ltp.fromSource
case _ => false
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index d23a102b28..00067daa7f 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -818,6 +818,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def skipPackageObject: Symbol = this
+ /** The package object symbol corresponding to this package or package class symbol, or NoSymbol otherwise */
+ def packageObject: Symbol =
+ if (isPackageClass) tpe.packageObject
+ else if (isPackage) moduleClass.packageObject
+ else NoSymbol
+
/** If this is a constructor, its owner: otherwise this.
*/
final def skipConstructor: Symbol = if (isConstructor) owner else this
@@ -857,7 +863,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def isDeprecated = hasAnnotation(DeprecatedAttr)
def deprecationMessage = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 0)
def deprecationVersion = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 1)
- def deprecatedParamName = getAnnotation(DeprecatedNameAttr) flatMap (_ symbolArg 0)
+ def deprecatedParamName = getAnnotation(DeprecatedNameAttr) flatMap (_ symbolArg 0 orElse Some(nme.NO_NAME))
def hasDeprecatedInheritanceAnnotation
= hasAnnotation(DeprecatedInheritanceAttr)
def deprecatedInheritanceMessage
@@ -3379,13 +3385,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def implicitMembers: Scope = {
val tp = info
if ((implicitMembersCacheKey1 ne tp) || (implicitMembersCacheKey2 ne tp.decls.elems)) {
- // Skip a package object class, because the members are also in
- // the package and we wish to avoid spurious ambiguities as in pos/t3999.
- if (!isPackageObjectClass) {
- implicitMembersCacheValue = tp.implicitMembers
- implicitMembersCacheKey1 = tp
- implicitMembersCacheKey2 = tp.decls.elems
- }
+ implicitMembersCacheValue = tp.membersBasedOnFlags(BridgeFlags, IMPLICIT)
+ implicitMembersCacheKey1 = tp
+ implicitMembersCacheKey2 = tp.decls.elems
}
implicitMembersCacheValue
}
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index b3e11a826e..4cedfe2665 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -191,8 +191,8 @@ abstract class TreeGen {
)
val pkgQualifier =
if (needsPackageQualifier) {
- val packageObject = rootMirror.getPackageObjectWithMember(qual.tpe, sym)
- Select(qual, nme.PACKAGE) setSymbol packageObject setType singleType(qual.tpe, packageObject)
+ val packageObject = qualsym.packageObject
+ Select(qual, nme.PACKAGE) setSymbol packageObject setType packageObject.typeOfThis
}
else qual
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 4657fa0000..7ad5fdf096 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -128,6 +128,7 @@ abstract class TreeInfo {
symOk(tree.symbol)
&& tree.symbol.isStable
&& !definitions.isByNameParamType(tree.tpe)
+ && !definitions.isByName(tree.symbol)
&& (allowVolatile || !tree.symbol.hasVolatileType) // TODO SPEC: not required by spec
)
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 8f114caac0..b65063d9d4 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -589,7 +589,12 @@ trait Types
def nonPrivateMembersAdmitting(admit: Long): Scope = membersBasedOnFlags(BridgeAndPrivateFlags & ~admit, 0)
/** A list of all implicit symbols of this type (defined or inherited) */
- def implicitMembers: Scope = membersBasedOnFlags(BridgeFlags, IMPLICIT)
+ def implicitMembers: Scope = {
+ typeSymbolDirect match {
+ case sym: ModuleClassSymbol => sym.implicitMembers
+ case _ => membersBasedOnFlags(BridgeFlags, IMPLICIT)
+ }
+ }
/** A list of all deferred symbols of this type (defined or inherited) */
def deferredMembers: Scope = membersBasedOnFlags(BridgeFlags, DEFERRED)
@@ -606,6 +611,8 @@ trait Types
def nonPrivateMember(name: Name): Symbol =
memberBasedOnName(name, BridgeAndPrivateFlags)
+ def packageObject: Symbol = member(nme.PACKAGE)
+
/** The non-private member with given name, admitting members with given flags `admit`.
* "Admitting" refers to the fact that members with a PRIVATE, BRIDGE, or VBRIDGE
* flag are usually excluded from findMember results, but supplying any of those flags
@@ -659,7 +666,7 @@ trait Types
)
if (trivial) this
else {
- val m = newAsSeenFromMap(pre.normalize, clazz)
+ val m = new AsSeenFromMap(pre.normalize, clazz)
val tp = m(this)
val tp1 = existentialAbstraction(m.capturedParams, tp)
@@ -1600,7 +1607,14 @@ trait Types
private var normalized: Type = _
private def normalizeImpl = {
// TODO see comments around def intersectionType and def merge
- def flatten(tps: List[Type]): List[Type] = tps flatMap { case RefinedType(parents, ds) if ds.isEmpty => flatten(parents) case tp => List(tp) }
+ // SI-8575 The dealias is needed here to keep subtyping transitive, example in run/t8575b.scala
+ def flatten(tps: List[Type]): List[Type] = {
+ def dealiasRefinement(tp: Type) = if (tp.dealias.isInstanceOf[RefinedType]) tp.dealias else tp
+ tps map dealiasRefinement flatMap {
+ case RefinedType(parents, ds) if ds.isEmpty => flatten(parents)
+ case tp => List(tp)
+ }
+ }
val flattened = flatten(parents).distinct
if (decls.isEmpty && hasLength(flattened, 1)) {
flattened.head
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
index c705ca7069..d6d2cf3383 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -449,12 +449,15 @@ private[internal] trait TypeMaps {
(pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
)
- def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap =
- new AsSeenFromMap(pre, clazz)
+ @deprecated("Use new AsSeenFromMap instead", "2.12.0")
+ final def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap = new AsSeenFromMap(pre, clazz)
/** A map to compute the asSeenFrom method.
*/
- class AsSeenFromMap(seenFromPrefix: Type, seenFromClass: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
+ class AsSeenFromMap(seenFromPrefix0: Type, seenFromClass: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
+ private val seenFromPrefix: Type = if (seenFromPrefix0.typeSymbolDirect.hasPackageFlag && !seenFromClass.hasPackageFlag)
+ seenFromPrefix0.packageObject.typeOfThis
+ else seenFromPrefix0
// Some example source constructs relevant in asSeenFrom:
//
// object CaptureThis {
diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala
index d5b5967145..ac7839bcfd 100644
--- a/src/reflect/scala/reflect/internal/transform/Erasure.scala
+++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala
@@ -280,8 +280,17 @@ trait Erasure {
}
object boxingErasure extends ScalaErasureMap {
+ private var boxPrimitives = true
+
+ override def applyInArray(tp: Type): Type = {
+ val saved = boxPrimitives
+ boxPrimitives = false
+ try super.applyInArray(tp)
+ finally boxPrimitives = saved
+ }
+
override def eraseNormalClassRef(tref: TypeRef) =
- if (isPrimitiveValueClass(tref.sym)) boxedClass(tref.sym).tpe
+ if (boxPrimitives && isPrimitiveValueClass(tref.sym)) boxedClass(tref.sym).tpe
else super.eraseNormalClassRef(tref)
override def eraseDerivedValueClassRef(tref: TypeRef) =
super.eraseNormalClassRef(tref)
diff --git a/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
index cc217d2f80..f853df0484 100755
--- a/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
@@ -62,15 +62,15 @@ trait MemberLookupBase {
syms.flatMap { case (sym, owner) =>
// reconstruct the original link
def linkName(sym: Symbol) = {
- def nameString(s: Symbol) = s.nameString + (if ((s.isModule || s.isModuleClass) && !s.isPackage) "$" else "")
- val packageSuffix = if (sym.isPackage) ".package" else ""
+ def nameString(s: Symbol) = s.nameString + (if ((s.isModule || s.isModuleClass) && !s.hasPackageFlag) "$" else "")
+ val packageSuffix = if (sym.hasPackageFlag) ".package" else ""
sym.ownerChain.reverse.filterNot(isRoot(_)).map(nameString(_)).mkString(".") + packageSuffix
}
- if (sym.isClass || sym.isModule || sym.isTrait || sym.isPackage)
+ if (sym.isClass || sym.isModule || sym.isTrait || sym.hasPackageFlag)
findExternalLink(sym, linkName(sym))
- else if (owner.isClass || owner.isModule || owner.isTrait || owner.isPackage)
+ else if (owner.isClass || owner.isModule || owner.isTrait || owner.hasPackageFlag)
findExternalLink(sym, linkName(owner) + "@" + externalSignature(sym))
else
None
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
index ce75749859..86155845b0 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -206,25 +206,42 @@ abstract class HtmlPage extends Page { thisPage =>
case tpl :: tpls => templateToHtml(tpl) ++ sep ++ templatesToHtml(tpls, sep)
}
- /** Returns the _big image name corresponding to the DocTemplate Entity (upper left icon) */
- def docEntityKindToBigImage(ety: DocTemplateEntity) =
- if (ety.isTrait && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "trait_to_object_big.png"
- else if (ety.isTrait) "trait_big.png"
- else if (ety.isClass && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "class_to_object_big.png"
- else if (ety.isClass) "class_big.png"
- else if ((ety.isAbstractType || ety.isAliasType) && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "type_to_object_big.png"
- else if ((ety.isAbstractType || ety.isAliasType)) "type_big.png"
- else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && ety.companion.get.isClass) "object_to_class_big.png"
- else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && ety.companion.get.isTrait) "object_to_trait_big.png"
- else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && (ety.companion.get.isAbstractType || ety.companion.get.isAliasType)) "object_to_trait_big.png"
- else if (ety.isObject) "object_big.png"
- else if (ety.isPackage) "package_big.png"
- else "class_big.png" // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not
+ object Image extends Enumeration {
+ val Trait, Class, Type, Object, Package = Value
+ }
+
+ /** Returns the _big image name and the alt attribute
+ * corresponding to the DocTemplate Entity (upper left icon) */
+ def docEntityKindToBigImage(ety: DocTemplateEntity) = {
+ def entityToImage(e: DocTemplateEntity) =
+ if (e.isTrait) Image.Trait
+ else if (e.isClass) Image.Class
+ else if (e.isAbstractType || e.isAliasType) Image.Type
+ else if (e.isObject) Image.Object
+ else if (e.isPackage) Image.Package
+ else {
+ // FIXME: an entity *should* fall into one of the above categories,
+ // but AnyRef is somehow not
+ Image.Class
+ }
+
+ val image = entityToImage(ety)
+ val companionImage = ety.companion filter {
+ e => e.visibility.isPublic && ! e.inSource.isEmpty
+ } map { entityToImage }
+
+ (image, companionImage) match {
+ case (from, Some(to)) =>
+ ((from + "_to_" + to + "_big.png").toLowerCase, from + "/" + to)
+ case (from, None) =>
+ ((from + "_big.png").toLowerCase, from.toString)
+ }
+ }
def permalink(template: Entity, isSelf: Boolean = true): Elem =
<span class="permalink">
<a href={ memberToUrl(template, isSelf) } title="Permalink" target="_top">
- <img src={ relativeLinkTo(List("permalink.png", "lib")) } />
+ <img src={ relativeLinkTo(List("permalink.png", "lib")) } alt="Permalink" />
</a>
</span>
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
index e10c54a414..357c397f05 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
@@ -103,11 +103,13 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<body class={ if (tpl.isType) "type" else "value" }>
<div id="definition">
{
+ val (src, alt) = docEntityKindToBigImage(tpl)
+
tpl.companion match {
case Some(companion) if (companion.visibility.isPublic && companion.inSource != None) =>
- <a href={relativeLinkTo(companion)} title={docEntityKindToCompanionTitle(tpl)}><img src={ relativeLinkTo(List(docEntityKindToBigImage(tpl), "lib")) }/></a>
+ <a href={relativeLinkTo(companion)} title={docEntityKindToCompanionTitle(tpl)}><img alt={alt} src={ relativeLinkTo(List(src, "lib")) }/></a>
case _ =>
- <img src={ relativeLinkTo(List(docEntityKindToBigImage(tpl), "lib")) }/>
+ <img alt={alt} src={ relativeLinkTo(List(src, "lib")) }/>
}}
{ owner }
<h1>{ displayName }</h1>{
@@ -145,7 +147,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
{ if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty) NodeSeq.Empty else
{
if (!tpl.linearizationTemplates.isEmpty)
- <div id="ancestors">
+ <div class="ancestors">
<span class="filtertype">Inherited<br/>
</span>
<ol id="linearization">
@@ -155,7 +157,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
else NodeSeq.Empty
} ++ {
if (!tpl.conversions.isEmpty)
- <div id="ancestors">
+ <div class="ancestors">
<span class="filtertype">Implicitly<br/>
</span>
<ol id="implicits"> {
@@ -169,7 +171,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
</div>
else NodeSeq.Empty
} ++
- <div id="ancestors">
+ <div class="ancestors">
<span class="filtertype"></span>
<ol>
<li class="hideall out"><span>Hide All</span></li>
@@ -203,28 +205,28 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
}
{ if (absValueMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="values members">
+ <div class="values members">
<h3>Abstract Value Members</h3>
<ol>{ absValueMembers map (memberToHtml(_, tpl)) }</ol>
</div>
}
{ if (concValueMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="values members">
+ <div class="values members">
<h3>{ if (absValueMembers.isEmpty) "Value Members" else "Concrete Value Members" }</h3>
<ol>{ concValueMembers map (memberToHtml(_, tpl)) }</ol>
</div>
}
{ if (shadowedImplicitMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="values members">
+ <div class="values members">
<h3>Shadowed Implicit Value Members</h3>
<ol>{ shadowedImplicitMembers map (memberToHtml(_, tpl)) }</ol>
</div>
}
{ if (deprValueMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="values members">
+ <div class="values members">
<h3>Deprecated Value Members</h3>
<ol>{ deprValueMembers map (memberToHtml(_, tpl)) }</ol>
</div>
@@ -289,13 +291,19 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
}
def memberToHtml(mbr: MemberEntity, inTpl: DocTemplateEntity): NodeSeq = {
+ // Sometimes these are the same; do we still need signatureCompat?
+ val sig = if (mbr.signature == mbr.signatureCompat) {
+ <a id={ mbr.signature }/>
+ } else {
+ <a id={ mbr.signature }/><a id={ mbr.signatureCompat }/>
+ }
+
val memberComment = memberToCommentHtml(mbr, inTpl, isSelf = false)
<li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }
data-isabs={ mbr.isAbstract.toString }
fullComment={ if(memberComment.filter(_.label=="div").isEmpty) "no" else "yes" }
group={ mbr.group }>
- <a id={ mbr.signature }/>
- <a id={ mbr.signatureCompat }/>
+ { sig }
{ signature(mbr, isSelf = false) }
{ memberComment }
</li>
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
index b541cf721b..b478c6424c 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
@@ -364,7 +364,7 @@ class DotDiagramGenerator(settings: doc.Settings, dotRunner: DotRunner) extends
// add an id and class attribute to the SVG element
case Elem(prefix, "svg", attribs, scope, child @ _*) => {
val klass = if (isInheritanceDiagram) "class-diagram" else "package-diagram"
- Elem(prefix, "svg", attribs, scope, child map(x => transform(x)) : _*) %
+ Elem(prefix, "svg", attribs, scope, true, child map(x => transform(x)) : _*) %
new UnprefixedAttribute("id", "graph" + counter, Null) %
new UnprefixedAttribute("class", klass, Null)
}
@@ -378,7 +378,7 @@ class DotDiagramGenerator(settings: doc.Settings, dotRunner: DotRunner) extends
// assign id and class attributes to edges and nodes:
// the id attribute generated by dot has the format: "{class}|{id}"
case g @ Elem(prefix, "g", attribs, scope, children @ _*) if (List("edge", "node").contains((g \ "@class").toString)) => {
- var res = new Elem(prefix, "g", attribs, scope, (children map(x => transform(x))): _*)
+ var res = new Elem(prefix, "g", attribs, scope, true, (children map(x => transform(x))): _*)
val dotId = (g \ "@id").toString
if (dotId.count(_ == '|') == 1) {
val Array(klass, id) = dotId.toString.split("\\|")
@@ -395,11 +395,11 @@ class DotDiagramGenerator(settings: doc.Settings, dotRunner: DotRunner) extends
val imageNode = <image xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href={ ("./lib/" + kind + "_diagram.png") } width="16px" height="16px" preserveAspectRatio="xMinYMin meet" x={ xposition.get.toString } y={ yposition.get.toString }/>
val anchorNode = (g \ "a") match {
case Seq(Elem(prefix, "a", attribs, scope, children @ _*)) =>
- transform(new Elem(prefix, "a", attribs, scope, (children ++ imageNode): _*))
+ transform(new Elem(prefix, "a", attribs, scope, true, (children ++ imageNode): _*))
case _ =>
g \ "a"
}
- res = new Elem(prefix, "g", attribs, scope, anchorNode: _*)
+ res = new Elem(prefix, "g", attribs, scope, true, anchorNode: _*)
DiagramStats.addFixedImage()
}
}
@@ -413,7 +413,7 @@ class DotDiagramGenerator(settings: doc.Settings, dotRunner: DotRunner) extends
scala.xml.Text("")
// apply recursively
case Elem(prefix, label, attribs, scope, child @ _*) =>
- Elem(prefix, label, attribs, scope, child map(x => transform(x)) : _*)
+ Elem(prefix, label, attribs, scope, true, child map(x => transform(x)) : _*)
case x => x
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
index e129e6cf6a..6eee280267 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
@@ -217,7 +217,7 @@ dl.attributes > dd {
height: 18px;
}
-#values ol li:last-child {
+.values ol li:last-child {
margin-bottom: 5px;
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
index 1ebcb67f04..c1e3010834 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
@@ -147,19 +147,19 @@ $(document).ready(function(){
filter();
});
- $("#mbrsel > div[id=ancestors] > ol > li.hideall").click(function() {
+ $("#mbrsel > div.ancestors > ol > li.hideall").click(function() {
$("#linearization li.in").removeClass("in").addClass("out");
$("#linearization li:first").removeClass("out").addClass("in");
$("#implicits li.in").removeClass("in").addClass("out");
- if ($(this).hasClass("out") && $("#mbrsel > div[id=ancestors] > ol > li.showall").hasClass("in")) {
+ if ($(this).hasClass("out") && $("#mbrsel > div.ancestors > ol > li.showall").hasClass("in")) {
$(this).removeClass("out").addClass("in");
- $("#mbrsel > div[id=ancestors] > ol > li.showall").removeClass("in").addClass("out");
+ $("#mbrsel > div.ancestors > ol > li.showall").removeClass("in").addClass("out");
}
filter();
})
- $("#mbrsel > div[id=ancestors] > ol > li.showall").click(function() {
+ $("#mbrsel > div.ancestors > ol > li.showall").click(function() {
var filteredLinearization =
$("#linearization li.out").filter(function() {
return ! isHiddenClass($(this).attr("name"));
@@ -172,14 +172,14 @@ $(document).ready(function(){
});
filteredImplicits.removeClass("out").addClass("in");
- if ($(this).hasClass("out") && $("#mbrsel > div[id=ancestors] > ol > li.hideall").hasClass("in")) {
+ if ($(this).hasClass("out") && $("#mbrsel > div.ancestors > ol > li.hideall").hasClass("in")) {
$(this).removeClass("out").addClass("in");
- $("#mbrsel > div[id=ancestors] > ol > li.hideall").removeClass("in").addClass("out");
+ $("#mbrsel > div.ancestors > ol > li.hideall").removeClass("in").addClass("out");
}
filter();
});
- $("#visbl > ol > li.public").click(function() {
+ $("#order > ol > li.alpha").click(function() {
if ($(this).hasClass("out")) {
orderAlpha();
}
@@ -275,7 +275,7 @@ function orderAlpha() {
$("#order > ol > li.group").removeClass("in").addClass("out");
$("#template > div.parent").hide();
$("#template > div.conversion").hide();
- $("#mbrsel > div[id=ancestors]").show();
+ $("#mbrsel > div.ancestors").show();
filter();
};
@@ -285,7 +285,7 @@ function orderInherit() {
$("#order > ol > li.group").removeClass("in").addClass("out");
$("#template > div.parent").show();
$("#template > div.conversion").show();
- $("#mbrsel > div[id=ancestors]").hide();
+ $("#mbrsel > div.ancestors").hide();
filter();
};
@@ -295,7 +295,7 @@ function orderGroup() {
$("#order > ol > li.inherit").removeClass("in").addClass("out");
$("#template > div.parent").hide();
$("#template > div.conversion").hide();
- $("#mbrsel > div[id=ancestors]").show();
+ $("#mbrsel > div.ancestors").show();
filter();
};
@@ -350,7 +350,7 @@ function initInherit() {
}
});
- $("#values > ol > li").each(function(){
+ $(".values > ol > li").each(function(){
var mbr = $(this);
this.mbrText = mbr.find("> .fullcomment .cmt").text();
var qualName = mbr.attr("name");
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
index 64eb1adbea..20aaab29fc 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
@@ -40,7 +40,7 @@ trait MemberLookup extends base.MemberLookupBase {
override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = {
val sym1 =
if (sym == AnyClass || sym == AnyRefClass || sym == AnyValClass || sym == NothingClass) ListClass
- else if (sym.isPackage)
+ else if (sym.hasPackageFlag)
/* Get package object which has associatedFile ne null */
sym.info.member(newTermName("package"))
else sym
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
index 03d71f15a3..96731acf56 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -93,10 +93,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
trait TemplateImpl extends EntityImpl with TemplateEntity {
override def qualifiedName: String =
if (inTemplate == null || inTemplate.isRootPackage) name else optimize(inTemplate.qualifiedName + "." + name)
- def isPackage = sym.isPackage
+ def isPackage = sym.hasPackageFlag
def isTrait = sym.isTrait
def isClass = sym.isClass && !sym.isTrait
- def isObject = sym.isModule && !sym.isPackage
+ def isObject = sym.isModule && !sym.hasPackageFlag
def isCaseClass = sym.isCaseClass
def isRootPackage = false
def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this))
@@ -254,7 +254,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def valueParams: List[List[ValueParam]] = Nil /** TODO, these are now only computed for DocTemplates */
def parentTypes =
- if (sym.isPackage || sym == AnyClass) List() else {
+ if (sym.hasPackageFlag || sym == AnyClass) List() else {
val tps = (this match {
case a: AliasType => sym.tpe.dealias.parents
case a: AbstractType => sym.info.bounds match {
@@ -665,7 +665,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
s != EmptyPackage && s != RootPackage
}
})
- else if (bSym.isPackage) // (2)
+ else if (bSym.hasPackageFlag) // (2)
if (settings.skipPackage(makeQualifiedName(bSym)))
None
else
@@ -778,7 +778,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
Some(new MemberTemplateImpl(bSym, inTpl) with AliasImpl with AliasType {
override def isAliasType = true
})
- else if (!modelFinished && (bSym.isPackage || templateShouldDocument(bSym, inTpl)))
+ else if (!modelFinished && (bSym.hasPackageFlag || templateShouldDocument(bSym, inTpl)))
modelCreation.createTemplate(bSym, inTpl)
else
None
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
index 2b7e2506d4..ea72fa6095 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
@@ -94,7 +94,7 @@ trait ModelFactoryTypeSupport {
LinkToMember(bMbr, oTpl)
case _ =>
val name = makeQualifiedName(bSym)
- if (!bSym.owner.isPackage)
+ if (!bSym.owner.hasPackageFlag)
Tooltip(name)
else
findExternalLink(bSym, name).getOrElse (
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
index 86a7a67160..c1228e8735 100755
--- a/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
@@ -49,7 +49,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
case _ =>
}
else if (asym.isTerm && asym.owner.isClass){
- if (asym.isSetter) asym = asym.getter(asym.owner)
+ if (asym.isSetter) asym = asym.getterIn(asym.owner)
makeTemplate(asym.owner) match {
case docTmpl: DocTemplateImpl =>
val mbrs: Option[MemberImpl] = findMember(asym, docTmpl)
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
index 44d8886e4e..b300752a34 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
@@ -177,7 +177,7 @@ trait DiagramDirectiveParser {
def warning(message: String) = {
// we need the position from the package object (well, ideally its comment, but yeah ...)
- val sym = if (template.sym.isPackage) template.sym.info.member(global.nme.PACKAGE) else template.sym
+ val sym = if (template.sym.hasPackageFlag) template.sym.packageObject else template.sym
assert((sym != global.NoSymbol) || (sym == global.rootMirror.RootPackage))
global.reporter.warning(sym.pos, message)
}
diff --git a/test/files/jvm/future-spec.check b/test/files/jvm/future-spec.check
index df1629dd7e..5c80aa5586 100644
--- a/test/files/jvm/future-spec.check
+++ b/test/files/jvm/future-spec.check
@@ -1 +1 @@
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there were 21 deprecation warnings; re-run with -deprecation for details
diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala
index a290af9cd3..6b34d5bfaa 100644
--- a/test/files/jvm/future-spec/FutureTests.scala
+++ b/test/files/jvm/future-spec/FutureTests.scala
@@ -17,6 +17,19 @@ class FutureTests extends MinimalScalaTest {
case "NoReply" => Promise[String]().future
}
+ def fail(msg: String): Nothing = throw new AssertionError(msg)
+
+ def ECNotUsed[T](f: ExecutionContext => T): T = {
+ val p = Promise[Runnable]()
+ val unusedEC: ExecutionContext = new ExecutionContext {
+ def execute(r: Runnable) = p.success(r)
+ def reportFailure(t: Throwable): Unit = p.failure(t)
+ }
+ val t = f(unusedEC)
+ assert(p.future.value == None, "Future executed logic!")
+ t
+ }
+
val defaultTimeout = 5 seconds
/* future specification */
@@ -68,6 +81,60 @@ class FutureTests extends MinimalScalaTest {
}
}
+ "Futures" should {
+ "have proper toString representations" in {
+ import ExecutionContext.Implicits.global
+ val s = 5
+ val f = new Exception("foo")
+ val t = Try(throw f)
+
+ val expectFailureString = "Future(Failure("+f+"))"
+ val expectSuccessString = "Future(Success(5))"
+ val expectNotCompleteString = "Future(<not completed>)"
+
+ Future.successful(s).toString mustBe expectSuccessString
+ Future.failed(f).toString mustBe expectFailureString
+ Future.fromTry(t).toString mustBe expectFailureString
+ val p = Promise[Int]()
+ p.toString mustBe expectNotCompleteString
+ Promise[Int]().success(s).toString mustBe expectSuccessString
+ Promise[Int]().failure(f).toString mustBe expectFailureString
+ Await.ready(Future { throw f }, 2000 millis).toString mustBe expectFailureString
+ Await.ready(Future { s }, 2000 millis).toString mustBe expectSuccessString
+
+ Future.never.toString mustBe "Future(<never>)"
+ Future.unit.toString mustBe "Future(Success(()))"
+ }
+
+ "have proper const representation for success" in {
+ val s = "foo"
+ val f = Future.successful(s)
+
+ ECNotUsed(ec => f.onFailure({ case _ => fail("onFailure should not have been called") })(ec))
+ assert( ECNotUsed(ec => f.recover({ case _ => fail("recover should not have been called")})(ec)) eq f)
+ assert( ECNotUsed(ec => f.recoverWith({ case _ => fail("recoverWith should not have been called")})(ec)) eq f)
+ assert(f.fallbackTo(f) eq f, "Future.fallbackTo must be the same instance as Future.fallbackTo")
+ }
+
+ "have proper const representation for failure" in {
+ val e = new Exception("foo")
+ val f = Future.failed[Future[String]](e)
+
+ assert(f.mapTo[String] eq f, "Future.mapTo must be the same instance as Future.mapTo")
+ assert(f.zip(f) eq f, "Future.zip must be the same instance as Future.zip")
+ assert(f.flatten eq f, "Future.flatten must be the same instance as Future.flatten")
+ assert(f.failed eq f, "Future.failed must be the same instance as Future.failed")
+
+ ECNotUsed(ec => f.foreach(_ => fail("foreach should not have been called"))(ec))
+ ECNotUsed(ec => f.onSuccess({ case _ => fail("onSuccess should not have been called") })(ec))
+ assert( ECNotUsed(ec => f.map(_ => fail("map should not have been called"))(ec)) eq f)
+ assert( ECNotUsed(ec => f.flatMap(_ => fail("flatMap should not have been called"))(ec)) eq f)
+ assert( ECNotUsed(ec => f.filter(_ => fail("filter should not have been called"))(ec)) eq f)
+ assert( ECNotUsed(ec => f.collect({ case _ => fail("collect should not have been called")})(ec)) eq f)
+ assert( ECNotUsed(ec => f.zipWith(f)({ (_,_) => fail("zipWith should not have been called")})(ec)) eq f)
+ }
+ }
+
"The Future companion object" should {
"call ExecutionContext.prepare on apply" in {
val p = Promise[Boolean]()
@@ -85,6 +152,49 @@ class FutureTests extends MinimalScalaTest {
Await.result(f, defaultTimeout) mustBe ("foo")
Await.result(p.future, defaultTimeout) mustBe (true)
}
+
+ "have a unit member representing an already completed Future containing Unit" in {
+ assert(Future.unit ne null, "Future.unit must not be null")
+ assert(Future.unit eq Future.unit, "Future.unit must be the same instance as Future.unit")
+ assert(Future.unit.isCompleted, "Future.unit must already be completed")
+ assert(Future.unit.value.get == Success(()), "Future.unit must contain a Success(())")
+ }
+
+ "have a never member representing a never completed Future of Nothing" in {
+
+ val test: Future[Nothing] = Future.never
+
+ //Verify stable identifier
+ test match {
+ case Future.`never` =>
+ case _ => fail("Future.never did not match Future.`never`")
+ }
+
+ assert(test eq Future.never, "Future.never must be the same instance as Future.never")
+ assert(test ne null, "Future.never must not be null")
+ assert(!test.isCompleted && test.value.isEmpty, "Future.never must never be completed")
+ assert(test.failed eq test)
+ assert(test.asInstanceOf[Future[Future[Nothing]]].flatten eq test)
+ assert(test.zip(test) eq test)
+ assert(test.fallbackTo(test) eq test)
+ assert(test.mapTo[String] eq test)
+
+ ECNotUsed(ec => test.foreach(_ => fail("foreach should not have been called"))(ec))
+ ECNotUsed(ec => test.onSuccess({ case _ => fail("onSuccess should not have been called") })(ec))
+ ECNotUsed(ec => test.onFailure({ case _ => fail("onFailure should not have been called") })(ec))
+ ECNotUsed(ec => test.onComplete({ case _ => fail("onComplete should not have been called") })(ec))
+ ECNotUsed(ec => test.transform(identity, identity)(ec) eq test)
+ ECNotUsed(ec => test.transform(identity)(ec) eq test)
+ ECNotUsed(ec => test.transformWith(_ => fail("transformWith should not have been called"))(ec) eq test)
+ ECNotUsed(ec => test.map(identity)(ec) eq test)
+ ECNotUsed(ec => test.flatMap(_ => fail("flatMap should not have been called"))(ec) eq test)
+ ECNotUsed(ec => test.filter(_ => fail("filter should not have been called"))(ec) eq test)
+ ECNotUsed(ec => test.collect({ case _ => fail("collect should not have been called")})(ec) eq test)
+ ECNotUsed(ec => test.recover({ case _ => fail("recover should not have been called")})(ec) eq test)
+ ECNotUsed(ec => test.recoverWith({ case _ => fail("recoverWith should not have been called")})(ec) eq test)
+ ECNotUsed(ec => test.andThen({ case _ => fail("andThen should not have been called")})(ec) eq test)
+ ECNotUsed(ec => test.zipWith(test)({ (_,_) => fail("zipWith should not have been called")})(ec) eq test)
+ }
}
"The default ExecutionContext" should {
@@ -218,6 +328,142 @@ class FutureTests extends MinimalScalaTest {
} mustBe (r)
}
+ "transform results to results" in {
+ val f1 = Future.successful("foo").transform(_.map(_.toUpperCase))
+ val f2 = Future("bar").transform(_.map(_.toUpperCase))
+ Await.result(f1, defaultTimeout) mustBe "FOO"
+ Await.result(f2, defaultTimeout) mustBe "BAR"
+ }
+
+ "transform failures to failures" in {
+ val initial = new Exception("Initial")
+ val expected1 = new Exception("Expected1")
+ val expected2 = new Exception("Expected2")
+ val f1 = Future(throw initial) transform {
+ case Failure(`initial`) => Failure(expected1)
+ case x => x
+ }
+ val f2 = Future.failed(initial) transform {
+ case Failure(`initial`) => Failure(expected2)
+ case x => x
+ }
+
+ intercept[Exception] { Await.result(f1, defaultTimeout) } mustBe expected1
+ intercept[Exception] { Await.result(f2, defaultTimeout) } mustBe expected2
+ }
+
+ "transform failures to results" in {
+ val initial1 = new Exception("Initial1")
+ val initial2 = new Exception("Initial2")
+ val f1 = Future.failed[String](initial1) transform {
+ case Failure(`initial1`) => Success("foo")
+ case x => x
+ }
+ val f2 = Future[String](throw initial2) transform {
+ case Failure(`initial2`) => Success("bar")
+ case x => x
+ }
+ Await.result(f1, defaultTimeout) mustBe "foo"
+ Await.result(f2, defaultTimeout) mustBe "bar"
+ }
+
+ "transform results to failures" in {
+ val expected1 = new Exception("Expected1")
+ val expected2 = new Exception("Expected2")
+ val expected3 = new Exception("Expected3")
+ val f1 = Future.successful("foo") transform {
+ case Success("foo") => Failure(expected1)
+ case x => x
+ }
+ val f2 = Future("bar") transform {
+ case Success("bar") => Failure(expected2)
+ case x => x
+ }
+ val f3 = Future("bar") transform {
+ case Success("bar") => throw expected3
+ case x => x
+ }
+ intercept[Exception] { Await.result(f1, defaultTimeout) } mustBe expected1
+ intercept[Exception] { Await.result(f2, defaultTimeout) } mustBe expected2
+ intercept[Exception] { Await.result(f3, defaultTimeout) } mustBe expected3
+ }
+
+ "transformWith results" in {
+ val f1 = Future.successful("foo").transformWith {
+ case Success(r) => Future(r.toUpperCase)
+ case f @ Failure(_) => Future.fromTry(f)
+ }
+ val f2 = Future("bar").transformWith {
+ case Success(r) => Future(r.toUpperCase)
+ case f @ Failure(_) => Future.fromTry(f)
+ }
+ Await.result(f1, defaultTimeout) mustBe "FOO"
+ Await.result(f2, defaultTimeout) mustBe "BAR"
+ }
+
+ "transformWith failures" in {
+ val initial = new Exception("Initial")
+ val expected1 = new Exception("Expected1")
+ val expected2 = new Exception("Expected2")
+ val expected3 = new Exception("Expected3")
+
+ val f1 = Future[Int](throw initial).transformWith {
+ case Failure(`initial`) => Future failed expected1
+ case x => Future fromTry x
+ }
+ val f2 = Future.failed[Int](initial).transformWith {
+ case Failure(`initial`) => Future failed expected2
+ case x => Future fromTry x
+ }
+ val f3 = Future[Int](throw initial).transformWith {
+ case Failure(`initial`) => throw expected3
+ case x => Future fromTry x
+ }
+
+ intercept[Exception] { Await.result(f1, defaultTimeout) } mustBe expected1
+ intercept[Exception] { Await.result(f2, defaultTimeout) } mustBe expected2
+ intercept[Exception] { Await.result(f3, defaultTimeout) } mustBe expected3
+ }
+
+ "transformWith failures to future success" in {
+ val initial = new Exception("Initial")
+ val f1 = Future.failed[String](initial).transformWith {
+ case Failure(`initial`) => Future("FOO")
+ case _ => Future failed initial
+ }
+ val f2 = Future[String](throw initial).transformWith {
+ case Failure(`initial`) => Future("BAR")
+ case _ => Future failed initial
+ }
+ Await.result(f1, defaultTimeout) mustBe "FOO"
+ Await.result(f2, defaultTimeout) mustBe "BAR"
+ }
+
+ "transformWith results to future failures" in {
+ val initial = new Exception("Initial")
+ val expected1 = new Exception("Expected1")
+ val expected2 = new Exception("Expected2")
+ val expected3 = new Exception("Expected3")
+
+ val f1 = Future[String]("FOO") transformWith {
+ case Success("FOO") => Future failed expected1
+ case _ => Future successful "FOO"
+ }
+ val f2 = Future.successful("FOO") transformWith {
+ case Success("FOO") => Future failed expected2
+ case _ => Future successful "FOO"
+ }
+ val f3 = Future.successful("FOO") transformWith {
+ case Success("FOO") => throw expected3
+ case _ => Future successful "FOO"
+ }
+
+
+ intercept[Exception] { Await.result(f1, defaultTimeout) } mustBe expected1
+ intercept[Exception] { Await.result(f2, defaultTimeout) } mustBe expected2
+ intercept[Exception] { Await.result(f3, defaultTimeout) } mustBe expected3
+ }
+
"andThen like a boss" in {
val q = new java.util.concurrent.LinkedBlockingQueue[Int]
for (i <- 1 to 1000) {
@@ -281,6 +527,33 @@ class FutureTests extends MinimalScalaTest {
Await.result(successful, timeout) mustBe (("foo", "foo"))
}
+ "zipWith" in {
+ val timeout = 10000 millis
+ val f = new IllegalStateException("test")
+ intercept[IllegalStateException] {
+ val failed = Future.failed[String](f).zipWith(Future.successful("foo")) { _ -> _ }
+ Await.result(failed, timeout)
+ } mustBe (f)
+
+ intercept[IllegalStateException] {
+ val failed = Future.successful("foo").zipWith(Future.failed[String](f)) { _ -> _ }
+ Await.result(failed, timeout)
+ } mustBe (f)
+
+ intercept[IllegalStateException] {
+ val failed = Future.failed[String](f).zipWith(Future.failed[String](f)) { _ -> _ }
+ Await.result(failed, timeout)
+ } mustBe (f)
+
+ val successful = Future.successful("foo").zipWith(Future.successful("foo")) { _ -> _ }
+ Await.result(successful, timeout) mustBe (("foo", "foo"))
+
+ val failure = Future.successful("foo").zipWith(Future.successful("foo")) { (_,_) => throw f }
+ intercept[IllegalStateException] {
+ Await.result(failure, timeout)
+ } mustBe (f)
+ }
+
"fold" in {
val timeout = 10000 millis
def async(add: Int, wait: Int) = Future {
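
A minimal standalone sketch of the transform / transformWith contract the new tests above exercise: transform rewrites the completed Try, while transformWith replaces it with another Future. Object name, messages and timeout below are illustrative only:

import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.util.{Failure, Success}

object TransformSketch extends App {
  // transform: Try[T] => Try[S], applied once the future completes
  val upper = Future("foo").transform {
    case Success(s)     => Success(s.toUpperCase)
    case f @ Failure(_) => f
  }

  // transformWith: Try[T] => Future[S], useful for asynchronous recovery
  val recovered = Future[String](throw new Exception("boom")).transformWith {
    case Failure(_)      => Future.successful("fallback")
    case ok @ Success(_) => Future.fromTry(ok)
  }

  println(Await.result(upper, 2.seconds))     // FOO
  println(Await.result(recovered, 2.seconds)) // fallback
}
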
diff --git a/test/files/jvm/scala-concurrent-tck.check b/test/files/jvm/scala-concurrent-tck.check
new file mode 100644
index 0000000000..bbe73c9982
--- /dev/null
+++ b/test/files/jvm/scala-concurrent-tck.check
@@ -0,0 +1 @@
+warning: there were 74 deprecation warnings; re-run with -deprecation for details
diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala
index ce86d4aef0..ba405e97bd 100644
--- a/test/files/jvm/scala-concurrent-tck.scala
+++ b/test/files/jvm/scala-concurrent-tck.scala
@@ -165,6 +165,100 @@ def testTransformFailure(): Unit = once {
g onFailure { case e => done(e eq transformed) }
}
+ def testTransformResultToResult(): Unit = once {
+ done =>
+ Future("foo").transform {
+ case Success(s) => Success(s.toUpperCase)
+ case Failure(f) => throw new Exception("test failed")
+ } onComplete {
+ case Success("FOO") => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformResultToFailure(): Unit = once {
+ done =>
+ val e = new Exception("expected")
+ Future("foo").transform {
+ case Success(s) => Failure(e)
+ case Failure(f) => throw new Exception("test failed")
+ } onComplete {
+ case Failure(`e`) => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformFailureToResult(): Unit = once {
+ done =>
+ val e = "foo"
+ Future(throw new Exception("initial")).transform {
+ case Success(s) => throw new Exception("test failed")
+ case Failure(f) => Success(e)
+ } onComplete {
+ case Success(`e`) => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformFailureToFailure(): Unit = once {
+ done =>
+ val e = new Exception("expected")
+ Future(throw new Exception("initial")).transform {
+ case Success(s) => throw new Exception("test failed")
+ case Failure(f) => Failure(e)
+ } onComplete {
+ case Failure(`e`) => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformWithResultToResult(): Unit = once {
+ done =>
+ Future("foo").transformWith {
+ case Success(s) => Future(s.toUpperCase)
+ case Failure(f) => throw new Exception("test failed")
+ } onComplete {
+ case Success("FOO") => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformWithResultToFailure(): Unit = once {
+ done =>
+ val e = new Exception("expected")
+ Future("foo").transformWith {
+ case Success(s) => Future(throw e)
+ case Failure(f) => throw new Exception("test failed")
+ } onComplete {
+ case Failure(`e`) => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformWithFailureToResult(): Unit = once {
+ done =>
+ val e = "foo"
+ Future(throw new Exception("initial")).transformWith {
+ case Success(s) => throw new Exception("test failed")
+ case Failure(f) => Future(e)
+ } onComplete {
+ case Success(`e`) => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformWithFailureToFailure(): Unit = once {
+ done =>
+ val e = new Exception("expected")
+ Future(throw new Exception("initial")).transformWith {
+ case Success(s) => throw new Exception("test failed")
+ case Failure(f) => Future(throw e)
+ } onComplete {
+ case Failure(`e`) => done(true)
+ case _ => done(false)
+ }
+ }
+
def testFoldFailure(): Unit = once {
done =>
val f = Future[Unit] { throw new Exception("expected") }
@@ -352,6 +446,14 @@ def testTransformFailure(): Unit = once {
h onFailure { case e => done(e eq cause) }
}
+ def testFallbackToThis(): Unit = {
+ def check(f: Future[Int]) = assert((f fallbackTo f) eq f)
+
+ check(Future { 1 })
+ check(Future.successful(1))
+ check(Future.failed[Int](new Exception))
+ }
+
testMapSuccess()
testMapFailure()
testFlatMapSuccess()
@@ -373,6 +475,16 @@ def testTransformFailure(): Unit = once {
testFallbackToFailure()
testTransformSuccess()
testTransformSuccessPF()
+ testTransformFailure()
+ testTransformFailurePF()
+ testTransformResultToResult()
+ testTransformResultToFailure()
+ testTransformFailureToResult()
+ testTransformFailureToFailure()
+ testTransformWithResultToResult()
+ testTransformWithResultToFailure()
+ testTransformWithFailureToResult()
+ testTransformWithFailureToFailure()
}
@@ -593,6 +705,17 @@ trait Exceptions extends TestBase {
}
+trait GlobalExecutionContext extends TestBase {
+ def testNameOfGlobalECThreads(): Unit = once {
+ done => Future({
+ val expectedName = "scala-execution-context-global-"+ Thread.currentThread.getId
+ done(expectedName == Thread.currentThread.getName)
+ })(ExecutionContext.global)
+ }
+
+ testNameOfGlobalECThreads()
+}
+
trait CustomExecutionContext extends TestBase {
import scala.concurrent.{ ExecutionContext, Awaitable }
@@ -772,6 +895,7 @@ with FutureProjections
with Promises
with BlockContexts
with Exceptions
+with GlobalExecutionContext
with CustomExecutionContext
with ExecutionContextPrepare
{
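
The new GlobalExecutionContext trait above asserts the naming scheme of the global pool's worker threads. A small sketch of the same observation, assuming the default global context (object name and timeout are illustrative):

import scala.concurrent.{Await, ExecutionContext, Future}
import scala.concurrent.duration._

object GlobalThreadNameSketch extends App {
  // Run a task on the global pool and report the worker thread's name,
  // which follows the scala-execution-context-global-<id> pattern checked above.
  val name = Future(Thread.currentThread.getName)(ExecutionContext.global)
  println(Await.result(name, 2.seconds))
}
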
diff --git a/test/files/jvm/t7146.scala b/test/files/jvm/t7146.scala
index aaa3dc7ca4..ea734472d5 100644
--- a/test/files/jvm/t7146.scala
+++ b/test/files/jvm/t7146.scala
@@ -10,7 +10,7 @@ object Test {
ExecutionContext.global.toString.startsWith("scala.concurrent.impl.ExecutionContextImpl"))
val i = ExecutionContext.global.asInstanceOf[{ def executor: Executor }]
println("should be scala.concurrent.forkjoin.ForkJoinPool == " +
- i.executor.toString.startsWith("scala.concurrent.forkjoin.ForkJoinPool"))
+ (i.executor.getClass.getSuperclass.getName == "scala.concurrent.forkjoin.ForkJoinPool"))
val u = i.executor.
asInstanceOf[{ def getUncaughtExceptionHandler: Thread.UncaughtExceptionHandler }].
getUncaughtExceptionHandler
diff --git a/test/files/jvm/t8582.check b/test/files/jvm/t8582.check
index e388366270..0e4da90398 100644
--- a/test/files/jvm/t8582.check
+++ b/test/files/jvm/t8582.check
@@ -1,3 +1,6 @@
+t8582.scala:17: warning: class BeanInfo in package beans is deprecated: the generation of BeanInfo classes is no longer supported
+ class C1
+ ^
getClass on module gives module class
class p1.p2.Singleton$Singleton$
diff --git a/test/files/jvm/t8582.flags b/test/files/jvm/t8582.flags
new file mode 100644
index 0000000000..dcc59ebe32
--- /dev/null
+++ b/test/files/jvm/t8582.flags
@@ -0,0 +1 @@
+-deprecation
diff --git a/test/files/neg/beanInfoDeprecation.check b/test/files/neg/beanInfoDeprecation.check
new file mode 100644
index 0000000000..788b277818
--- /dev/null
+++ b/test/files/neg/beanInfoDeprecation.check
@@ -0,0 +1,6 @@
+beanInfoDeprecation.scala:2: warning: class BeanInfo in package beans is deprecated: the generation of BeanInfo classes is no longer supported
+class C
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/beanInfoDeprecation.flags b/test/files/neg/beanInfoDeprecation.flags
new file mode 100644
index 0000000000..c6bfaf1f64
--- /dev/null
+++ b/test/files/neg/beanInfoDeprecation.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings
diff --git a/test/files/neg/beanInfoDeprecation.scala b/test/files/neg/beanInfoDeprecation.scala
new file mode 100644
index 0000000000..c7e3a86202
--- /dev/null
+++ b/test/files/neg/beanInfoDeprecation.scala
@@ -0,0 +1,2 @@
+@scala.beans.BeanInfo
+class C
diff --git a/test/files/neg/logImplicits.check b/test/files/neg/logImplicits.check
index 270882b71a..f9eb79645b 100644
--- a/test/files/neg/logImplicits.check
+++ b/test/files/neg/logImplicits.check
@@ -4,7 +4,7 @@ logImplicits.scala:2: applied implicit conversion from xs.type to ?{def size: ?}
logImplicits.scala:7: applied implicit conversion from String("abc") to ?{def map: ?} = implicit def augmentString(x: String): scala.collection.immutable.StringOps
def f = "abc" map (_ + 1)
^
-logImplicits.scala:15: inferred view from String("abc") to Int = C.this.convert:(p: String("abc"))Int
+logImplicits.scala:15: inferred view from String("abc") to Int = C.this.convert:(p: String)Int
math.max(122, x: Int)
^
logImplicits.scala:19: applied implicit conversion from Int(1) to ?{def ->: ?} = implicit def ArrowAssoc[A](self: A): ArrowAssoc[A]
diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check
index 2db24b6f32..194be72250 100644
--- a/test/files/neg/names-defaults-neg.check
+++ b/test/files/neg/names-defaults-neg.check
@@ -118,68 +118,74 @@ names-defaults-neg.scala:93: warning: the parameter name y has been deprecated.
names-defaults-neg.scala:93: error: parameter 'b' is already specified at parameter position 1
deprNam3(y = 10, b = 2)
^
-names-defaults-neg.scala:98: error: unknown parameter name: m
+names-defaults-neg.scala:96: warning: naming parameter deprNam4Arg has been deprecated.
+ deprNam4(deprNam4Arg = null)
+ ^
+names-defaults-neg.scala:98: warning: naming parameter deprNam5Arg has been deprecated.
+ deprNam5(deprNam5Arg = null)
+ ^
+names-defaults-neg.scala:102: error: unknown parameter name: m
f3818(y = 1, m = 1)
^
-names-defaults-neg.scala:131: error: reference to var2 is ambiguous; it is both a method parameter and a variable in scope.
+names-defaults-neg.scala:135: error: reference to var2 is ambiguous; it is both a method parameter and a variable in scope.
delay(var2 = 40)
^
-names-defaults-neg.scala:134: error: missing parameter type for expanded function ((x$1) => a = x$1)
+names-defaults-neg.scala:138: error: missing parameter type for expanded function ((x$1) => a = x$1)
val taf2: Int => Unit = testAnnFun(a = _, b = get("+"))
^
-names-defaults-neg.scala:134: error: not found: value a
+names-defaults-neg.scala:138: error: not found: value a
val taf2: Int => Unit = testAnnFun(a = _, b = get("+"))
^
-names-defaults-neg.scala:134: error: not found: value get
+names-defaults-neg.scala:138: error: not found: value get
val taf2: Int => Unit = testAnnFun(a = _, b = get("+"))
^
-names-defaults-neg.scala:135: error: parameter 'a' is already specified at parameter position 1
+names-defaults-neg.scala:139: error: parameter 'a' is already specified at parameter position 1
val taf3 = testAnnFun(b = _: String, a = get(8))
^
-names-defaults-neg.scala:136: error: missing parameter type for expanded function ((x$3) => testAnnFun(x$3, ((x$4) => b = x$4)))
+names-defaults-neg.scala:140: error: missing parameter type for expanded function ((x$3) => testAnnFun(x$3, ((x$4) => b = x$4)))
val taf4: (Int, String) => Unit = testAnnFun(_, b = _)
^
-names-defaults-neg.scala:136: error: missing parameter type for expanded function ((x$4) => b = x$4)
+names-defaults-neg.scala:140: error: missing parameter type for expanded function ((x$4) => b = x$4)
val taf4: (Int, String) => Unit = testAnnFun(_, b = _)
^
-names-defaults-neg.scala:136: error: not found: value b
+names-defaults-neg.scala:140: error: not found: value b
val taf4: (Int, String) => Unit = testAnnFun(_, b = _)
^
-names-defaults-neg.scala:144: error: variable definition needs type because 'x' is used as a named argument in its body.
+names-defaults-neg.scala:148: error: variable definition needs type because 'x' is used as a named argument in its body.
def t3 { var x = t.f(x = 1) }
^
-names-defaults-neg.scala:147: error: variable definition needs type because 'x' is used as a named argument in its body.
+names-defaults-neg.scala:151: error: variable definition needs type because 'x' is used as a named argument in its body.
object t6 { var x = t.f(x = 1) }
^
-names-defaults-neg.scala:147: warning: failed to determine if 'x = ...' is a named argument or an assignment expression.
+names-defaults-neg.scala:151: warning: failed to determine if 'x = ...' is a named argument or an assignment expression.
an explicit type is required for the definition mentioned in the error message above.
object t6 { var x = t.f(x = 1) }
^
-names-defaults-neg.scala:150: error: variable definition needs type because 'x' is used as a named argument in its body.
+names-defaults-neg.scala:154: error: variable definition needs type because 'x' is used as a named argument in its body.
class t9 { var x = t.f(x = 1) }
^
-names-defaults-neg.scala:150: warning: failed to determine if 'x = ...' is a named argument or an assignment expression.
+names-defaults-neg.scala:154: warning: failed to determine if 'x = ...' is a named argument or an assignment expression.
an explicit type is required for the definition mentioned in the error message above.
class t9 { var x = t.f(x = 1) }
^
-names-defaults-neg.scala:164: error: variable definition needs type because 'x' is used as a named argument in its body.
+names-defaults-neg.scala:168: error: variable definition needs type because 'x' is used as a named argument in its body.
def u3 { var x = u.f(x = 1) }
^
-names-defaults-neg.scala:167: error: variable definition needs type because 'x' is used as a named argument in its body.
+names-defaults-neg.scala:171: error: variable definition needs type because 'x' is used as a named argument in its body.
def u6 { var x = u.f(x = "32") }
^
-names-defaults-neg.scala:170: error: reference to x is ambiguous; it is both a method parameter and a variable in scope.
+names-defaults-neg.scala:174: error: reference to x is ambiguous; it is both a method parameter and a variable in scope.
def u9 { var x: Int = u.f(x = 1) }
^
-names-defaults-neg.scala:177: error: variable definition needs type because 'x' is used as a named argument in its body.
+names-defaults-neg.scala:181: error: variable definition needs type because 'x' is used as a named argument in its body.
class u15 { var x = u.f(x = 1) }
^
-names-defaults-neg.scala:177: warning: failed to determine if 'x = ...' is a named argument or an assignment expression.
+names-defaults-neg.scala:181: warning: failed to determine if 'x = ...' is a named argument or an assignment expression.
an explicit type is required for the definition mentioned in the error message above.
class u15 { var x = u.f(x = 1) }
^
-names-defaults-neg.scala:180: error: reference to x is ambiguous; it is both a method parameter and a variable in scope.
+names-defaults-neg.scala:184: error: reference to x is ambiguous; it is both a method parameter and a variable in scope.
class u18 { var x: Int = u.f(x = 1) }
^
-four warnings found
+6 warnings found
46 errors found
diff --git a/test/files/neg/names-defaults-neg.scala b/test/files/neg/names-defaults-neg.scala
index 042f73708c..c809b9a7a2 100644
--- a/test/files/neg/names-defaults-neg.scala
+++ b/test/files/neg/names-defaults-neg.scala
@@ -92,6 +92,10 @@ object Test extends App {
def deprNam3(@deprecatedName('x) a: Int, @deprecatedName('y) b: Int) = a + b
deprNam3(y = 10, b = 2)
+ def deprNam4(@deprecatedName('deprNam4Arg) deprNam4Arg: String) = 0
+ deprNam4(deprNam4Arg = null)
+ def deprNam5(@deprecatedName deprNam5Arg: String) = 0
+ deprNam5(deprNam5Arg = null)
// t3818
def f3818(x: Int = 1, y: Int, z: Int = 1) = 0
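
The two new cases above exercise @deprecatedName where the deprecated alias coincides with the current parameter name and where no alias is given. A hedged sketch of ordinary use of the scala.deprecatedName annotation (names here are illustrative):

object DeprecatedNameSketch {
  // 'name is the old, deprecated parameter name; "who" is the current one.
  def greet(@deprecatedName('name) who: String) = s"hello, $who"

  // Calling greet(name = "x") still compiles but emits a deprecation warning,
  // which is the kind of diagnostic the updated neg test expects.
  val ok = greet(who = "x")
}
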
diff --git a/test/files/neg/t8764.check b/test/files/neg/t8764.check
deleted file mode 100644
index 6d89ebe106..0000000000
--- a/test/files/neg/t8764.check
+++ /dev/null
@@ -1,6 +0,0 @@
-t8764.scala:8: error: type mismatch;
- found : AnyVal
- required: Double
- val d: Double = a.productElement(0)
- ^
-one error found
diff --git a/test/files/neg/t8764.flags b/test/files/neg/t8764.flags
deleted file mode 100644
index 48fd867160..0000000000
--- a/test/files/neg/t8764.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
diff --git a/test/files/neg/t8764.scala b/test/files/neg/t8764.scala
deleted file mode 100644
index dc5bfb0160..0000000000
--- a/test/files/neg/t8764.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-object Main {
-
- case class IntAndDouble(i: Int, d: Double)
-
- // a.productElement used to be Int => Double
- // now: Int => AnyVal
- val a = IntAndDouble(1, 5.0)
- val d: Double = a.productElement(0)
-}
diff --git a/test/files/neg/t8849.check b/test/files/neg/t8849.check
new file mode 100644
index 0000000000..15b00aee8b
--- /dev/null
+++ b/test/files/neg/t8849.check
@@ -0,0 +1,7 @@
+t8849.scala:8: error: ambiguous implicit values:
+ both value global in object Implicits of type => scala.concurrent.ExecutionContext
+ and value dummy of type scala.concurrent.ExecutionContext
+ match expected type scala.concurrent.ExecutionContext
+ require(implicitly[ExecutionContext] eq dummy)
+ ^
+one error found
diff --git a/test/files/neg/t8849.scala b/test/files/neg/t8849.scala
new file mode 100644
index 0000000000..336f16b40f
--- /dev/null
+++ b/test/files/neg/t8849.scala
@@ -0,0 +1,10 @@
+import scala.concurrent.ExecutionContext
+import ExecutionContext.Implicits.global
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ implicit val dummy: ExecutionContext = null
+ require(scala.concurrent.ExecutionContext.Implicits.global ne null)
+ require(implicitly[ExecutionContext] eq dummy)
+ }
+} \ No newline at end of file
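
The new neg test expects the compiler to report the imported ExecutionContext.Implicits.global and the local dummy as ambiguous rather than silently picking one. A sketch of the usual ways to keep a single context in scope (names are illustrative):

import scala.concurrent.{ExecutionContext, Future}

object AvoidAmbiguity {
  val myEc: ExecutionContext = ExecutionContext.global

  // Option 1: skip the Implicits.global import and declare the context implicitly.
  def viaLocalImplicit(): Future[Int] = {
    implicit val ec: ExecutionContext = myEc
    Future(42)
  }

  // Option 2: keep no implicit in scope and pass the context explicitly.
  def viaExplicitArgument(): Future[Int] =
    Future(42)(myEc)
}
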
diff --git a/test/files/pos/alladin763.scala b/test/files/pos/alladin763.scala
new file mode 100644
index 0000000000..29c9b25318
--- /dev/null
+++ b/test/files/pos/alladin763.scala
@@ -0,0 +1,37 @@
+// Test from http://lrytz.github.io/scala-aladdin-bugtracker/displayItem.do%3Fid=763.html
+// and expanded with package object variants
+
+
+trait Foo { type T; def apply() : T }
+object e extends Foo { type T = Int; def apply() = 42 }
+
+package p {
+ trait T[X] { def O : { def apply(): X } }
+ object `package` extends T[Int] {
+ def O: { def apply(): Int } = new { def apply(): Int = 42 }
+ }
+
+ object Test {
+ val x: Int = O()
+ }
+}
+
+object Test {
+
+ val f = new Foo { type T = Int; def apply() = 42 }
+
+ def main(args: Array[String]): Unit = {
+ val g = new Foo { type T = Int; def apply() = 42 }
+
+ (e: Foo)()
+ val ee: Int = e()
+
+ (f: Foo)()
+ val ff: Int = f()
+
+ (g: Foo)()
+ val gg: Int = g()
+
+ val pp: Int = p.O()
+ }
+}
diff --git a/test/files/pos/t6778.scala b/test/files/pos/t6778.scala
new file mode 100644
index 0000000000..b7483c8fce
--- /dev/null
+++ b/test/files/pos/t6778.scala
@@ -0,0 +1,5 @@
+object test extends AnyRef with App {
+ // Check that random can be called with parenthesis.
+ scala.math.random()
+}
+
diff --git a/test/files/pos/t7784.scala b/test/files/pos/t7784.scala
new file mode 100644
index 0000000000..e6824a4203
--- /dev/null
+++ b/test/files/pos/t7784.scala
@@ -0,0 +1,13 @@
+object Test {
+ final val a = ""
+ var b: a.type = a
+ b = a
+
+ final val x = classOf[Object]
+ var y: x.type = x
+ y = x
+
+ final val e = Thread.State.NEW
+ var e1: e.type = e
+ e1 = e
+}
diff --git a/test/files/pos/t8462.scala b/test/files/pos/t8462.scala
new file mode 100644
index 0000000000..6946cf8e5e
--- /dev/null
+++ b/test/files/pos/t8462.scala
@@ -0,0 +1,11 @@
+
+trait ConstantOps {
+ def exprs = (
+ 1 << 2L : Int, // was: error: type mismatch; found : Long(4L)
+ 64 >> 2L : Int, // was: error: type mismatch; found : Long(4L)
+ 64 >>> 2L : Int, // was: error: type mismatch; found : Long(4L)
+ 'a' << 2L : Int,
+ 'a' >> 2L : Int,
+ 'a'>>> 2L : Int
+ )
+}
diff --git a/test/files/pos/t8862a.scala b/test/files/pos/t8862a.scala
new file mode 100644
index 0000000000..f9576707ba
--- /dev/null
+++ b/test/files/pos/t8862a.scala
@@ -0,0 +1,47 @@
+package p {
+
+ abstract class C[A] {
+ def x: A
+ implicit def oops: A = x
+ implicit def oopso: Option[A] = None
+ }
+
+ package q {
+
+ class Oops
+
+ object `package` extends C[Oops] {
+ override def x = new Oops
+ }
+
+ object Blah {
+ oops
+ oopso
+
+ // implicits found in enclosing context
+ implicitly[Oops]
+ implicitly[Option[Oops]]
+ }
+ }
+}
+
+package other {
+
+ object Blah {
+ // implicits found through this import
+ import p.q._
+
+ oops
+ oopso
+
+ implicitly[Oops]
+ implicitly[Option[Oops]]
+ }
+
+
+ object Blee {
+ // implicits found through the companion implicits
+ implicitly[p.q.Oops]
+ implicitly[Option[p.q.Oops]]
+ }
+}
diff --git a/test/files/pos/t8862b.scala b/test/files/pos/t8862b.scala
new file mode 100644
index 0000000000..8be7fb5fab
--- /dev/null
+++ b/test/files/pos/t8862b.scala
@@ -0,0 +1,12 @@
+package p {
+ trait T[X] { def O : { def apply(): X } }
+ object `package` extends T[Int] {
+ def O: { def apply(): Int } = new { def apply(): Int = 42 }
+ }
+
+ object Test {
+ def main(args: Array[String]): Unit = {
+ val x: Int = O()
+ }
+ }
+}
diff --git a/test/files/pos/t9074.scala b/test/files/pos/t9074.scala
new file mode 100644
index 0000000000..67db281f54
--- /dev/null
+++ b/test/files/pos/t9074.scala
@@ -0,0 +1,24 @@
+package blam {
+
+ package foo {
+
+ trait F[T] {
+ def f(d: Double, t: T): T = ???
+ def f(d: Int, t: T): T = ???
+ def f(d: String, t: T): T = ???
+
+ def g[A](a: T): T = ???
+ def g(a: Int) = ???
+ }
+ }
+
+ package object foo extends foo.F[Double] {
+ override def f(d: Double, t: Double): Double = ???
+ }
+}
+
+object Test {
+ import blam._
+ foo.f("3", 4.0)
+ foo.g[Any](1d) : Double
+}
diff --git a/test/files/pos/t9074b.scala b/test/files/pos/t9074b.scala
new file mode 100644
index 0000000000..dadcebf399
--- /dev/null
+++ b/test/files/pos/t9074b.scala
@@ -0,0 +1,15 @@
+trait Echo [T] {
+ def echo(t: T): Unit
+}
+
+trait IntEcho extends Echo[Int] {
+ def echo(t: Int) = println(t)
+}
+
+object echo extends IntEcho
+package object echo1 extends IntEcho
+
+object App extends App {
+ echo.echo(1)
+ echo1.echo(1)
+}
diff --git a/test/files/pos/t9131.scala b/test/files/pos/t9131.scala
new file mode 100644
index 0000000000..1a186a0a24
--- /dev/null
+++ b/test/files/pos/t9131.scala
@@ -0,0 +1,12 @@
+class Test {
+
+ def byNameFunc(f: (=> (() => Any)) => Any): Unit = ()
+
+ def test = {
+ // "value apply is not a member of => () => Any"
+ byNameFunc(z => z())
+ // okay
+ byNameFunc(z => z.apply())
+ byNameFunc(z => {val f = z; f()})
+ }
+}
diff --git a/test/files/run/analyzerPlugins.check b/test/files/run/analyzerPlugins.check
index 9803465ddc..1bb7c6ceab 100644
--- a/test/files/run/analyzerPlugins.check
+++ b/test/files/run/analyzerPlugins.check
@@ -19,7 +19,7 @@ canAdaptAnnotations(Trees$Typed, Any) [1]
canAdaptAnnotations(Trees$Typed, Int) [1]
lub(List(Int @testAnn, Int)) [1]
pluginsPt(?, Trees$Annotated) [7]
-pluginsPt(?, Trees$Apply) [8]
+pluginsPt(?, Trees$Apply) [11]
pluginsPt(?, Trees$ApplyImplicitView) [2]
pluginsPt(?, Trees$Assign) [7]
pluginsPt(?, Trees$Block) [4]
@@ -31,7 +31,7 @@ pluginsPt(?, Trees$Literal) [16]
pluginsPt(?, Trees$New) [5]
pluginsPt(?, Trees$PackageDef) [1]
pluginsPt(?, Trees$Return) [1]
-pluginsPt(?, Trees$Select) [47]
+pluginsPt(?, Trees$Select) [50]
pluginsPt(?, Trees$Super) [2]
pluginsPt(?, Trees$This) [20]
pluginsPt(?, Trees$TypeApply) [3]
@@ -93,6 +93,7 @@ pluginsTypeSigAccessor(value x) [1]
pluginsTypeSigAccessor(value y) [1]
pluginsTypeSigAccessor(variable count) [2]
pluginsTyped( <: Int, Trees$TypeBoundsTree) [2]
+pluginsTyped(()Double, Trees$Select) [6]
pluginsTyped(()Object, Trees$Select) [1]
pluginsTyped(()String, Trees$Ident) [1]
pluginsTyped(()String, Trees$TypeApply) [1]
@@ -112,7 +113,7 @@ pluginsTyped(<notype>, Trees$PackageDef) [1]
pluginsTyped(<notype>, Trees$TypeDef) [1]
pluginsTyped(<notype>, Trees$ValDef) [21]
pluginsTyped(=> Boolean @testAnn, Trees$Select) [1]
-pluginsTyped(=> Double, Trees$Select) [4]
+pluginsTyped(=> Double, Trees$Select) [1]
pluginsTyped(=> Int, Trees$Select) [5]
pluginsTyped(=> Int, Trees$TypeApply) [1]
pluginsTyped(=> String @testAnn, Trees$Select) [1]
@@ -131,6 +132,7 @@ pluginsTyped(Boolean(false), Trees$Literal) [2]
pluginsTyped(Boolean, Trees$Apply) [1]
pluginsTyped(Boolean, Trees$Select) [4]
pluginsTyped(Char('c'), Trees$Literal) [2]
+pluginsTyped(Double, Trees$Apply) [3]
pluginsTyped(Double, Trees$Select) [6]
pluginsTyped(Int @testAnn, Trees$TypeTree) [2]
pluginsTyped(Int @testAnn, Trees$Typed) [2]
diff --git a/test/files/run/classfile-format-51.scala b/test/files/run/classfile-format-51.scala
index 24b1ee8397..4351757a64 100644
--- a/test/files/run/classfile-format-51.scala
+++ b/test/files/run/classfile-format-51.scala
@@ -1,6 +1,5 @@
import java.io.{File, FileOutputStream}
-import scala.tools.nsc.settings.ScalaVersion
import scala.tools.partest._
import scala.tools.asm
import asm.{AnnotationVisitor, ClassWriter, FieldVisitor, Handle, MethodVisitor, Opcodes}
diff --git a/test/files/run/classfile-format-52.scala b/test/files/run/classfile-format-52.scala
index e12c84124c..6646e081c4 100644
--- a/test/files/run/classfile-format-52.scala
+++ b/test/files/run/classfile-format-52.scala
@@ -1,6 +1,5 @@
import java.io.{File, FileOutputStream}
-import scala.tools.nsc.settings.ScalaVersion
import scala.tools.partest._
import scala.tools.asm
import asm.{AnnotationVisitor, ClassWriter, FieldVisitor, Handle, MethodVisitor, Opcodes}
diff --git a/test/files/run/duration-coarsest.scala b/test/files/run/duration-coarsest.scala
index 51cb79287a..81fbb3cc84 100644
--- a/test/files/run/duration-coarsest.scala
+++ b/test/files/run/duration-coarsest.scala
@@ -25,4 +25,7 @@ object Test extends App {
23 hours,
40 days
) foreach (x => assert(x == x.toCoarsest, x))
-} \ No newline at end of file
+
+ // toCoarsest on a FiniteDuration should return a FiniteDuration
+ val finite: FiniteDuration = 1.second.toCoarsest
+}
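
The added assertion pins the static result type of toCoarsest when called on a FiniteDuration. A minimal sketch of what the call does (values are illustrative):

import scala.concurrent.duration._

object CoarsestSketch extends App {
  // toCoarsest re-expresses the duration in the largest unit that keeps it exact;
  // on a FiniteDuration the result is itself statically a FiniteDuration.
  val d: FiniteDuration = 60000.millis.toCoarsest
  println(d) // 1 minute
}
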
diff --git a/test/files/run/future-flatmap-exec-count.check b/test/files/run/future-flatmap-exec-count.check
index dd9dce64ed..7065c133e0 100644
--- a/test/files/run/future-flatmap-exec-count.check
+++ b/test/files/run/future-flatmap-exec-count.check
@@ -1,3 +1,4 @@
+warning: there was one deprecation warning; re-run with -deprecation for details
mapping
execute()
flatmapping
diff --git a/test/files/run/inline-ex-handlers.check b/test/files/run/inline-ex-handlers.check
index 7c885d2cc9..27a277d314 100644
--- a/test/files/run/inline-ex-handlers.check
+++ b/test/files/run/inline-ex-handlers.check
@@ -123,12 +123,12 @@
300 RETURN(UNIT)
@@ -583,6 +603,6 @@
with finalizer: null
-- catch (Throwable) in ArrayBuffer(7, 9, 10) starting at: 6
-+ catch (Throwable) in ArrayBuffer(7, 9, 10, 11) starting at: 6
+- catch (Throwable) in Vector(7, 9, 10) starting at: 6
++ catch (Throwable) in Vector(7, 9, 10, 11) starting at: 6
consisting of blocks: List(6)
with finalizer: null
-- catch (Throwable) in ArrayBuffer(4, 6, 7, 9, 10) starting at: 3
-+ catch (Throwable) in ArrayBuffer(4, 6, 7, 9, 10, 11, 12) starting at: 3
+- catch (Throwable) in Vector(4, 6, 7, 9, 10) starting at: 3
++ catch (Throwable) in Vector(4, 6, 7, 9, 10, 11, 12) starting at: 3
consisting of blocks: List(3)
@@ -618,3 +638,3 @@
startBlock: 1
@@ -171,8 +171,8 @@
}
@@ -690,3 +730,3 @@
with finalizer: null
-- catch (<none>) in ArrayBuffer(4, 5, 6, 8) starting at: 3
-+ catch (<none>) in ArrayBuffer(4, 5, 6, 8, 10) starting at: 3
+- catch (<none>) in Vector(4, 5, 6, 8) starting at: 3
++ catch (<none>) in Vector(4, 5, 6, 8, 10) starting at: 3
consisting of blocks: List(3)
@@ -714,5 +754,5 @@
def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
@@ -276,12 +276,12 @@
}
@@ -852,6 +918,6 @@
with finalizer: null
-- catch (Throwable) in ArrayBuffer(13, 14, 15, 18, 20, 21, 23) starting at: 4
-+ catch (Throwable) in ArrayBuffer(13, 14, 15, 18, 20, 21, 23, 25) starting at: 4
+- catch (Throwable) in Vector(13, 14, 15, 18, 20, 21, 23) starting at: 4
++ catch (Throwable) in Vector(13, 14, 15, 18, 20, 21, 23, 25) starting at: 4
consisting of blocks: List(9, 8, 6, 5, 4)
with finalizer: null
-- catch (<none>) in ArrayBuffer(4, 5, 6, 9, 13, 14, 15, 18, 20, 21, 23) starting at: 3
-+ catch (<none>) in ArrayBuffer(4, 5, 6, 9, 13, 14, 15, 18, 20, 21, 23, 25, 26) starting at: 3
+- catch (<none>) in Vector(4, 5, 6, 9, 13, 14, 15, 18, 20, 21, 23) starting at: 3
++ catch (<none>) in Vector(4, 5, 6, 9, 13, 14, 15, 18, 20, 21, 23, 25, 26) starting at: 3
consisting of blocks: List(3)
@@ -879,5 +945,5 @@
def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
@@ -317,8 +317,8 @@
127 CALL_METHOD scala.Predef.println (dynamic)
@@ -964,3 +1034,3 @@
with finalizer: null
-- catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16) starting at: 3
-+ catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16, 17) starting at: 3
+- catch (IllegalArgumentException) in Vector(6, 7, 8, 11, 13, 14, 16) starting at: 3
++ catch (IllegalArgumentException) in Vector(6, 7, 8, 11, 13, 14, 16, 17) starting at: 3
consisting of blocks: List(3)
@@ -988,5 +1058,5 @@
def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
diff --git a/test/files/run/lub-visibility.check b/test/files/run/lub-visibility.check
index 70734966f0..135cb3cb76 100644
--- a/test/files/run/lub-visibility.check
+++ b/test/files/run/lub-visibility.check
@@ -6,6 +6,6 @@ scala> // should infer List[scala.collection.immutable.Seq[Nothing]]
scala> // but reverted that for SI-5534.
scala> val x = List(List(), Vector())
-x: List[scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing] with java.io.Serializable] = List(List(), Vector())
+x: List[scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing] with Serializable] = List(List(), Vector())
scala> :quit
diff --git a/test/files/run/t2251b.check b/test/files/run/t2251b.check
index 4231fc6ea6..b60698d605 100644
--- a/test/files/run/t2251b.check
+++ b/test/files/run/t2251b.check
@@ -1,4 +1,4 @@
-TypeTag[List[scala.collection.immutable.LinearSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with java.io.Serializable]]
+TypeTag[List[scala.collection.immutable.LinearSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with Serializable]]
TypeTag[List[scala.collection.immutable.Iterable[B[_ >: F with E with D with C <: B[_ >: F with E with D with C <: A]]] with F with Int => Any]]
TypeTag[List[scala.collection.immutable.Seq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with Serializable]]
TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G with F <: A]]]]]
@@ -6,6 +6,6 @@ TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G w
TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G with F <: A]]]]]
TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]]
TypeTag[List[scala.collection.Map[_ >: F with C <: B[_ >: F with C <: B[_ >: F with C <: A]], B[_ >: G with D <: B[_ >: G with D <: A]]]]]
-TypeTag[List[scala.collection.AbstractSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with scala.collection.LinearSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with java.io.Serializable]]
+TypeTag[List[scala.collection.AbstractSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with scala.collection.LinearSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with Serializable]]
TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]]
TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]]
diff --git a/test/files/run/t4332.scala b/test/files/run/t4332.scala
index 5a67922911..1c7e7d73de 100644
--- a/test/files/run/t4332.scala
+++ b/test/files/run/t4332.scala
@@ -12,7 +12,7 @@ object Test extends DirectTest {
}
def isExempt(sym: Symbol) = {
- val exempt = Set("view", "repr", "sliceWithKnownDelta", "sliceWithKnownBound", "transform")
+ val exempt = Set("view", "repr", "sliceWithKnownDelta", "sliceWithKnownBound", "transform", "filterImpl")
(exempt contains sym.name.decoded)
}
diff --git a/test/files/run/t6827.check b/test/files/run/t6827.check
index 3a3a71c67d..4889e05be8 100644
--- a/test/files/run/t6827.check
+++ b/test/files/run/t6827.check
@@ -1,6 +1,6 @@
-start at -5: java.lang.IllegalArgumentException: requirement failed: start -5 out of range 10
-start at -1: java.lang.IllegalArgumentException: requirement failed: start -1 out of range 10
-start at limit: java.lang.IllegalArgumentException: requirement failed: start 10 out of range 10
+start at -5: java.lang.ArrayIndexOutOfBoundsException: -5
+start at -1: java.lang.ArrayIndexOutOfBoundsException: -1
+start at limit: ok
start at limit-1: ok
first 10: ok
read all: ok
@@ -8,8 +8,8 @@ test huge len: ok
5 from 5: ok
20 from 5: ok
test len overflow: ok
-start beyond limit: java.lang.IllegalArgumentException: requirement failed: start 30 out of range 10
+start beyond limit: ok
read 0: ok
read -1: ok
-invalid read 0: java.lang.IllegalArgumentException: requirement failed: start 30 out of range 10
-invalid read -1: java.lang.IllegalArgumentException: requirement failed: start 30 out of range 10
+invalid read 0: ok
+invalid read -1: ok
diff --git a/test/files/run/t6827.scala b/test/files/run/t6827.scala
index 8e17af09e2..eb020711bb 100644
--- a/test/files/run/t6827.scala
+++ b/test/files/run/t6827.scala
@@ -31,4 +31,24 @@ object Test extends App {
// okay, see SI-7128
"...".toIterator.copyToArray(new Array[Char](0), 0, 0)
+
+
+ // Bonus test from @som-snytt to check for overflow in
+ // index calculations.
+ def testOverflow(start: Int, len: Int, expected: List[Char]) {
+ def copyFromIterator = {
+ val arr = Array.fill[Char](3)('-')
+ "abc".toIterator.copyToArray(arr, start, len)
+ arr.toList
+ }
+ def copyFromArray = {
+ val arr = Array.fill[Char](3)('-')
+ "abc".toArray.copyToArray(arr, start, len)
+ arr.toList
+ }
+ assert(copyFromIterator == expected)
+ assert(copyFromArray == expected)
+ }
+ testOverflow(1, Int.MaxValue - 1, "-ab".toList)
+ testOverflow(1, Int.MaxValue, "-ab".toList)
}
diff --git a/test/files/run/t7521/Test.scala b/test/files/run/t7521/Test.scala
new file mode 100644
index 0000000000..e9816ad6cb
--- /dev/null
+++ b/test/files/run/t7521/Test.scala
@@ -0,0 +1,5 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ new Wrapper(new Array[Int](1))
+ }
+}
diff --git a/test/files/run/t7521/Wrapper.scala b/test/files/run/t7521/Wrapper.scala
new file mode 100644
index 0000000000..0b923f8924
--- /dev/null
+++ b/test/files/run/t7521/Wrapper.scala
@@ -0,0 +1 @@
+class Wrapper[Repr](val xs: Repr) extends AnyVal
diff --git a/test/files/run/t7521b.check b/test/files/run/t7521b.check
new file mode 100644
index 0000000000..4d96df106d
--- /dev/null
+++ b/test/files/run/t7521b.check
@@ -0,0 +1,7 @@
+= Java Erased Signatures =
+public int C.a(Wrapper)
+public int C.b(Wrapper)
+
+= Java Generic Signatures =
+public int C.a(Wrapper<int[]>)
+public int C.b(Wrapper<java.lang.Object>)
diff --git a/test/files/run/t7521b.scala b/test/files/run/t7521b.scala
new file mode 100644
index 0000000000..c9e27f28b4
--- /dev/null
+++ b/test/files/run/t7521b.scala
@@ -0,0 +1,20 @@
+class Wrapper[X](x: X)
+
+class C {
+ def a(w: Wrapper[Array[Int]]) = 0
+ def b(w: Wrapper[Int]) = 0
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val c = new C
+ c.a(new Wrapper(Array(1, 2)))
+ c.b(new Wrapper(1))
+
+ val methods = classOf[C].getDeclaredMethods.sortBy(_.getName)
+ println("= Java Erased Signatures =")
+ println(methods.mkString("\n"))
+ println("\n= Java Generic Signatures =")
+ println(methods.map(_.toGenericString).mkString("\n"))
+ }
+}
diff --git a/test/files/run/t7775.scala b/test/files/run/t7775.scala
index 48b0d89974..bc69064e17 100644
--- a/test/files/run/t7775.scala
+++ b/test/files/run/t7775.scala
@@ -1,3 +1,45 @@
+import scala.concurrent._, duration._
+import ExecutionContext.Implicits.global
+import scala.tools.reflect.WrappedProperties.AccessControl._
+import java.util.concurrent.CyclicBarrier
+
+object Test extends App {
+ @volatile var done = false
+ val barrier = new CyclicBarrier(2)
+
+ val probe = Future {
+ val attempts = 1024 // previously, failed after a few
+ def fail(i: Int) = s"Failed at $i"
+ barrier.await()
+ for (i <- 1 to attempts ; p <- systemProperties)
+ p match { case (k, v) => assert (k != null && v != null, fail(i)) }
+ }
+ probe onComplete {
+ case _ => done = true
+ }
+
+ System.setProperty("foo", "fooz")
+ System.setProperty("bar", "barz")
+ barrier.await() // just for fun, wait to start mucking with properties
+
+ // continually modify properties trying to break live iteration over sys props
+ // hint: don't iterate lively over sys props
+ var alt = true
+ while (!done) {
+ if (alt) {
+ System.getProperties.remove("foo")
+ System.setProperty("bar", "barz")
+ alt = false
+ } else {
+ System.getProperties.remove("bar")
+ System.setProperty("foo", "fooz")
+ alt = true
+ }
+ }
+ Await.result(probe, Duration.Inf)
+}
+
+/*
import scala.concurrent.{duration, Future, Await, ExecutionContext}
import scala.tools.nsc.Settings
import ExecutionContext.Implicits.global
@@ -15,3 +57,4 @@ object Test {
Await.result(compiler, duration.Duration.Inf)
}
}
+*/
diff --git a/test/files/run/t8575.scala b/test/files/run/t8575.scala
new file mode 100644
index 0000000000..fb8f603f3e
--- /dev/null
+++ b/test/files/run/t8575.scala
@@ -0,0 +1,32 @@
+class E[F]
+class A
+class B
+class C
+
+trait TypeMember {
+ type X
+
+ // This call throws an AbstractMethodError, because it invokes the erasure
+ // of consume(X): Unit, namely consume(Object): Unit, but the corresponding
+ // bridge method is not generated.
+ consume(value)
+
+ def value: X
+ def consume(x: X): Unit
+}
+
+object Test extends TypeMember {
+ type F = A with B
+
+ // works if replaced by type X = E[A with B with C]
+ type X = E[F with C]
+
+ def value = new E[F with C]
+
+ // This call passes, since it invokes consume(E): Unit
+ def consume(x: X) {}
+
+ def main(args: Array[String]) {
+ consume(value)
+ }
+}
diff --git a/test/files/run/t8575b.scala b/test/files/run/t8575b.scala
new file mode 100644
index 0000000000..0d731ccf9f
--- /dev/null
+++ b/test/files/run/t8575b.scala
@@ -0,0 +1,17 @@
+class A
+class B
+class C
+
+object Test {
+ type F = A with B
+
+ def main(args: Array[String]) {
+ import reflect.runtime.universe._
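+ // once the alias F is expanded, all three spellings of the compound type should be mutually equivalent (=:=)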
+ val t1 = typeOf[F with C]
+ val t2 = typeOf[(A with B) with C]
+ val t3 = typeOf[A with B with C]
+ assert(t1 =:= t2)
+ assert(t2 =:= t3)
+ assert(t3 =:= t1)
+ }
+}
diff --git a/test/files/run/t8575c.scala b/test/files/run/t8575c.scala
new file mode 100644
index 0000000000..8219952299
--- /dev/null
+++ b/test/files/run/t8575c.scala
@@ -0,0 +1,23 @@
+class C
+
+trait TypeMember {
+ type X
+ type Y
+ type Z
+}
+
+object Test extends TypeMember {
+ type A = X with Y
+ type B = Z with A
+ type F = A with B
+
+ def main(args: Array[String]) {
+ import reflect.runtime.universe._
+ val t1 = typeOf[F with C]
+ val t2 = typeOf[(A with B) with C]
+ val t3 = typeOf[A with B with C]
+ assert(t1 =:= t2)
+ assert(t2 =:= t3)
+ assert(t3 =:= t1)
+ }
+}
diff --git a/test/files/run/t8710.scala b/test/files/run/t8710.scala
new file mode 100644
index 0000000000..15aab5b8a4
--- /dev/null
+++ b/test/files/run/t8710.scala
@@ -0,0 +1,17 @@
+class Bar(val x: Int) extends AnyVal {
+ def f: String = f(0)
+ private def f(x: Int): String = ""
+}
+
+class Baz(val x: Int) extends AnyVal {
+ def f: String = "123"
+ private def f(x: Int): String = ""
+}
+object Baz {
+ def x(b: Baz) = b.f(0)
+}
+
+object Test extends App {
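+ // call f on both value classes; Bar.f delegates to the private overload f(Int)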
+ new Bar(23).f
+ new Baz(23).f
+}
diff --git a/test/files/run/t8764.check b/test/files/run/t8764.check
deleted file mode 100644
index 6260069602..0000000000
--- a/test/files/run/t8764.check
+++ /dev/null
@@ -1,5 +0,0 @@
-IntOnly: should return an unboxed int
-Int: int
-IntAndDouble: should just box and return Anyval
-Double: class java.lang.Double
-Int: class java.lang.Integer
diff --git a/test/files/run/t8764.flags b/test/files/run/t8764.flags
deleted file mode 100644
index 48fd867160..0000000000
--- a/test/files/run/t8764.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
diff --git a/test/files/run/t8764.scala b/test/files/run/t8764.scala
deleted file mode 100644
index decc658f6e..0000000000
--- a/test/files/run/t8764.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-object Test extends App {
-case class IntOnly(i: Int, j: Int)
-
-println("IntOnly: should return an unboxed int")
-val a = IntOnly(1, 2)
-val i: Int = a.productElement(0)
-println(s"Int: ${a.productElement(0).getClass}")
-
-case class IntAndDouble(i: Int, d: Double)
-
-println("IntAndDouble: should just box and return Anyval")
-val b = IntAndDouble(1, 2.0)
-val j: AnyVal = b.productElement(0)
-println(s"Double: ${b.productElement(1).getClass}")
-println(s"Int: ${b.productElement(0).getClass}")
-}
diff --git a/test/files/run/t8918-unary-ids.scala b/test/files/run/t8918-unary-ids.scala
new file mode 100644
index 0000000000..1f29abe464
--- /dev/null
+++ b/test/files/run/t8918-unary-ids.scala
@@ -0,0 +1,49 @@
+
+
+import scala.tools.partest.SessionTest
+
+// Taking unary ids as plain identifiers
+object Test extends SessionTest {
+ def session =
+"""Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> val - = 42
+-: Int = 42
+
+scala> val i = -
+i: Int = 42
+
+scala> - { 42 }
+res0: Int = -42
+
+scala> - if (true) 1 else 2
+<console>:1: error: illegal start of simple expression
+ - if (true) 1 else 2
+ ^
+
+scala> - - 1
+<console>:1: error: ';' expected but integer literal found.
+ - - 1
+ ^
+
+scala> -.-(1)
+res1: Int = 41
+
+scala> -
+res2: Int = 42
+
+scala> - -
+res3: Int = -42
+
+scala> + -
+res4: Int = 42
+
+scala> object X { def -(i: Int) = 42 - i ; def f(g: Int => Int) = g(7) ; def j = f(-) }
+defined object X
+
+scala> X.j
+res5: Int = 35
+
+scala> :quit"""
+}
diff --git a/test/files/run/t8944/A_1.scala b/test/files/run/t8944/A_1.scala
new file mode 100644
index 0000000000..7ff80327b0
--- /dev/null
+++ b/test/files/run/t8944/A_1.scala
@@ -0,0 +1 @@
+case class A(private val x: String)
diff --git a/test/files/run/t8944/A_2.scala b/test/files/run/t8944/A_2.scala
new file mode 100644
index 0000000000..3dcdea1583
--- /dev/null
+++ b/test/files/run/t8944/A_2.scala
@@ -0,0 +1,6 @@
+case class Other(private val x: String) // consume a fresh name suffix
+
+// the param accessor will now be called "x$2",
+// whereas the previously compiled client expects it to be called
+// x$1
+case class A(private val x: String)
diff --git a/test/files/run/t8944/Test_1.scala b/test/files/run/t8944/Test_1.scala
new file mode 100644
index 0000000000..fe466693cf
--- /dev/null
+++ b/test/files/run/t8944/Test_1.scala
@@ -0,0 +1,3 @@
+object Test extends App {
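+ // compiled against A_1.scala; must still link and match after A is recompiled by A_2.scala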
+ val A("") = new A("")
+}
diff --git a/test/files/run/t8944b.scala b/test/files/run/t8944b.scala
new file mode 100644
index 0000000000..f469122ce6
--- /dev/null
+++ b/test/files/run/t8944b.scala
@@ -0,0 +1,9 @@
+case class A(private var foo: Any) {
+ def m = { def foo = 42 /*will be lambda-lifted to `A#foo$1`*/ }
+}
+object Test {
+ def main(args: Array[String]): Unit = {
+ val A("") = new A("")
+ new A("").m
+ }
+}
diff --git a/test/files/run/t8944c.check b/test/files/run/t8944c.check
new file mode 100644
index 0000000000..7738f76980
--- /dev/null
+++ b/test/files/run/t8944c.check
@@ -0,0 +1,5 @@
+private java.lang.Object Foo.ant()
+public java.lang.Object Foo.ant$access$0()
+private scala.collection.Seq Foo.cat()
+public scala.collection.Seq Foo.cat$access$2()
+public java.lang.Object Foo.elk()
diff --git a/test/files/run/t8944c.scala b/test/files/run/t8944c.scala
new file mode 100644
index 0000000000..95c2143851
--- /dev/null
+++ b/test/files/run/t8944c.scala
@@ -0,0 +1,8 @@
+case class Foo[A](private val ant: Any, elk: Any, private val cat: A*)
+
+object Test {
+ def main(args: Array[String]): Unit = {
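+ // list Foo's param accessors and their public $access$ methods; output is compared with t8944c.check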
+ def pred(name: String) = Set("ant", "elk", "cat").exists(name contains _)
+ println(classOf[Foo[_]].getDeclaredMethods.filter(m => pred(m.getName)).sortBy(_.getName).mkString("\n"))
+ }
+}
diff --git a/test/files/run/t8955.scala b/test/files/run/t8955.scala
new file mode 100644
index 0000000000..afa31aa5d7
--- /dev/null
+++ b/test/files/run/t8955.scala
@@ -0,0 +1,12 @@
+import scala.collection.parallel.immutable.ParSet
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ for (i <- 1 to 2000) test()
+ }
+
+ def test() {
+ ParSet[Int]((1 to 10000): _*) foreach (x => ()) // hangs non-deterministically
+ }
+}
+
diff --git a/test/files/run/t9174.check b/test/files/run/t9174.check
new file mode 100644
index 0000000000..9320a081ff
--- /dev/null
+++ b/test/files/run/t9174.check
@@ -0,0 +1,19 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> import scala.util.{Success, Failure}
+import scala.util.{Success, Failure}
+
+scala> def f1(b: Boolean) = if (b) Left(1) else Right(2)
+f1: (b: Boolean)scala.util.Either[Int,Int]
+
+scala> def f2(b: Boolean) = if (b) Nil else 1 :: Nil
+f2: (b: Boolean)List[Int]
+
+scala> def f3(b: Boolean) = if (b) Stream.Empty else new Stream.Cons(1, Stream.Empty)
+f3: (b: Boolean)scala.collection.immutable.Stream[Int]
+
+scala> def f4(b: Boolean) = if (b) Success(1) else Failure(new Exception(""))
+f4: (b: Boolean)scala.util.Try[Int]
+
+scala> :quit
diff --git a/test/files/run/t9174.scala b/test/files/run/t9174.scala
new file mode 100644
index 0000000000..0c70e9bca9
--- /dev/null
+++ b/test/files/run/t9174.scala
@@ -0,0 +1,11 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
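+ // the REPL transcript, including the inferred result types of f1..f4, is compared against t9174.check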
+ def code = """
+ |import scala.util.{Success, Failure}
+ |def f1(b: Boolean) = if (b) Left(1) else Right(2)
+ |def f2(b: Boolean) = if (b) Nil else 1 :: Nil
+ |def f3(b: Boolean) = if (b) Stream.Empty else new Stream.Cons(1, Stream.Empty)
+ |def f4(b: Boolean) = if (b) Success(1) else Failure(new Exception(""))
+ |""".stripMargin
+}
diff --git a/test/files/run/t9200/Test.java b/test/files/run/t9200/Test.java
new file mode 100644
index 0000000000..8ff0314f6c
--- /dev/null
+++ b/test/files/run/t9200/Test.java
@@ -0,0 +1,6 @@
+public class Test {
+ public static void main(String[] args) {
+ new C1(new C2()); // Was NoSuchMethodError
+ }
+}
+
diff --git a/test/files/run/t9200/test.scala b/test/files/run/t9200/test.scala
new file mode 100644
index 0000000000..6fa7e91571
--- /dev/null
+++ b/test/files/run/t9200/test.scala
@@ -0,0 +1,12 @@
+trait W
+
+trait T1
+trait T2 extends T1
+
+object O1 {
+ type t = T1 with T2
+}
+
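+// C1's parameter type is the intersection alias O1.t; Test.java constructs C1 from Java code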
+class C1[w<:W](o: O1.t)
+
+class C2 extends T1 with T2
diff --git a/test/files/scalacheck/concurrent-map.scala b/test/files/scalacheck/concurrent-map.scala
new file mode 100755
index 0000000000..7c9b8d4169
--- /dev/null
+++ b/test/files/scalacheck/concurrent-map.scala
@@ -0,0 +1,76 @@
+
+
+
+import java.util.concurrent._
+import scala.collection._
+import scala.collection.JavaConverters._
+import org.scalacheck._
+import org.scalacheck.Prop._
+import org.scalacheck.Gen._
+
+
+
+case class Wrap(i: Int) {
+ override def hashCode = i * 0x9e3775cd
+}
+
+
+object Test extends Properties("concurrent.TrieMap") {
+
+ /* generators */
+
+ val sizes = choose(0, 20000)
+
+ val threadCounts = choose(2, 16)
+
+ val threadCountsAndSizes = for {
+ p <- threadCounts
+ sz <- sizes
+ } yield (p, sz)
+
+
+ /* helpers */
+
+ def inParallel[T](totalThreads: Int)(body: Int => T): Seq[T] = {
+ val threads = for (idx <- 0 until totalThreads) yield new Thread {
+ setName("ParThread-" + idx)
+ private var res: T = _
+ override def run() {
+ res = body(idx)
+ }
+ def result = {
+ this.join()
+ res
+ }
+ }
+
+ threads foreach (_.start())
+ threads map (_.result)
+ }
+
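+ // all threads race to getOrElseUpdate the same keys; for every key, each thread must observe the same winning value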
+ property("concurrent getOrElseUpdate insertions") = forAll(threadCounts, sizes) {
+ (p, sz) =>
+ val chm = new ConcurrentHashMap[Wrap, Int]().asScala
+
+ val results = inParallel(p) {
+ idx =>
+ for (i <- 0 until sz) yield chm.getOrElseUpdate(new Wrap(i), idx)
+ }
+
+ val resultSets = for (i <- 0 until sz) yield results.map(_(i)).toSet
+ val largerThanOne = resultSets.zipWithIndex.find(_._1.size != 1)
+ val allThreadsAgreeOnWhoInserted = {
+ largerThanOne == None
+ } :| s"$p threads agree on who inserted [disagreement (differentResults, position) = $largerThanOne]"
+
+ allThreadsAgreeOnWhoInserted
+ }
+
+
+}
+
+
+
+
+
+
diff --git a/test/junit/scala/collection/SeqViewTest.scala b/test/junit/scala/collection/SeqViewTest.scala
new file mode 100644
index 0000000000..24474fc4b9
--- /dev/null
+++ b/test/junit/scala/collection/SeqViewTest.scala
@@ -0,0 +1,16 @@
+package scala.collection
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Assert._
+import org.junit.Test
+
+@RunWith(classOf[JUnit4])
+class SeqViewTest {
+
+ @Test
+ def test_SI8691() {
+ // Really just testing to make sure ++: doesn't throw an exception
+ assert( Seq(1,2) ++: Seq(3,4).view == Seq(1,2,3,4) )
+ }
+}
diff --git a/test/junit/scala/collection/SetMapConsistencyTest.scala b/test/junit/scala/collection/SetMapConsistencyTest.scala
index 261c11a98b..0749e61c09 100644
--- a/test/junit/scala/collection/SetMapConsistencyTest.scala
+++ b/test/junit/scala/collection/SetMapConsistencyTest.scala
@@ -529,4 +529,15 @@ class SetMapConsistencyTest {
assert(nit == 4)
assert(nfe == 4)
}
+
+ @Test
+ def test_SI8727() {
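+ // filterKeys stays lazy: the predicate runs on each lookup, so querying a key absent from the original map throws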
+ import scala.tools.testing.AssertUtil._
+ type NSEE = NoSuchElementException
+ val map = Map(0 -> "zero", 1 -> "one")
+ val m = map.filterKeys(i => if (map contains i) true else throw new NSEE)
+ assert{ (m contains 0) && (m get 0).nonEmpty }
+ assertThrows[NSEE]{ m contains 2 }
+ assertThrows[NSEE]{ m get 2 }
+ }
}
diff --git a/test/junit/scala/collection/convert/NullSafetyTest.scala b/test/junit/scala/collection/convert/NullSafetyTest.scala
new file mode 100644
index 0000000000..de5481d9e2
--- /dev/null
+++ b/test/junit/scala/collection/convert/NullSafetyTest.scala
@@ -0,0 +1,279 @@
+package scala.collection.convert
+
+import java.{util => ju, lang => jl}
+import ju.{concurrent => juc}
+
+import org.junit.Test
+import org.junit.experimental.runners.Enclosed
+import org.junit.runner.RunWith
+
+import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
+import scala.collection.{mutable, concurrent}
+
+@RunWith(classOf[Enclosed])
+object NullSafetyTest {
+
+ /*
+ * Pertinent: SI-9113
+ * Tests to ensure that wrappers return null instead of wrapping it in a collection
+ */
+
+ class ToScala {
+
+ @Test def testIteratorWrapping(): Unit = {
+ val nullJIterator: ju.Iterator[AnyRef] = null
+ val iterator: Iterator[AnyRef] = nullJIterator
+
+ assert(iterator == null)
+ }
+
+ @Test def testEnumerationWrapping(): Unit = {
+ val nullJEnumeration: ju.Enumeration[AnyRef] = null
+ val enumeration: Iterator[AnyRef] = nullJEnumeration
+
+ assert(enumeration == null)
+ }
+
+ @Test def testIterableWrapping(): Unit = {
+ val nullJIterable: jl.Iterable[AnyRef] = null
+ val iterable: Iterable[AnyRef] = nullJIterable
+
+ assert(iterable == null)
+ }
+
+ @Test def testCollectionWrapping(): Unit = {
+ val nullJCollection: ju.Collection[AnyRef] = null
+ val collection: Iterable[AnyRef] = nullJCollection
+
+ assert(collection == null)
+ }
+
+ @Test def testBufferWrapping(): Unit = {
+ val nullJList: ju.List[AnyRef] = null
+ val buffer: mutable.Buffer[AnyRef] = nullJList
+
+ assert(buffer == null)
+ }
+
+ @Test def testSetWrapping(): Unit = {
+ val nullJSet: ju.Set[AnyRef] = null
+ val set: mutable.Set[AnyRef] = nullJSet
+
+ assert(set == null)
+ }
+
+ @Test def testMapWrapping(): Unit = {
+ val nullJMap: ju.Map[AnyRef, AnyRef] = null
+ val map: mutable.Map[AnyRef, AnyRef] = nullJMap
+
+ assert(map == null)
+ }
+
+ @Test def testConcurrentMapWrapping(): Unit = {
+ val nullJConMap: juc.ConcurrentMap[AnyRef, AnyRef] = null
+ val conMap: concurrent.Map[AnyRef, AnyRef] = nullJConMap
+
+ assert(conMap == null)
+ }
+
+ @Test def testDictionaryWrapping(): Unit = {
+ val nullJDict: ju.Dictionary[AnyRef, AnyRef] = null
+ val dict: mutable.Map[AnyRef, AnyRef] = nullJDict
+
+ assert(dict == null)
+ }
+
+
+ @Test def testPropertyWrapping(): Unit = {
+ val nullJProps: ju.Properties = null
+ val props: mutable.Map[String, String] = nullJProps
+
+ assert(props == null)
+ }
+
+ @Test def testIteratorDecoration(): Unit = {
+ val nullJIterator: ju.Iterator[AnyRef] = null
+
+ assert(nullJIterator.asScala == null)
+ }
+
+ @Test def testEnumerationDecoration(): Unit = {
+ val nullJEnumeration: ju.Enumeration[AnyRef] = null
+
+ assert(nullJEnumeration.asScala == null)
+ }
+
+ @Test def testIterableDecoration(): Unit = {
+ val nullJIterable: jl.Iterable[AnyRef] = null
+
+ assert(nullJIterable.asScala == null)
+ }
+
+ @Test def testCollectionDecoration(): Unit = {
+ val nullJCollection: ju.Collection[AnyRef] = null
+
+ assert(nullJCollection.asScala == null)
+ }
+
+ @Test def testBufferDecoration(): Unit = {
+ val nullJBuffer: ju.List[AnyRef] = null
+
+ assert(nullJBuffer.asScala == null)
+ }
+
+ @Test def testSetDecoration(): Unit = {
+ val nullJSet: ju.Set[AnyRef] = null
+
+ assert(nullJSet.asScala == null)
+ }
+
+ @Test def testMapDecoration(): Unit = {
+ val nullJMap: ju.Map[AnyRef, AnyRef] = null
+
+ assert(nullJMap.asScala == null)
+ }
+
+ @Test def testConcurrentMapDecoration(): Unit = {
+ val nullJConMap: juc.ConcurrentMap[AnyRef, AnyRef] = null
+
+ assert(nullJConMap.asScala == null)
+ }
+
+ @Test def testDictionaryDecoration(): Unit = {
+ val nullJDict: ju.Dictionary[AnyRef, AnyRef] = null
+
+ assert(nullJDict.asScala == null)
+ }
+
+ @Test def testPropertiesDecoration(): Unit = {
+ val nullJProperties: ju.Properties = null
+
+ assert(nullJProperties.asScala == null)
+ }
+ }
+
+ class ToJava {
+
+ @Test def testIteratorWrapping(): Unit = {
+ val nullIterator: Iterator[AnyRef] = null
+ val jIterator: ju.Iterator[AnyRef] = nullIterator
+
+ assert(jIterator == null)
+ }
+
+ @Test def testEnumerationWrapping(): Unit = {
+ val nullEnumeration: Iterator[AnyRef] = null
+ val enumeration: ju.Iterator[AnyRef] = nullEnumeration
+
+ assert(enumeration == null)
+ }
+
+ @Test def testIterableWrapping(): Unit = {
+ val nullIterable: Iterable[AnyRef] = null
+ val iterable: jl.Iterable[AnyRef] = asJavaIterable(nullIterable)
+
+ assert(iterable == null)
+ }
+
+ @Test def testCollectionWrapping(): Unit = {
+ val nullCollection: Iterable[AnyRef] = null
+ val collection: ju.Collection[AnyRef] = nullCollection
+
+ assert(collection == null)
+ }
+
+ @Test def testBufferWrapping(): Unit = {
+ val nullList: mutable.Buffer[AnyRef] = null
+ val buffer: ju.List[AnyRef] = nullList
+
+ assert(buffer == null)
+ }
+
+ @Test def testSetWrapping(): Unit = {
+ val nullSet: mutable.Set[AnyRef] = null
+ val set: ju.Set[AnyRef] = nullSet
+
+ assert(set == null)
+ }
+
+ @Test def testMapWrapping(): Unit = {
+ val nullMap: mutable.Map[AnyRef, AnyRef] = null
+ val map: ju.Map[AnyRef, AnyRef] = nullMap
+
+ assert(map == null)
+ }
+
+ @Test def testConcurrentMapWrapping(): Unit = {
+ val nullConMap: concurrent.Map[AnyRef, AnyRef] = null
+ val conMap: juc.ConcurrentMap[AnyRef, AnyRef] = nullConMap
+
+ assert(conMap == null)
+ }
+
+ @Test def testDictionaryWrapping(): Unit = {
+ val nullDict: mutable.Map[AnyRef, AnyRef] = null
+ val dict: ju.Dictionary[AnyRef, AnyRef] = nullDict
+
+ assert(dict == null)
+ }
+
+ // Implicit conversion to ju.Properties is not available
+
+ @Test def testIteratorDecoration(): Unit = {
+ val nullIterator: Iterator[AnyRef] = null
+
+ assert(nullIterator.asJava == null)
+ }
+
+ @Test def testEnumerationDecoration(): Unit = {
+ val nullEnumeration: Iterator[AnyRef] = null
+
+ assert(nullEnumeration.asJavaEnumeration == null)
+ }
+
+ @Test def testIterableDecoration(): Unit = {
+ val nullIterable: Iterable[AnyRef] = null
+
+ assert(nullIterable.asJava == null)
+ }
+
+ @Test def testCollectionDecoration(): Unit = {
+ val nullCollection: Iterable[AnyRef] = null
+
+ assert(nullCollection.asJavaCollection == null)
+ }
+
+ @Test def testBufferDecoration(): Unit = {
+ val nullBuffer: mutable.Buffer[AnyRef] = null
+
+ assert(nullBuffer.asJava == null)
+ }
+
+ @Test def testSetDecoration(): Unit = {
+ val nullSet: Set[AnyRef] = null
+
+ assert(nullSet.asJava == null)
+ }
+
+ @Test def testMapDecoration(): Unit = {
+ val nullMap: mutable.Map[AnyRef, AnyRef] = null
+
+ assert(nullMap.asJava == null)
+ }
+
+ @Test def testConcurrentMapDecoration(): Unit = {
+ val nullConMap: concurrent.Map[AnyRef, AnyRef] = null
+
+ assert(nullConMap.asJava == null)
+ }
+
+ @Test def testDictionaryDecoration(): Unit = {
+ val nullDict: mutable.Map[AnyRef, AnyRef] = null
+
+ assert(nullDict.asJavaDictionary == null)
+ }
+
+ // Decorator conversion to ju.Properties is not available
+ }
+}
diff --git a/test/junit/scala/collection/immutable/StreamTest.scala b/test/junit/scala/collection/immutable/StreamTest.scala
new file mode 100644
index 0000000000..437cbc8926
--- /dev/null
+++ b/test/junit/scala/collection/immutable/StreamTest.scala
@@ -0,0 +1,108 @@
+package scala.collection.immutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import org.junit.Assert._
+
+import scala.ref.WeakReference
+import scala.util.Try
+
+@RunWith(classOf[JUnit4])
+class StreamTest {
+
+ @Test
+ def t6727_and_t6440(): Unit = {
+ assertTrue(Stream.continually(()).filter(_ => true).take(2) == Seq((), ()))
+ assertTrue(Stream.continually(()).filterNot(_ => false).take(2) == Seq((), ()))
+ assertTrue(Stream(1,2,3,4,5).filter(_ < 4) == Seq(1,2,3))
+ assertTrue(Stream(1,2,3,4,5).filterNot(_ > 4) == Seq(1,2,3,4))
+ }
+
+ /** Test helper to verify that the given Stream operation allows
+ * GC of the head during processing of the tail.
+ */
+ def assertStreamOpAllowsGC(op: (=> Stream[Int], Int => Unit) => Any, f: Int => Unit): Unit = {
+ val msgSuccessGC = "GC success"
+ val msgFailureGC = "GC failure"
+
+ // A stream of 500 elements at most. We will test that the head can be collected
+ // while processing the tail. After each element we will GC and wait 10 ms, so a
+ // failure to collect will take roughly 5 seconds.
+ val ref = WeakReference( Stream.from(1).take(500) )
+
+ def gcAndThrowIfCollected(n: Int): Unit = {
+ System.gc() // try to GC
+ Thread.sleep(10) // give it 10 ms
+ if (ref.get.isEmpty) throw new RuntimeException(msgSuccessGC) // we're done if head collected
+ f(n)
+ }
+
+ // success is indicated by an exception with the expected message;
+ // failure is indicated by no exception, or by one with a different message
+ val res = Try { op(ref(), gcAndThrowIfCollected) }.failed
+ val msg = res.map(_.getMessage).getOrElse(msgFailureGC)
+ assertTrue(msg == msgSuccessGC)
+ }
+
+ @Test
+ def foreach_allows_GC() {
+ assertStreamOpAllowsGC(_.foreach(_), _ => ())
+ }
+
+ @Test
+ def filter_all_foreach_allows_GC() {
+ assertStreamOpAllowsGC(_.filter(_ => true).foreach(_), _ => ())
+ }
+
+ @Test // SI-8990
+ def withFilter_after_first_foreach_allows_GC: Unit = {
+ assertStreamOpAllowsGC(_.withFilter(_ > 1).foreach(_), _ => ())
+ }
+
+ @Test // SI-8990
+ def withFilter_after_first_withFilter_foreach_allows_GC: Unit = {
+ assertStreamOpAllowsGC(_.withFilter(_ > 1).withFilter(_ < 100).foreach(_), _ => ())
+ }
+
+ @Test // SI-8990
+ def withFilter_can_retry_after_exception_thrown_in_filter: Unit = {
+ // use mutable state to control an intermittent failure in filtering the Stream
+ var shouldThrow = true
+
+ val wf = Stream.from(1).take(10).withFilter { n =>
+ if (shouldThrow && n == 5) throw new RuntimeException("n == 5") else n > 5
+ }
+
+ assertTrue( Try { wf.map(identity) }.isFailure ) // throws on n == 5
+
+ shouldThrow = false // won't throw next time
+
+ assertTrue( wf.map(identity).length == 5 ) // success instead of NPE
+ }
+
+ /** Test helper to verify that the given Stream operation is properly lazy in the tail */
+ def assertStreamOpLazyInTail(op: (=> Stream[Int]) => Stream[Int], expectedEvaluated: List[Int]): Unit = {
+ // mutable state to record every strict evaluation
+ var evaluated: List[Int] = Nil
+
+ def trackEffectsOnNaturals: Stream[Int] = {
+ def loop(i: Int): Stream[Int] = { evaluated ++= List(i); i #:: loop(i + 1) }
+ loop(1)
+ }
+
+ // call op on a stream which records every strict evaluation
+ val result = op(trackEffectsOnNaturals)
+
+ assertTrue( evaluated == expectedEvaluated )
+ }
+
+ @Test // SI-9134
+ def filter_map_properly_lazy_in_tail: Unit = {
+ assertStreamOpLazyInTail(_.filter(_ % 2 == 0).map(identity), List(1, 2))
+ }
+
+ @Test // SI-9134
+ def withFilter_map_properly_lazy_in_tail: Unit = {
+ assertStreamOpLazyInTail(_.withFilter(_ % 2 == 0).map(identity), List(1, 2))
+ }
+}
diff --git a/test/junit/scala/sys/process/t7350.scala b/test/junit/scala/sys/process/t7350.scala
new file mode 100644
index 0000000000..7f3e8897f2
--- /dev/null
+++ b/test/junit/scala/sys/process/t7350.scala
@@ -0,0 +1,298 @@
+
+package scala.sys.process
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import java.io.{InputStream, OutputStream, PipedInputStream, PipedOutputStream, ByteArrayInputStream,
+ ByteArrayOutputStream, IOException, Closeable}
+import java.lang.reflect.InvocationTargetException
+import scala.concurrent.{Await, Future}
+import scala.concurrent.duration.{Duration, SECONDS}
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.util.control.Exception.ignoring
+
+// Each test normally ends almost immediately; in failure cases it waits for up to one second.
+
+@RunWith(classOf[JUnit4])
+class PipedProcessTest {
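+ // a Process stub: exitValue() throws InterruptedException when error is set, and destroy() calls are counted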
+ class ProcessMock(error: Boolean) extends Process {
+ var destroyCount = 0
+ def exitValue(): Int = {
+ if (error) {
+ throw new InterruptedException()
+ }
+ 0
+ }
+ def destroy(): Unit = { destroyCount += 1 }
+ }
+
+ class ProcessBuilderMock(process: Process, error: Boolean) extends ProcessBuilder.AbstractBuilder {
+ override def run(io: ProcessIO): Process = {
+ if (error) {
+ throw new IOException()
+ }
+ process
+ }
+ }
+
+ class PipeSinkMock extends Process.PipeSink("PipeSinkMock") {
+ var releaseCount = 0
+ override val pipe = null
+ override val sink = null
+ override def run(): Unit = {}
+ override def connectOut(out: OutputStream): Unit = {}
+ override def connectIn(pipeOut: PipedOutputStream): Unit = {}
+ override def release(): Unit = { releaseCount += 1 }
+ }
+
+ class PipeSourceMock extends Process.PipeSource("PipeSourceMock") {
+ var releaseCount = 0
+ override val pipe = null
+ override val source = null
+ override def run(): Unit = {}
+ override def connectIn(in: InputStream): Unit = {}
+ override def connectOut(sink: Process.PipeSink): Unit = {}
+ override def release(): Unit = { releaseCount += 1 }
+ }
+
+ class PipedProcesses(a: ProcessBuilder, b: ProcessBuilder, defaultIO: ProcessIO, toError: Boolean)
+ extends Process.PipedProcesses(a, b, defaultIO, toError) {
+ def callRunAndExitValue(source: Process.PipeSource, sink: Process.PipeSink) = {
+ val m = classOf[Process.PipedProcesses].getDeclaredMethod("runAndExitValue", classOf[Process.PipeSource], classOf[Process.PipeSink])
+ m.setAccessible(true)
+ try m.invoke(this, source, sink).asInstanceOf[Option[Int]]
+ catch {
+ case err: InvocationTargetException => throw err.getTargetException
+ }
+ }
+ }
+
+ // PipedProcesses need not release resources when it ends normally
+ @Test
+ def normallyEnd() {
+ val io = BasicIO(false, ProcessLogger(_ => ()))
+ val source = new PipeSourceMock
+ val sink = new PipeSinkMock
+ val a = new ProcessMock(error = false)
+ val b = new ProcessMock(error = false)
+ val p = new PipedProcesses(new ProcessBuilderMock(a, error = false), new ProcessBuilderMock(b, error = false), io, false)
+ val f = Future {
+ p.callRunAndExitValue(source, sink)
+ }
+ Await.result(f, Duration(1, SECONDS))
+ assert(source.releaseCount == 0)
+ assert(sink.releaseCount == 0)
+ assert(a.destroyCount == 0)
+ assert(b.destroyCount == 0)
+ }
+
+ // PipedProcesses must release resources when b.run() fails
+ @Test
+ def bFailed() {
+ val io = BasicIO(false, ProcessLogger(_ => ()))
+ val source = new PipeSourceMock
+ val sink = new PipeSinkMock
+ val a = new ProcessMock(error = false)
+ val b = new ProcessMock(error = false)
+ val p = new PipedProcesses(new ProcessBuilderMock(a, error = false), new ProcessBuilderMock(b, error = true), io, false)
+ val f = Future {
+ ignoring(classOf[IOException]) {
+ p.callRunAndExitValue(source, sink)
+ }
+ }
+ Await.result(f, Duration(1, SECONDS))
+ assert(source.releaseCount == 1)
+ assert(sink.releaseCount == 1)
+ assert(a.destroyCount == 0)
+ assert(b.destroyCount == 0)
+ }
+
+ // PipedProcesses must release resources when a.run() fails
+ @Test
+ def aFailed() {
+ val io = BasicIO(false, ProcessLogger(_ => ()))
+ val source = new PipeSourceMock
+ val sink = new PipeSinkMock
+ val a = new ProcessMock(error = false)
+ val b = new ProcessMock(error = false)
+ val p = new PipedProcesses(new ProcessBuilderMock(a, error = true), new ProcessBuilderMock(b, error = false), io, false)
+ val f = Future {
+ ignoring(classOf[IOException]) {
+ p.callRunAndExitValue(source, sink)
+ }
+ }
+ Await.result(f, Duration(1, SECONDS))
+ assert(source.releaseCount == 1)
+ assert(sink.releaseCount == 1)
+ assert(a.destroyCount == 0)
+ assert(b.destroyCount == 1)
+ }
+
+ // PipedProcesses must release resources when interrupted while waiting for first.exitValue()
+ @Test
+ def firstInterrupted() {
+ val io = BasicIO(false, ProcessLogger(_ => ()))
+ val source = new PipeSourceMock
+ val sink = new PipeSinkMock
+ val a = new ProcessMock(error = true)
+ val b = new ProcessMock(error = false)
+ val p = new PipedProcesses(new ProcessBuilderMock(a, error = false), new ProcessBuilderMock(b, error = false), io, false)
+ val f = Future {
+ p.callRunAndExitValue(source, sink)
+ }
+ Await.result(f, Duration(1, SECONDS))
+ assert(source.releaseCount == 1)
+ assert(sink.releaseCount == 1)
+ assert(a.destroyCount == 1)
+ assert(b.destroyCount == 1)
+ }
+
+ // PipedProcesses must release resources when interrupted while waiting for second.exitValue()
+ @Test
+ def secondInterrupted() {
+ val io = BasicIO(false, ProcessLogger(_ => ()))
+ val source = new PipeSourceMock
+ val sink = new PipeSinkMock
+ val a = new ProcessMock(error = false)
+ val b = new ProcessMock(error = true)
+ val p = new PipedProcesses(new ProcessBuilderMock(a, error = false), new ProcessBuilderMock(b, error = false), io, false)
+ val f = Future {
+ p.callRunAndExitValue(source, sink)
+ }
+ Await.result(f, Duration(1, SECONDS))
+ assert(source.releaseCount == 1)
+ assert(sink.releaseCount == 1)
+ assert(a.destroyCount == 1)
+ assert(b.destroyCount == 1)
+ }
+}
+
+@RunWith(classOf[JUnit4])
+class PipeSourceSinkTest {
+ def throwsIOException(f: => Unit) = {
+ try { f; false }
+ catch { case _: IOException => true }
+ }
+
+ class PipeSink extends Process.PipeSink("TestPipeSink") {
+ def ensureRunloopStarted() = {
+ while (sink.size() > 0) {
+ Thread.sleep(1)
+ }
+ }
+ def isReleased = {
+ val field = classOf[Process.PipeSink].getDeclaredField("pipe")
+ field.setAccessible(true)
+ val pipe = field.get(this).asInstanceOf[PipedInputStream]
+ !this.isAlive && throwsIOException { pipe.read() }
+ }
+ }
+
+ class PipeSource extends Process.PipeSource("TestPipeSource") {
+ def ensureRunloopStarted() = {
+ while (source.size() > 0) {
+ Thread.sleep(1)
+ }
+ }
+ def isReleased = {
+ val field = classOf[Process.PipeSource].getDeclaredField("pipe")
+ field.setAccessible(true)
+ val pipe = field.get(this).asInstanceOf[PipedOutputStream]
+ !this.isAlive && throwsIOException { pipe.write(1) }
+ }
+ }
+
+ trait CloseChecking extends Closeable {
+ var closed = false
+ override def close() = closed = true
+ }
+ class DebugOutputStream extends ByteArrayOutputStream with CloseChecking
+ class DebugInputStream(s: String) extends ByteArrayInputStream(s.getBytes()) with CloseChecking
+ class DebugInfinityInputStream extends InputStream with CloseChecking {
+ def read() = 1
+ }
+
+ def sourceSink() = {
+ val source = new PipeSource
+ val sink = new PipeSink
+ source connectOut sink
+ source.start()
+ sink.start()
+ (source, sink)
+ }
+
+ // PipeSource and PipeSink must release resources when they end normally
+ @Test
+ def normallyEnd() {
+ val in = new DebugInputStream("aaa")
+ val (source, sink) = sourceSink()
+ val out = new DebugOutputStream
+ source connectIn in
+ sink connectOut out
+ val f = Future {
+ source.join()
+ sink.join()
+ }
+ Await.result(f, Duration(1, SECONDS))
+ assert(in.closed == true)
+ assert(out.closed == true)
+ assert(source.isReleased == true)
+ assert(sink.isReleased == true)
+ }
+
+ // PipeSource and PipeSink must release resources when interrupted while waiting for source.take()
+ @Test
+ def sourceInterrupted() {
+ val (source, sink) = sourceSink()
+ val out = new DebugOutputStream
+ sink connectOut out
+ val f = Future {
+ sink.ensureRunloopStarted()
+ source.release()
+ sink.release()
+ }
+ Await.result(f, Duration(1, SECONDS))
+ assert(out.closed == true)
+ assert(source.isReleased == true)
+ assert(sink.isReleased == true)
+ }
+
+ // PipeSource and PipeSink must release resources when interrupted while waiting for sink.take()
+ @Test
+ def sinkInterrupted() {
+ val in = new DebugInputStream("aaa")
+ val (source, sink) = sourceSink()
+ source connectIn in
+ val f = Future {
+ source.ensureRunloopStarted()
+ source.release()
+ sink.release()
+ }
+ Await.result(f, Duration(1, SECONDS))
+ assert(in.closed == true)
+ assert(source.isReleased == true)
+ assert(sink.isReleased == true)
+ }
+
+ // PipeSource and PipeSink must release resources when interrupted while copying streams
+ @Test
+ def runloopInterrupted() {
+ val in = new DebugInfinityInputStream
+ val (source, sink) = sourceSink()
+ val out = new DebugOutputStream
+ source connectIn in
+ sink connectOut out
+ val f = Future {
+ source.ensureRunloopStarted()
+ sink.ensureRunloopStarted()
+ source.release()
+ sink.release()
+ }
+ Await.result(f, Duration(1, SECONDS))
+ assert(in.closed == true)
+ assert(out.closed == true)
+ assert(source.isReleased == true)
+ assert(sink.isReleased == true)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala b/test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala
index 77a2da828e..acbf39fe23 100644
--- a/test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala
+++ b/test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala
@@ -13,6 +13,48 @@ class ScalaVersionTest {
@Test def versionUnparse() {
val v = "2.11.3"
- assertEquals(ScalaVersion(v).unparse, v)
+ assertEquals(v, ScalaVersion(v).unparse)
+ assertEquals("2.11.3-RC4", ScalaVersion("2.11.3-rc4").unparse)
+ }
+
+ // SI-9167
+ @Test def `version parses with rigor`() {
+ import settings.{ SpecificScalaVersion => V }
+ import ScalaVersion._
+
+ // no-brainers
+ assertEquals(V(2,11,7,Final), ScalaVersion("2.11.7"))
+ assertEquals(V(2,11,7,Final), ScalaVersion("2.11.7-FINAL"))
+ assertEquals(V(2,11,7,Milestone(3)), ScalaVersion("2.11.7-M3"))
+ assertEquals(V(2,11,7,RC(3)), ScalaVersion("2.11.7-RC3"))
+ assertEquals(V(2,11,7,Development("devbuild")), ScalaVersion("2.11.7-devbuild"))
+
+ // partial-brainers
+ assertEquals(V(2,11,7,Milestone(3)), ScalaVersion("2.11.7-m3"))
+ assertEquals(V(2,11,7,RC(3)), ScalaVersion("2.11.7-rc3"))
+ assertEquals(V(2,11,7,Development("maybegood")), ScalaVersion("2.11.7-maybegood"))
+ assertEquals(V(2,11,7,Development("RCCola")), ScalaVersion("2.11.7-RCCola"))
+ assertEquals(V(2,11,7,Development("RC1.5")), ScalaVersion("2.11.7-RC1.5"))
+ assertEquals(V(2,11,7,Development("")), ScalaVersion("2.11.7-"))
+ assertEquals(V(2,11,7,Development("0.5")), ScalaVersion("2.11.7-0.5"))
+ assertEquals(V(2,11,7,Development("devbuild\nSI-9167")), ScalaVersion("2.11.7-devbuild\nSI-9167"))
+ assertEquals(V(2,11,7,Development("final")), ScalaVersion("2.11.7-final"))
+
+ // oh really
+ assertEquals(NoScalaVersion, ScalaVersion("none"))
+ assertEquals(AnyScalaVersion, ScalaVersion("any"))
+
+ assertThrows[NumberFormatException] { ScalaVersion("2.11.7.2") }
+ assertThrows[NumberFormatException] { ScalaVersion("2.11.7.beta") }
+ assertThrows[NumberFormatException] { ScalaVersion("2.x.7") }
+ assertThrows[NumberFormatException] { ScalaVersion("2.-11.7") }
+ assertThrows[NumberFormatException] { ScalaVersion("2. ") }
+ assertThrows[NumberFormatException] { ScalaVersion("2.1 .7") }
+ assertThrows[NumberFormatException] { ScalaVersion("2.") }
+ assertThrows[NumberFormatException] { ScalaVersion("2..") }
+ assertThrows[NumberFormatException] { ScalaVersion("2...") }
+ assertThrows[NumberFormatException] { ScalaVersion("2-") }
+ assertThrows[NumberFormatException] { ScalaVersion("2-.") } // scalacheck territory
+ assertThrows[NumberFormatException] { ScalaVersion("any.7") }
}
}
diff --git a/test/scaladoc/run/t7905.check b/test/scaladoc/run/t7905.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/t7905.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/t7905.scala b/test/scaladoc/run/t7905.scala
new file mode 100644
index 0000000000..8570724470
--- /dev/null
+++ b/test/scaladoc/run/t7905.scala
@@ -0,0 +1,36 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ override def code = """
+ object A {
+ val foo = new B {
+ val bar = new C {
+ val baz: A.this.type = A.this
+ }
+ }
+ }
+
+ trait B {
+ type E = bar.D
+
+ val bar: C
+ }
+
+ trait C {
+ trait D
+ }
+
+ trait G {
+ type F = A.foo.E
+
+ def m(f: F) = f match {
+ case _: A.foo.bar.D => // error here
+ }
+ }
+ """
+
+ def scaladocSettings = ""
+
+ def testModel(root: Package) = ()
+}
diff --git a/test/scaladoc/scalacheck/HtmlFactoryTest.scala b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
index 51633be440..6b59355991 100644
--- a/test/scaladoc/scalacheck/HtmlFactoryTest.scala
+++ b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
@@ -779,6 +779,11 @@ object Test extends Properties("HtmlFactory") {
linkElement \@ "href" == expectedUrl && linkElement \@ "target" == "_top"
}
+ def assertValuesLink(memberName: String, expectedUrl: String): Boolean = {
+ val linkElement: NodeSeq = node \\ "div" \@ ("class", "values members") \\ "li" \@ ("name", memberName) \\ "span" \@ ("class", "permalink") \ "a"
+ linkElement \@ "href" == expectedUrl && linkElement \@ "target" == "_top"
+ }
+
}
val files = createTemplates("SI-8144.scala")
@@ -791,12 +796,12 @@ object Test extends Properties("HtmlFactory") {
property("SI-8144: Members' permalink - package") = check("some/package.html") { node =>
("type link" |: node.assertTypeLink("../index.html#some.package")) &&
- ("member: some.pack" |: node.assertMemberLink("values")("some.pack", "../index.html#some.package@pack"))
+ ("member: some.pack" |: node.assertValuesLink("some.pack", "../index.html#some.package@pack"))
}
property("SI-8144: Members' permalink - inner package") = check("some/pack/package.html") { node =>
("type link" |: node.assertTypeLink("../../index.html#some.pack.package")) &&
- ("member: SomeType (object)" |: node.assertMemberLink("values")("some.pack.SomeType", "../../index.html#some.pack.package@SomeType")) &&
+ ("member: SomeType (object)" |: node.assertValuesLink("some.pack.SomeType", "../../index.html#some.pack.package@SomeType")) &&
("member: SomeType (class)" |: node.assertMemberLink("types")("some.pack.SomeType", "../../index.html#some.pack.package@SomeTypeextendsAnyRef"))
}
@@ -809,8 +814,8 @@ object Test extends Properties("HtmlFactory") {
("type link" |: node.assertTypeLink("../../index.html#some.pack.SomeType")) &&
("constructor " |: node.assertMemberLink("constructors")("some.pack.SomeType#<init>", "../../index.html#some.pack.SomeType@<init>(arg:String):some.pack.SomeType")) &&
( "member: type TypeAlias" |: node.assertMemberLink("types")("some.pack.SomeType.TypeAlias", "../../index.html#some.pack.SomeType@TypeAlias=String")) &&
- ( "member: def >#<():Int " |: node.assertMemberLink("values")("some.pack.SomeType#>#<", "../../index.html#some.pack.SomeType@>#<():Int")) &&
- ( "member: def >@<():TypeAlias " |: node.assertMemberLink("values")("some.pack.SomeType#>@<", "../../index.html#some.pack.SomeType@>@<():SomeType.this.TypeAlias"))
+ ( "member: def >#<():Int " |: node.assertValuesLink("some.pack.SomeType#>#<", "../../index.html#some.pack.SomeType@>#<():Int")) &&
+ ( "member: def >@<():TypeAlias " |: node.assertValuesLink("some.pack.SomeType#>@<", "../../index.html#some.pack.SomeType@>@<():SomeType.this.TypeAlias"))
}
}
diff --git a/versions.properties b/versions.properties
index fa08e56346..6f7fb95ac4 100644
--- a/versions.properties
+++ b/versions.properties
@@ -1,4 +1,4 @@
-#Wed, 23 Jul 2014 08:37:26 +0200
+#Tue, 20 May 2014 10:01:37 +0200
# NOTE: this file determines the content of the scala-distribution
# via scala-dist-pom.xml and scala-library-all-pom.xml
# when adding new properties that influence a release,