-rw-r--r--  bincompat-backward.whitelist.conf | 9
-rw-r--r--  bincompat-forward.whitelist.conf | 97
-rw-r--r--  build.number | 4
-rwxr-xr-x  build.xml | 5
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 57
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala | 33
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 6
-rw-r--r--  src/eclipse/partest/.classpath | 2
-rw-r--r--  src/eclipse/repl/.classpath | 1
-rw-r--r--  src/eclipse/scaladoc/.classpath | 6
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java | 3
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java | 5
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java | 1
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java | 3
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java | 1
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java | 1
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java | 2
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java | 1
-rw-r--r--  src/forkjoin/scala/concurrent/util/Unsafe.java | 6
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Global.scala | 15
-rw-r--r--  src/library/scala/PartialFunction.scala | 4
-rw-r--r--  src/library/scala/Predef.scala | 2
-rw-r--r--  src/library/scala/beans/BeanInfo.scala | 1
-rw-r--r--  src/library/scala/collection/IterableViewLike.scala | 5
-rw-r--r--  src/library/scala/collection/Iterator.scala | 86
-rw-r--r--  src/library/scala/collection/MapLike.scala | 21
-rw-r--r--  src/library/scala/collection/SeqViewLike.scala | 22
-rw-r--r--  src/library/scala/collection/SetLike.scala | 15
-rw-r--r--  src/library/scala/collection/TraversableLike.scala | 2
-rw-r--r--  src/library/scala/collection/TraversableViewLike.scala | 22
-rw-r--r--  src/library/scala/collection/immutable/HashSet.scala | 30
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/ListMap.scala | 19
-rw-r--r--  src/library/scala/collection/immutable/Map.scala | 22
-rw-r--r--  src/library/scala/collection/immutable/PagedSeq.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/Range.scala | 19
-rw-r--r--  src/library/scala/collection/immutable/Stream.scala | 24
-rw-r--r--  src/library/scala/collection/immutable/StreamViewLike.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/AnyRefMap.scala | 18
-rw-r--r--  src/library/scala/collection/mutable/ArrayOps.scala | 5
-rw-r--r--  src/library/scala/collection/mutable/ArraySeq.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/BufferLike.scala | 7
-rw-r--r--  src/library/scala/collection/mutable/BufferProxy.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ListBuffer.scala | 7
-rw-r--r--  src/library/scala/collection/mutable/LongMap.scala | 20
-rw-r--r--  src/library/scala/collection/mutable/MapLike.scala | 12
-rw-r--r--  src/library/scala/collection/mutable/ResizableArray.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/SetLike.scala | 11
-rw-r--r--  src/library/scala/collection/parallel/RemainsIterator.scala | 9
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParHashSet.scala | 2
-rw-r--r--  src/library/scala/concurrent/BlockContext.scala | 15
-rw-r--r--  src/library/scala/concurrent/ExecutionContext.scala | 36
-rw-r--r--  src/library/scala/concurrent/Future.scala | 469
-rw-r--r--  src/library/scala/concurrent/Promise.scala | 16
-rw-r--r--  src/library/scala/concurrent/impl/AbstractPromise.java | 40
-rw-r--r--  src/library/scala/concurrent/impl/ExecutionContextImpl.scala | 208
-rw-r--r--  src/library/scala/concurrent/impl/Future.scala | 34
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 167
-rw-r--r--  src/library/scala/io/Source.scala | 4
-rw-r--r--  src/library/scala/runtime/BoxesRunTime.java | 2
-rw-r--r--  src/library/scala/runtime/ScalaRunTime.scala | 2
-rw-r--r--  src/library/scala/sys/process/BasicIO.scala | 2
-rw-r--r--  src/library/scala/sys/process/ProcessImpl.scala | 134
-rw-r--r--  src/library/scala/sys/process/package.scala | 30
-rw-r--r--  src/library/scala/util/Try.scala | 116
-rw-r--r--  src/reflect/scala/reflect/internal/Mirrors.scala | 11
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala | 1
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolTable.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 16
-rw-r--r--  src/reflect/scala/reflect/internal/TreeGen.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 18
-rw-r--r--  src/reflect/scala/reflect/internal/transform/Erasure.scala | 11
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala | 8
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala | 24
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala | 10
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css | 2
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js | 20
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala | 2
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala | 10
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala | 2
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala | 2
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala | 2
-rw-r--r--  test/files/jvm/future-spec.check | 2
-rw-r--r--  test/files/jvm/future-spec/FutureTests.scala | 273
-rw-r--r--  test/files/jvm/scala-concurrent-tck.check | 1
-rw-r--r--  test/files/jvm/scala-concurrent-tck.scala | 124
-rw-r--r--  test/files/jvm/t7146.scala | 2
-rw-r--r--  test/files/jvm/t8582.check | 3
-rw-r--r--  test/files/jvm/t8582.flags | 1
-rw-r--r--  test/files/neg/beanInfoDeprecation.check | 6
-rw-r--r--  test/files/neg/beanInfoDeprecation.flags | 1
-rw-r--r--  test/files/neg/beanInfoDeprecation.scala | 2
-rw-r--r--  test/files/neg/t8764.check | 6
-rw-r--r--  test/files/neg/t8764.flags | 1
-rw-r--r--  test/files/neg/t8764.scala | 9
-rw-r--r--  test/files/neg/t8849.check | 7
-rw-r--r--  test/files/neg/t8849.scala | 10
-rw-r--r--  test/files/pos/alladin763.scala | 37
-rw-r--r--  test/files/pos/t8462.scala | 11
-rw-r--r--  test/files/pos/t8862a.scala | 47
-rw-r--r--  test/files/pos/t8862b.scala | 12
-rw-r--r--  test/files/run/future-flatmap-exec-count.check | 1
-rw-r--r--  test/files/run/inline-ex-handlers.check | 24
-rw-r--r--  test/files/run/lub-visibility.check | 2
-rw-r--r--  test/files/run/t2251b.check | 4
-rw-r--r--  test/files/run/t4332.scala | 2
-rw-r--r--  test/files/run/t6827.check | 12
-rw-r--r--  test/files/run/t6827.scala | 20
-rw-r--r--  test/files/run/t7521/Test.scala | 5
-rw-r--r--  test/files/run/t7521/Wrapper.scala | 1
-rw-r--r--  test/files/run/t7521b.check | 7
-rw-r--r--  test/files/run/t7521b.scala | 20
-rw-r--r--  test/files/run/t8575.scala | 33
-rw-r--r--  test/files/run/t8575b.scala | 17
-rw-r--r--  test/files/run/t8575c.scala | 23
-rw-r--r--  test/files/run/t8710.scala | 17
-rw-r--r--  test/files/run/t8764.check | 5
-rw-r--r--  test/files/run/t8764.flags | 1
-rw-r--r--  test/files/run/t8764.scala | 16
-rw-r--r--  test/files/run/t8944/A_1.scala | 1
-rw-r--r--  test/files/run/t8944/A_2.scala | 6
-rw-r--r--  test/files/run/t8944/Test_1.scala | 3
-rw-r--r--  test/files/run/t8944b.scala | 9
-rw-r--r--  test/files/run/t8944c.check | 5
-rw-r--r--  test/files/run/t8944c.scala | 8
-rw-r--r--  test/files/run/t8955.scala | 12
-rw-r--r--  test/files/run/t9030.scala | 19
-rw-r--r--  test/junit/scala/collection/SeqViewTest.scala | 16
-rw-r--r--  test/junit/scala/collection/SetMapConsistencyTest.scala | 11
-rw-r--r--  test/junit/scala/collection/immutable/StreamTest.scala | 18
-rw-r--r--  test/junit/scala/sys/process/t7350.scala | 298
-rw-r--r--  test/scaladoc/run/t7905.check | 1
-rw-r--r--  test/scaladoc/run/t7905.scala | 36
-rw-r--r--  test/scaladoc/scalacheck/HtmlFactoryTest.scala | 13
-rw-r--r--  versions.properties | 14
141 files changed, 2505 insertions, 854 deletions
diff --git a/bincompat-backward.whitelist.conf b/bincompat-backward.whitelist.conf
index 076b9bb9aa..6c98dc62a1 100644
--- a/bincompat-backward.whitelist.conf
+++ b/bincompat-backward.whitelist.conf
@@ -186,6 +186,15 @@ filter {
matchName="scala.reflect.runtime.SynchronizedOps.newNestedScope"
problemName=MissingMethodProblem
},
+ // see github.com/scala/scala/pull/3925, SI-8627, SI-6440
+ {
+ matchName="scala.collection.TraversableLike.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.Stream.filteredTail"
+ problemName=MissingMethodProblem
+ },
// https://github.com/scala/scala/pull/3848 -- SI-8680
{
matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$6"
diff --git a/bincompat-forward.whitelist.conf b/bincompat-forward.whitelist.conf
index 53401eefad..87a59f2d53 100644
--- a/bincompat-forward.whitelist.conf
+++ b/bincompat-forward.whitelist.conf
@@ -272,6 +272,103 @@ filter {
matchName="scala.reflect.api.PredefTypeCreator"
problemName=MissingClassProblem
},
+ // see github.com/scala/scala/pull/3925, SI-8627, SI-6440
+ {
+ matchName="scala.collection.IterableViewLike#AbstractTransformed.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.AbstractTraversable.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.TraversableViewLike#AbstractTransformed.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.TraversableLike.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.SeqViewLike#AbstractTransformed.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.TreeSet.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.Stream.filteredTail"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.Stream.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.Stream.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.StringOps.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.TreeMap.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.concurrent.TrieMap.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofByte.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofLong.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofUnit.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofInt.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofChar.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofRef.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofDouble.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofFloat.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofBoolean.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.ArrayOps#ofShort.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.mutable.TreeSet.filterImpl"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.io.AbstractFile.filterImpl"
+ problemName=MissingMethodProblem
+ },
// https://github.com/scala/scala/pull/3848 -- SI-8680
{
matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$6"
diff --git a/build.number b/build.number
index 21ce660958..142ae5027d 100644
--- a/build.number
+++ b/build.number
@@ -1,7 +1,7 @@
#Tue Sep 11 19:21:09 CEST 2007
version.major=2
-version.minor=11
-version.patch=3
+version.minor=12
+version.patch=0
# This is the -N part of a version. if it's 0, it's dropped from maven versions.
version.bnum=0
diff --git a/build.xml b/build.xml
index eb86b4fb1a..4589d991d3 100755
--- a/build.xml
+++ b/build.xml
@@ -1537,8 +1537,9 @@ TODO:
<target name="test.bc-opt" description="Optimized version of test.bc."> <optimized name="test.bc"/></target>
<target name="test.bc" depends="bc.prepare, pack.lib, pack.reflect" unless="test.bc.skip">
- <bc.check project="library"/>
- <bc.check project="reflect"/>
+ <echo message="binary compatibility testing disabled in the 2.12.x branch"/>
+ <!-- <bc.check project="library"/> -->
+ <!-- <bc.check project="reflect"/> -->
</target>
<!-- ===========================================================================
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index d9f56b47fa..a1cec2ee0b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -1138,7 +1138,7 @@ abstract class GenICode extends SubComponent {
// a package here, check if there's a package object.
val sym = (
if (!tree.symbol.isPackageClass) tree.symbol
- else tree.symbol.info.member(nme.PACKAGE) match {
+ else tree.symbol.info.packageObject match {
case NoSymbol => abort("Cannot use package as value: " + tree)
case s =>
devWarning(s"Found ${tree.symbol} where a package object is required. Converting to ${s.moduleClass}")
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
index daf36ce374..89d7acaa11 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
@@ -915,7 +915,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
def genLoadModule(tree: Tree): BType = {
val module = (
if (!tree.symbol.isPackageClass) tree.symbol
- else tree.symbol.info.member(nme.PACKAGE) match {
+ else tree.symbol.info.packageObject match {
case NoSymbol => abort(s"SI-5604: Cannot use package as value: $tree")
case s => abort(s"SI-5604: found package class where package object expected: $tree")
}
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index c291961447..e47fdac938 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -446,8 +446,10 @@ abstract class ExplicitOuter extends InfoTransform
//
// See SI-6552 for an example of why `sym.owner.enclMethod hasAnnotation ScalaInlineClass`
// is not suitable; if we make a method-local class non-private, it mangles outer pointer names.
- if (currentClass != sym.owner ||
- (closestEnclMethod(currentOwner) hasAnnotation ScalaInlineClass))
+ def enclMethodIsInline = closestEnclMethod(currentOwner) hasAnnotation ScalaInlineClass
+ // SI-8710 The extension method condition reflects our knowledge that a call to `new Meter(12).privateMethod`
+ // will later be rewritten (in erasure) to `Meter.privateMethod$extension(12)`.
+ if ((currentClass != sym.owner || enclMethodIsInline) && !sym.isMethodWithExtension)
sym.makeNotPrivate(sym.owner)
val qsym = qual.tpe.widen.typeSymbol
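A minimal sketch of the SI-8710 case described in the comment above, using the hypothetical `Meter` value class it names:

    class Meter(val value: Int) extends AnyVal {
      def double: Int = privateMethod     // erasure rewrites this call to Meter.privateMethod$extension(...)
      private def privateMethod: Int = value * 2
    }
    // Since the call ends up in the companion's extension method, ExplicitOuter
    // no longer strips `private` from privateMethod for this case.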
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 228c9da624..6349fc3fb9 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -208,7 +208,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
def makeExtensionMethodSymbol = {
val extensionName = extensionNames(origMeth).head.toTermName
val extensionMeth = (
- companion.moduleClass.newMethod(extensionName, tree.pos.focus, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
+ companion.moduleClass.newMethod(extensionName, tree.pos.focus, origMeth.flags & ~OVERRIDE & ~PROTECTED & ~PRIVATE & ~LOCAL | FINAL)
setAnnotations origMeth.annotations
)
origMeth.removeAnnotation(TailrecClass) // it's on the extension method, now.
diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
index 56ed0ee16c..2f4771e9d4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
@@ -75,7 +75,7 @@ abstract class ConstantFolder {
case nme.AND => Constant(x.booleanValue & y.booleanValue)
case nme.EQ => Constant(x.booleanValue == y.booleanValue)
case nme.NE => Constant(x.booleanValue != y.booleanValue)
- case _ => null
+ case _ => null
}
private def foldSubrangeOp(op: Name, x: Constant, y: Constant): Constant = op match {
case nme.OR => Constant(x.intValue | y.intValue)
@@ -95,14 +95,20 @@ abstract class ConstantFolder {
case nme.MUL => Constant(x.intValue * y.intValue)
case nme.DIV => Constant(x.intValue / y.intValue)
case nme.MOD => Constant(x.intValue % y.intValue)
- case _ => null
+ case _ => null
}
private def foldLongOp(op: Name, x: Constant, y: Constant): Constant = op match {
case nme.OR => Constant(x.longValue | y.longValue)
case nme.XOR => Constant(x.longValue ^ y.longValue)
case nme.AND => Constant(x.longValue & y.longValue)
- case nme.LSL => Constant(x.longValue << y.longValue)
+ case nme.LSL if x.tag <= IntTag
+ => Constant(x.intValue << y.longValue)
+ case nme.LSL => Constant(x.longValue << y.longValue)
+ case nme.LSR if x.tag <= IntTag
+ => Constant(x.intValue >>> y.longValue)
case nme.LSR => Constant(x.longValue >>> y.longValue)
+ case nme.ASR if x.tag <= IntTag
+ => Constant(x.intValue >> y.longValue)
case nme.ASR => Constant(x.longValue >> y.longValue)
case nme.EQ => Constant(x.longValue == y.longValue)
case nme.NE => Constant(x.longValue != y.longValue)
@@ -115,7 +121,7 @@ abstract class ConstantFolder {
case nme.MUL => Constant(x.longValue * y.longValue)
case nme.DIV => Constant(x.longValue / y.longValue)
case nme.MOD => Constant(x.longValue % y.longValue)
- case _ => null
+ case _ => null
}
private def foldFloatOp(op: Name, x: Constant, y: Constant): Constant = op match {
case nme.EQ => Constant(x.floatValue == y.floatValue)
@@ -129,7 +135,7 @@ abstract class ConstantFolder {
case nme.MUL => Constant(x.floatValue * y.floatValue)
case nme.DIV => Constant(x.floatValue / y.floatValue)
case nme.MOD => Constant(x.floatValue % y.floatValue)
- case _ => null
+ case _ => null
}
private def foldDoubleOp(op: Name, x: Constant, y: Constant): Constant = op match {
case nme.EQ => Constant(x.doubleValue == y.doubleValue)
@@ -143,7 +149,7 @@ abstract class ConstantFolder {
case nme.MUL => Constant(x.doubleValue * y.doubleValue)
case nme.DIV => Constant(x.doubleValue / y.doubleValue)
case nme.MOD => Constant(x.doubleValue % y.doubleValue)
- case _ => null
+ case _ => null
}
private def foldBinop(op: Name, x: Constant, y: Constant): Constant = {
@@ -162,7 +168,7 @@ abstract class ConstantFolder {
case _ => null
}
catch {
- case ex: ArithmeticException => null
+ case _: ArithmeticException => null
}
}
}
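The new `IntTag` guards matter because shifting an Int by a Long yields an Int; for example, these expressions now fold to Int constants rather than Longs:

    val a: Int = 1 << 2L     // folds to 4
    val b: Int = -1 >>> 1L   // folds to 2147483647 (Int.MaxValue)
    val c: Int = -8 >> 1L    // folds to -4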
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index eb29ccf4e1..da0ae4ee79 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -808,10 +808,11 @@ trait Contexts { self: Analyzer =>
private def collectImplicitImports(imp: ImportInfo): List[ImplicitInfo] = {
val qual = imp.qual
+ val qualSym = qual.tpe.typeSymbol
val pre =
- if (qual.tpe.typeSymbol.isPackageClass)
+ if (qualSym.isPackageClass)
// SI-6225 important if the imported symbol is inherited by the package object.
- singleType(qual.tpe, qual.tpe member nme.PACKAGE)
+ qualSym.packageObject.typeOfThis
else
qual.tpe
def collect(sels: List[ImportSelector]): List[ImplicitInfo] = sels match {
@@ -884,7 +885,8 @@ trait Contexts { self: Analyzer =>
Some(collectImplicitImports(imports.head))
} else if (owner.isPackageClass) {
// the corresponding package object may contain implicit members.
- Some(collectImplicits(owner.tpe.implicitMembers, owner.tpe))
+ val pre = owner.packageObject.typeOfThis
+ Some(collectImplicits(pre.implicitMembers, pre))
} else Some(Nil)
}
@@ -954,52 +956,11 @@ trait Contexts { self: Analyzer =>
private def importedAccessibleSymbol(imp: ImportInfo, name: Name, requireExplicit: Boolean): Symbol =
imp.importedSymbol(name, requireExplicit) filter (s => isAccessible(s, imp.qual.tpe, superAccess = false))
- /** Is `sym` defined in package object of package `pkg`?
- * Since sym may be defined in some parent of the package object,
- * we cannot inspect its owner only; we have to go through the
- * info of the package object. However to avoid cycles we'll check
- * what other ways we can before pushing that way.
+ /** Must `sym` be defined in the package object of package `pkg`, if
+ * it is selected from a prefix with `pkg` as its type symbol?
*/
- def isInPackageObject(sym: Symbol, pkg: Symbol): Boolean = {
- def uninitialized(what: String) = {
- log(s"Cannot look for $sym in package object of $pkg; $what is not initialized.")
- false
- }
- def pkgClass = if (pkg.isTerm) pkg.moduleClass else pkg
- def matchesInfo = (
- // need to be careful here to not get a cyclic reference during bootstrap
- if (pkg.isInitialized) {
- val module = pkg.info member nme.PACKAGEkw
- if (module.isInitialized)
- module.info.member(sym.name).alternatives contains sym
- else
- uninitialized("" + module)
- }
- else uninitialized("" + pkg)
- )
- def inPackageObject(sym: Symbol) = (
- // To be in the package object, one of these must be true:
- // 1) sym.owner is a package object class, and sym.owner.owner is the package class for `pkg`
- // 2) sym.owner is inherited by the correct package object class
- // We try to establish 1) by inspecting the owners directly, and then we try
- // to rule out 2), and only if both those fail do we resort to looking in the info.
- !sym.hasPackageFlag && sym.owner.exists && (
- if (sym.owner.isPackageObjectClass)
- sym.owner.owner == pkgClass
- else
- !sym.owner.isPackageClass && matchesInfo
- )
- )
-
- // An overloaded symbol might not have the expected owner!
- // The alternatives must be inspected directly.
- pkgClass.isPackageClass && (
- if (sym.isOverloaded)
- sym.alternatives forall (isInPackageObject(_, pkg))
- else
- inPackageObject(sym)
- )
- }
+ def isInPackageObject(sym: Symbol, pkg: Symbol): Boolean =
+ pkg.isPackage && sym.owner != pkg
def isNameInScope(name: Name) = lookupSymbol(name, _ => true).isSuccess
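To make the simplified check concrete (hypothetical names `pkg`, `greet`, `C`): a symbol reached through a package prefix is owned either by the package itself or by its package object, and only the latter case matters here.

    package object pkg { def greet: String = "hi" }  // greet is owned by the package object class
    package pkg { class C }                          // C is owned by the package pkg itself
    // For a reference like `pkg.greet`, the new test (pkg.isPackage && sym.owner != pkg) is true;
    // for `pkg.C` the owner is the package, so it is false.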
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 74c28122a1..7ed4fe1f88 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -1012,15 +1012,12 @@ trait Implicits {
}
case None =>
if (pre.isStable && !pre.typeSymbol.isExistentiallyBound) {
- val companion = companionSymbolOf(sym, context)
- companion.moduleClass match {
- case mc: ModuleClassSymbol =>
- val infos =
- for (im <- mc.implicitMembers.toList) yield new ImplicitInfo(im.name, singleType(pre, companion), im)
- if (infos.nonEmpty)
- infoMap += (sym -> infos)
- case _ =>
- }
+ val pre1 =
+ if (sym.isPackageClass) sym.packageObject.typeOfThis
+ else singleType(pre, companionSymbolOf(sym, context))
+ val infos = pre1.implicitMembers.iterator.map(mem => new ImplicitInfo(mem.name, pre1, mem)).toList
+ if (infos.nonEmpty)
+ infoMap += (sym -> infos)
}
val bts = tp.baseTypeSeq
var i = 1
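A sketch of the implicit scope the rewritten branch computes through the package object (hypothetical `mypkg`/`Widget`; relies on the rule that implicit members of a package object are in the implicit scope of types prefixed by that package):

    package mypkg { class Widget }
    package object mypkg { implicit val defaultWidget: Widget = new Widget }
    // In a separate file, with no imports:
    //   implicitly[mypkg.Widget]   // resolved from the package object's implicit members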
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index bedca88974..26517587f3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -84,24 +84,6 @@ trait SyntheticMethods extends ast.TreeDSL {
def accessors = clazz.caseFieldAccessors
val arity = accessors.size
- // If this is ProductN[T1, T2, ...], accessorLub is the lub of T1, T2, ..., .
- // !!! Hidden behind -Xexperimental due to bummer type inference bugs.
- // Refining from Iterator[Any] leads to types like
- //
- // Option[Int] { def productIterator: Iterator[String] }
- //
- // appearing legitimately, but this breaks invariant places
- // like Tags and Arrays which are not robust and infer things
- // which they shouldn't.
- val accessorLub = (
- if (settings.Xexperimental) {
- global.lub(accessors map (_.tpe.finalResultType)) match {
- case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents)
- case tp => tp
- }
- }
- else AnyTpe
- )
def forwardToRuntime(method: Symbol): Tree =
forwardMethod(method, getMember(ScalaRunTimeModule, (method.name prepend "_")))(mkThis :: _)
@@ -122,8 +104,8 @@ trait SyntheticMethods extends ast.TreeDSL {
}
}
def productIteratorMethod = {
- createMethod(nme.productIterator, iteratorOfType(accessorLub))(_ =>
- gen.mkMethodCall(ScalaRunTimeModule, nme.typedProductIterator, List(accessorLub), List(mkThis))
+ createMethod(nme.productIterator, iteratorOfType(AnyTpe))(_ =>
+ gen.mkMethodCall(ScalaRunTimeModule, nme.typedProductIterator, List(AnyTpe), List(mkThis))
)
}
@@ -243,7 +225,7 @@ trait SyntheticMethods extends ast.TreeDSL {
List(
Product_productPrefix -> (() => constantNullary(nme.productPrefix, clazz.name.decode)),
Product_productArity -> (() => constantNullary(nme.productArity, arity)),
- Product_productElement -> (() => perElementMethod(nme.productElement, accessorLub)(mkThisSelect)),
+ Product_productElement -> (() => perElementMethod(nme.productElement, AnyTpe)(mkThisSelect)),
Product_iterator -> (() => productIteratorMethod),
Product_canEqual -> (() => canEqualMethod)
// This is disabled pending a reimplementation which doesn't add any
@@ -377,7 +359,14 @@ trait SyntheticMethods extends ast.TreeDSL {
for (ddef @ DefDef(_, _, _, _, _, _) <- templ.body ; if isRewrite(ddef.symbol)) {
val original = ddef.symbol
- val newAcc = deriveMethod(ddef.symbol, name => context.unit.freshTermName(name + "$")) { newAcc =>
+ val i = original.owner.caseFieldAccessors.indexOf(original)
+ def freshAccessorName = {
+ devWarning(s"Unable to find $original among case accessors of ${original.owner}: ${original.owner.caseFieldAccessors}")
+ context.unit.freshTermName(original.name + "$")
+ }
+ def nameSuffixedByParamIndex = original.name.append(nme.CASE_ACCESSOR + "$" + i).toTermName
+ val newName = if (i < 0) freshAccessorName else nameSuffixedByParamIndex
+ val newAcc = deriveMethod(ddef.symbol, name => newName) { newAcc =>
newAcc.makePublic
newAcc resetFlag (ACCESSOR | PARAMACCESSOR | OVERRIDE)
ddef.rhs.duplicate
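Deriving the accessor name from the parameter index keeps it stable across recompilations (SI-8944); a minimal sketch with a hypothetical case class:

    case class A(private val x: String)
    // Pattern matching still works from other compilation units,
    //   A("s") match { case A(s) => s }
    // because the synthesized public accessor's name is now derived from `x` and its
    // parameter index rather than from a fresh name that changes on every compilation.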
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index aae2d24b32..20db85e665 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -538,7 +538,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
val qual = typedQualifier { atPos(tree.pos.makeTransparent) {
tree match {
- case Ident(_) => Ident(rootMirror.getPackageObjectWithMember(pre, sym))
+ case Ident(_) =>
+ val packageObject =
+ if (sym.owner.isModuleClass) sym.owner.sourceModule // historical optimization, perhaps no longer needed
+ else pre.typeSymbol.packageObject
+ Ident(packageObject)
case Select(qual, _) => Select(qual, nme.PACKAGEkw)
case SelectFromTypeTree(qual, _) => Select(qual, nme.PACKAGEkw)
}
diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath
index 7f28868d95..7e2f119193 100644
--- a/src/eclipse/partest/.classpath
+++ b/src/eclipse/partest/.classpath
@@ -5,7 +5,7 @@
<classpathentry combineaccessrules="false" kind="src" path="/repl"/>
<classpathentry kind="var" path="M2_REPO/com/googlecode/java-diff-utils/diffutils/1.3.0/diffutils-1.3.0.jar"/>
<classpathentry kind="var" path="M2_REPO/org/scala-sbt/test-interface/1.0/test-interface-1.0.jar"/>
- <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11.0-M7/1.0.0-RC8/scala-partest_2.11.0-M7-1.0.0-RC8.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11/1.0.0/scala-partest_2.11-1.0.0.jar"/>
<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
diff --git a/src/eclipse/repl/.classpath b/src/eclipse/repl/.classpath
index 8ff9aabfbf..cbaabb9af1 100644
--- a/src/eclipse/repl/.classpath
+++ b/src/eclipse/repl/.classpath
@@ -3,7 +3,6 @@
<classpathentry kind="src" path="repl"/>
<classpathentry combineaccessrules="false" kind="src" path="/asm"/>
<classpathentry kind="var" path="M2_REPO/jline/jline/2.12/jline-2.12.jar"/>
- <!-- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/repl/jline-2.12.jar"/> -->
<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
diff --git a/src/eclipse/scaladoc/.classpath b/src/eclipse/scaladoc/.classpath
index c8f0e89b8a..ee6427176a 100644
--- a/src/eclipse/scaladoc/.classpath
+++ b/src/eclipse/scaladoc/.classpath
@@ -6,8 +6,8 @@
<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
<classpathentry combineaccessrules="false" kind="src" path="/partest-extras"/>
- <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-xml_2.11.0-M7/1.0.0-RC7/scala-xml_2.11.0-M7-1.0.0-RC7.jar"/>
- <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-parser-combinators_2.11.0-M7/1.0.0-RC5/scala-parser-combinators_2.11.0-M7-1.0.0-RC5.jar"/>
- <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11.0-M7/1.0.0-RC8/scala-partest_2.11.0-M7-1.0.0-RC8.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-xml_2.11/1.0.2/scala-xml_2.11-1.0.2.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.1/scala-parser-combinators_2.11-1.0.1.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11/1.0.0/scala-partest_2.11-1.0.0.jar"/>
<classpathentry kind="output" path="build-quick-scaladoc"/>
</classpath>
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
index 6578504155..9bd378c61c 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
@@ -23,6 +23,7 @@ import java.util.concurrent.TimeUnit;
* @since 1.8
* @author Doug Lea
*/
+@Deprecated
/*public*/ abstract class CountedCompleter<T> extends ForkJoinTask<T> {
private static final long serialVersionUID = 5232453752276485070L;
@@ -471,6 +472,7 @@ import java.util.concurrent.TimeUnit;
* @since 1.7
* @author Doug Lea
*/
+@Deprecated
public class ForkJoinPool extends AbstractExecutorService {
/*
@@ -3578,6 +3580,7 @@ public class ForkJoinPool extends AbstractExecutorService {
* }
* }}</pre>
*/
+ @Deprecated
public static interface ManagedBlocker {
/**
* Possibly blocks the current thread, for example waiting for
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
index fd1e132b07..b4f5c24ca9 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
@@ -180,6 +180,7 @@ import java.lang.reflect.Constructor;
* @since 1.7
* @author Doug Lea
*/
+@Deprecated
public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
/*
@@ -391,6 +392,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* any ForkJoinPool will call helpExpungeStaleExceptions when its
* pool becomes isQuiescent.
*/
+ @Deprecated
static final class ExceptionNode extends WeakReference<ForkJoinTask<?>> {
final Throwable ex;
ExceptionNode next;
@@ -1330,6 +1332,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* to be compliant with AbstractExecutorService constraints
* when used in ForkJoinPool.
*/
+ @Deprecated
static final class AdaptedRunnable<T> extends ForkJoinTask<T>
implements RunnableFuture<T> {
final Runnable runnable;
@@ -1349,6 +1352,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
/**
* Adaptor for Runnables without results
*/
+ @Deprecated
static final class AdaptedRunnableAction extends ForkJoinTask<Void>
implements RunnableFuture<Void> {
final Runnable runnable;
@@ -1366,6 +1370,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
/**
* Adaptor for Callables
*/
+ @Deprecated
static final class AdaptedCallable<T> extends ForkJoinTask<T>
implements RunnableFuture<T> {
final Callable<? extends T> callable;
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
index e62fc6eb71..e00fb5cc43 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
@@ -20,6 +20,7 @@ package scala.concurrent.forkjoin;
* @since 1.7
* @author Doug Lea
*/
+@Deprecated
public class ForkJoinWorkerThread extends Thread {
/*
* ForkJoinWorkerThreads are managed by ForkJoinPools and perform
diff --git a/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java b/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java
index 07e81b395d..47d52af895 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java
@@ -53,6 +53,7 @@ import java.util.concurrent.locks.LockSupport;
* @author Doug Lea
* @param <E> the type of elements held in this collection
*/
+@Deprecated
public class LinkedTransferQueue<E> extends AbstractQueue<E>
implements TransferQueue<E>, java.io.Serializable {
private static final long serialVersionUID = -3223113410248163686L;
@@ -416,6 +417,7 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
* unnecessary ordering constraints: Writes that are intrinsically
* ordered wrt other accesses or CASes use simple relaxed forms.
*/
+ @Deprecated
static final class Node {
final boolean isData; // false if this is a request node
volatile Object item; // initially non-null if isData; CASed to match
@@ -789,6 +791,7 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
return count;
}
+ @Deprecated
final class Itr implements Iterator<E> {
private Node nextNode; // next node to return item for
private E nextItem; // the corresponding item
diff --git a/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java b/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java
index 1e7cdd952d..f4a77f0f61 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java
@@ -133,6 +133,7 @@ package scala.concurrent.forkjoin;
* @since 1.7
* @author Doug Lea
*/
+@Deprecated
public abstract class RecursiveAction extends ForkJoinTask<Void> {
private static final long serialVersionUID = 5232453952276485070L;
diff --git a/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java b/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
index d1e1547143..097b7cda1f 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
@@ -36,6 +36,7 @@ package scala.concurrent.forkjoin;
* @since 1.7
* @author Doug Lea
*/
+@Deprecated
public abstract class RecursiveTask<V> extends ForkJoinTask<V> {
private static final long serialVersionUID = 5232453952276485270L;
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java b/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java
index 19237c9092..3ea1af66bc 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java
@@ -32,6 +32,7 @@ import java.util.Random;
* @since 1.7
* @author Doug Lea
*/
+@Deprecated
public class ThreadLocalRandom extends Random {
// same constants as Random, but must be redeclared because private
private static final long multiplier = 0x5DEECE66DL;
@@ -80,6 +81,7 @@ public class ThreadLocalRandom extends Random {
*
* @return the current thread's {@code ThreadLocalRandom}
*/
+ @Deprecated
public static ThreadLocalRandom current() {
return localRandom.get();
}
diff --git a/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java b/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java
index 7d149c7ae5..4fcd8ea601 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java
@@ -37,6 +37,7 @@ import java.util.concurrent.*;
* @author Doug Lea
* @param <E> the type of elements held in this collection
*/
+@Deprecated
public interface TransferQueue<E> extends BlockingQueue<E> {
/**
* Transfers the element to a waiting consumer immediately, if possible.
diff --git a/src/forkjoin/scala/concurrent/util/Unsafe.java b/src/forkjoin/scala/concurrent/util/Unsafe.java
index ef893c94d9..d82e4bbdd5 100644
--- a/src/forkjoin/scala/concurrent/util/Unsafe.java
+++ b/src/forkjoin/scala/concurrent/util/Unsafe.java
@@ -7,14 +7,12 @@
\* */
package scala.concurrent.util;
-
-
-
import java.lang.reflect.Field;
-
+@Deprecated
public final class Unsafe {
+ @Deprecated
public final static sun.misc.Unsafe instance;
static {
try {
diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala
index 174254d523..fdedaa600c 100644
--- a/src/interactive/scala/tools/nsc/interactive/Global.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -19,6 +19,8 @@ import scala.annotation.{ elidable, tailrec }
import scala.language.implicitConversions
import scala.tools.nsc.typechecker.Typers
import scala.util.control.Breaks._
+import java.util.concurrent.ConcurrentHashMap
+import scala.collection.JavaConverters.mapAsScalaMapConverter
/**
* This trait allows the IDE to have an instance of the PC that
@@ -160,19 +162,18 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
/** A map of all loaded files to the rich compilation units that correspond to them.
*/
- val unitOfFile = new LinkedHashMap[AbstractFile, RichCompilationUnit] with
- SynchronizedMap[AbstractFile, RichCompilationUnit] {
+ val unitOfFile = mapAsScalaMapConverter(new ConcurrentHashMap[AbstractFile, RichCompilationUnit] {
override def put(key: AbstractFile, value: RichCompilationUnit) = {
val r = super.put(key, value)
- if (r.isEmpty) debugLog("added unit for "+key)
+ if (r == null) debugLog("added unit for "+key)
r
}
- override def remove(key: AbstractFile) = {
+ override def remove(key: Any) = {
val r = super.remove(key)
- if (r.nonEmpty) debugLog("removed unit for "+key)
+ if (r != null) debugLog("removed unit for "+key)
r
}
- }
+ }).asScala
/** A set containing all those files that need to be removed
* Units are removed by getUnit, typically once a unit is finished compiled.
@@ -1091,7 +1092,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
val implicitlyAdded = viaView != NoSymbol
members.add(sym, pre, implicitlyAdded) { (s, st) =>
new TypeMember(s, st,
- context.isAccessible(if (s.hasGetter) s.getter(s.owner) else s, pre, superAccess && !implicitlyAdded),
+ context.isAccessible(if (s.hasGetter) s.getterIn(s.owner) else s, pre, superAccess && !implicitlyAdded),
inherited,
viaView)
}
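The wrapping pattern used for `unitOfFile`, shown in isolation with hypothetical key/value types: a ConcurrentHashMap viewed through asScala behaves as a thread-safe mutable.Map without explicit locking.

    import java.util.concurrent.ConcurrentHashMap
    import scala.collection.JavaConverters.mapAsScalaMapConverter

    val units = new ConcurrentHashMap[String, Int]().asScala  // scala.collection.mutable.Map[String, Int]
    units.put("A.scala", 1)   // None on first insert (Option, not null)
    units.get("A.scala")      // Some(1)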
diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala
index fba759eb32..98dd35d306 100644
--- a/src/library/scala/PartialFunction.scala
+++ b/src/library/scala/PartialFunction.scala
@@ -161,10 +161,10 @@ trait PartialFunction[-A, +B] extends (A => B) { self =>
object PartialFunction {
/** Composite function produced by `PartialFunction#orElse` method
*/
- private class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B]) extends PartialFunction[A, B] {
+ private class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B]) extends scala.runtime.AbstractPartialFunction[A, B] {
def isDefinedAt(x: A) = f1.isDefinedAt(x) || f2.isDefinedAt(x)
- def apply(x: A): B = f1.applyOrElse(x, f2)
+ override def apply(x: A): B = f1.applyOrElse(x, f2)
override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = {
val z = f1.applyOrElse(x, checkFallback[B])
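Changing OrElse's parent to AbstractPartialFunction does not change how composition behaves; a quick sketch:

    val one: PartialFunction[Int, String] = { case 1 => "one" }
    val two: PartialFunction[Int, String] = { case 2 => "two" }
    val both = one orElse two                   // an OrElse instance
    both(2)                                     // "two"
    both.applyOrElse(3, (_: Int) => "neither")  // "neither", each branch tried at most once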
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 7f717aa6e4..59c89df3fa 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -126,7 +126,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
def optManifest[T](implicit m: OptManifest[T]) = m
// Minor variations on identity functions
- def identity[A](x: A): A = x // @see `conforms` for the implicit version
+ @inline def identity[A](x: A): A = x // @see `conforms` for the implicit version
@inline def implicitly[T](implicit e: T) = e // for summoning implicit values from the nether world -- TODO: when dependent method types are on by default, give this result type `e.type`, so that inliner has better chance of knowing which method to inline in calls like `implicitly[MatchingStrategy[Option]].zero`
@inline def locally[T](x: T): T = x // to communicate intent and avoid unmoored statements
diff --git a/src/library/scala/beans/BeanInfo.scala b/src/library/scala/beans/BeanInfo.scala
index 799e93e71a..d7f0a1618b 100644
--- a/src/library/scala/beans/BeanInfo.scala
+++ b/src/library/scala/beans/BeanInfo.scala
@@ -17,4 +17,5 @@ package scala.beans
*
* @author Ross Judson (rjudson@managedobjects.com)
*/
+@deprecated(message = "the generation of BeanInfo classes is no longer supported", since = "2.12.0")
class BeanInfo extends scala.annotation.Annotation
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index 668190f700..e25cc78b6c 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -69,6 +69,10 @@ trait IterableViewLike[+A,
trait Appended[B >: A] extends super.Appended[B] with Transformed[B] {
def iterator = self.iterator ++ rest
}
+
+ trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B] {
+ def iterator = fst.toIterator ++ self
+ }
trait Filtered extends super.Filtered with Transformed[A] {
def iterator = self.iterator filter pred
@@ -110,6 +114,7 @@ trait IterableViewLike[+A,
} with AbstractTransformed[(A1, B)] with ZippedAll[A1, B]
protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val fst = that } with AbstractTransformed[B] with Prepended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 660cc5a42a..4e3abf2286 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -182,7 +182,7 @@ object Iterator {
}
}
def hasNext = (current ne null) && (current.hasNext || advance())
- def next() = if (hasNext) current.next else Iterator.empty.next
+ def next() = if (hasNext) current.next() else Iterator.empty.next()
override def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] =
new ConcatIterator(current, queue :+ (() => that.toIterator))
@@ -191,11 +191,55 @@ object Iterator {
private[scala] final class JoinIterator[+A](lhs: Iterator[A], that: => GenTraversableOnce[A]) extends Iterator[A] {
private[this] lazy val rhs: Iterator[A] = that.toIterator
def hasNext = lhs.hasNext || rhs.hasNext
- def next = if (lhs.hasNext) lhs.next else rhs.next
+ def next() = if (lhs.hasNext) lhs.next() else rhs.next()
override def ++[B >: A](that: => GenTraversableOnce[B]) =
new ConcatIterator(this, Vector(() => that.toIterator))
}
+
+ /** Creates a delegating iterator capped by a limit count. Negative limit means unbounded.
+ * Lazily skip to start on first evaluation. Avoids daisy-chained iterators due to slicing.
+ */
+ private[scala] final class SliceIterator[A](val underlying: Iterator[A], start: Int, limit: Int) extends AbstractIterator[A] {
+ private var remaining = limit
+ private var dropping = start
+ @inline private def unbounded = remaining < 0
+ private def skip(): Unit =
+ while (dropping > 0) {
+ if (underlying.hasNext) {
+ underlying.next()
+ dropping -= 1
+ } else
+ dropping = 0
+ }
+ def hasNext = { skip(); remaining != 0 && underlying.hasNext }
+ def next() = {
+ skip()
+ if (remaining > 0) {
+ remaining -= 1
+ underlying.next()
+ }
+ else if (unbounded) underlying.next()
+ else empty.next()
+ }
+ override protected def sliceIterator(from: Int, until: Int): Iterator[A] = {
+ val lo = from max 0
+ def adjustedBound =
+ if (unbounded) -1
+ else 0 max (remaining - lo)
+ val rest =
+ if (until < 0) adjustedBound // respect current bound, if any
+ else if (until <= lo) 0 // empty
+ else if (unbounded) until - lo // now finite
+ else adjustedBound min (until - lo) // keep lesser bound
+ if (rest == 0) empty
+ else {
+ dropping += lo
+ remaining = rest
+ this
+ }
+ }
+ }
}
import Iterator.empty
@@ -307,11 +351,11 @@ trait Iterator[+A] extends TraversableOnce[A] {
/** Selects first ''n'' values of this iterator.
*
* @param n the number of values to take
- * @return an iterator producing only of the first `n` values of this iterator, or else the
+ * @return an iterator producing only the first `n` values of this iterator, or else the
* whole iterator, if it produces fewer than `n` values.
* @note Reuse: $consumesAndProducesIterator
*/
- def take(n: Int): Iterator[A] = slice(0, n)
+ def take(n: Int): Iterator[A] = sliceIterator(0, n max 0)
/** Advances this iterator past the first ''n'' elements, or the length of the iterator, whichever is smaller.
*
@@ -320,34 +364,29 @@ trait Iterator[+A] extends TraversableOnce[A] {
* it omits the first `n` values.
* @note Reuse: $consumesAndProducesIterator
*/
- def drop(n: Int): Iterator[A] = slice(n, Int.MaxValue)
+ def drop(n: Int): Iterator[A] = sliceIterator(n, -1)
/** Creates an iterator returning an interval of the values produced by this iterator.
*
* @param from the index of the first element in this iterator which forms part of the slice.
- * @param until the index of the first element following the slice.
+ * If negative, the slice starts at zero.
+ * @param until the index of the first element following the slice. If negative, the slice is empty.
* @return an iterator which advances this iterator past the first `from` elements using `drop`,
* and then takes `until - from` elements, using `take`.
* @note Reuse: $consumesAndProducesIterator
*/
- def slice(from: Int, until: Int): Iterator[A] = {
+ def slice(from: Int, until: Int): Iterator[A] = sliceIterator(from, until max 0)
+
+ /** Creates an optionally bounded slice, unbounded if `until` is negative. */
+ protected def sliceIterator(from: Int, until: Int): Iterator[A] = {
val lo = from max 0
- var toDrop = lo
- while (toDrop > 0 && self.hasNext) {
- self.next()
- toDrop -= 1
- }
+ val rest =
+ if (until < 0) -1 // unbounded
+ else if (until <= lo) 0 // empty
+ else until - lo // finite
- new AbstractIterator[A] {
- private var remaining = until - lo
- def hasNext = remaining > 0 && self.hasNext
- def next(): A =
- if (remaining > 0) {
- remaining -= 1
- self.next()
- }
- else empty.next()
- }
+ if (rest == 0) empty
+ else new Iterator.SliceIterator(this, lo, rest)
}
/** Creates a new iterator that maps all produced values of this iterator
@@ -1148,9 +1187,8 @@ trait Iterator[+A] extends TraversableOnce[A] {
* $willNotTerminateInf
*/
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit = {
- require(start >= 0 && (start < xs.length || xs.length == 0), s"start $start out of range ${xs.length}")
var i = start
- val end = start + math.min(len, xs.length - start)
+ val end = start + math.min(len, xs.length - start)
while (i < end && hasNext) {
xs(i) = next()
i += 1
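With the sliceIterator override above, chained take/drop calls adjust one SliceIterator in place instead of stacking wrappers; for example:

    val it = Iterator.from(1).drop(2).take(5).drop(1)  // a single SliceIterator, re-sliced twice
    it.toList                                          // List(4, 5, 6, 7)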
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 5ec7d5c615..38a598321f 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -230,11 +230,15 @@ self =>
protected class FilteredKeys(p: A => Boolean) extends AbstractMap[A, B] with DefaultMap[A, B] {
override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
def iterator = self.iterator.filter(kv => p(kv._1))
- override def contains(key: A) = self.contains(key) && p(key)
+ override def contains(key: A) = p(key) && self.contains(key)
def get(key: A) = if (!p(key)) None else self.get(key)
}
/** Filters this map by retaining only keys satisfying a predicate.
+ *
+ * '''Note''': the predicate must accept any key of type `A`, not just those already
+ * present in the map, as the predicate is tested before the underlying map is queried.
+ *
* @param p the predicate used to test keys
* @return an immutable map consisting only of those key value pairs of this map where the key satisfies
* the predicate `p`. The resulting map wraps the original map without copying any elements.
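Concretely, the predicate now runs before the wrapped map is consulted, so it must accept any key of type `A`, for example:

    val wrapped = Map("a" -> 1, "b" -> 2).filterKeys(Set("a", "c"))  // a Set used as the predicate
    wrapped.contains("c")  // false: the Set is consulted first, then the underlying map
    wrapped.get("b")       // None: "b" is in the map but fails the predicate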
@@ -319,11 +323,20 @@ self =>
res
}
- /* Overridden for efficiency. */
- override def toSeq: Seq[(A, B)] = toBuffer[(A, B)]
+ override def toSeq: Seq[(A, B)] = {
+ if (isEmpty) Vector.empty[(A, B)]
+ else {
+ // Default appropriate for immutable collections; mutable collections override this
+ val vb = Vector.newBuilder[(A, B)]
+ foreach(vb += _)
+ vb.result
+ }
+ }
+
override def toBuffer[C >: (A, B)]: mutable.Buffer[C] = {
val result = new mutable.ArrayBuffer[C](size)
- copyToBuffer(result)
+ // Faster to let the map iterate itself than to defer through copyToBuffer
+ foreach(result += _)
result
}
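The effect of the new toSeq, sketched: the static type is unchanged, but the result for an immutable map is now an immutable Vector rather than a Buffer.

    val s: Seq[(Int, String)] = Map(1 -> "a", 2 -> "b").toSeq
    // Built with Vector.newBuilder (for immutable maps), so callers can safely share `s`
    // without observing later mutation of an underlying ArrayBuffer.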
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index e719f19c78..92d5547fd2 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -95,6 +95,14 @@ trait SeqViewLike[+A,
if (idx < self.length) self(idx) else restSeq(idx - self.length)
}
+ trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B] {
+ protected[this] lazy val fstSeq = fst.toSeq
+ def length: Int = fstSeq.length + self.length
+ def apply(idx: Int): B =
+ if (idx < fstSeq.length) fstSeq(idx)
+ else self.apply(idx - fstSeq.length)
+ }
+
trait Filtered extends super.Filtered with Transformed[A] {
protected[this] lazy val index = {
var len = 0
@@ -178,21 +186,12 @@ trait SeqViewLike[+A,
final override protected[this] def viewIdentifier = "P"
}
- trait Prepended[B >: A] extends Transformed[B] {
- protected[this] val fst: B
- override def iterator: Iterator[B] = Iterator.single(fst) ++ self.iterator
- def length: Int = 1 + self.length
- def apply(idx: Int): B =
- if (idx == 0) fst
- else self.apply(idx - 1)
- final override protected[this] def viewIdentifier = "A"
- }
-
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { protected[this] val fst = that } with AbstractTransformed[B] with Prepended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
@@ -211,7 +210,6 @@ trait SeqViewLike[+A,
val patch = _patch
val replaced = _replaced
} with AbstractTransformed[B] with Patched[B]
- protected def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with AbstractTransformed[B] with Prepended[B]
// see comment in IterableViewLike.
protected override def newTaken(n: Int): Transformed[A] = newSliced(SliceInterval(0, n))
@@ -241,7 +239,7 @@ trait SeqViewLike[+A,
}
override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
- newPrepended(elem).asInstanceOf[That]
+ newPrepended(elem :: Nil).asInstanceOf[That]
override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
++(Iterator.single(elem))(bf)
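With newPrepended now taking a whole collection, both of the following stay lazy views; forcing them yields the expected order:

    val view = Seq(3, 4).view
    (Seq(1, 2) ++: view).force  // Seq(1, 2, 3, 4): whole-collection prepend via Prepended
    (0 +: view).force           // Seq(0, 3, 4): +: now goes through newPrepended(elem :: Nil)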
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index 3e549f72cd..80a344e6a8 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -77,11 +77,20 @@ self =>
protected[this] override def parCombiner = ParSet.newCombiner[A]
- /* Overridden for efficiency. */
- override def toSeq: Seq[A] = toBuffer[A]
+ // Default collection type appropriate for immutable collections; mutable collections override this
+ override def toSeq: Seq[A] = {
+ if (isEmpty) Vector.empty[A]
+ else {
+ val vb = Vector.newBuilder[A]
+ foreach(vb += _)
+ vb.result
+ }
+ }
+
override def toBuffer[A1 >: A]: mutable.Buffer[A1] = {
val result = new mutable.ArrayBuffer[A1](size)
- copyToBuffer(result)
+ // Faster to let the set iterate itself than to defer through copyToBuffer
+ foreach(result += _)
result
}
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index d3a7db6968..a8731a51b1 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -253,7 +253,7 @@ trait TraversableLike[+A, +Repr] extends Any
b.result
}
- private def filterImpl(p: A => Boolean, isFlipped: Boolean): Repr = {
+ private[scala] def filterImpl(p: A => Boolean, isFlipped: Boolean): Repr = {
val b = newBuilder
for (x <- this)
if (p(x) != isFlipped) b += x
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index 5926c69ebf..0901d749c3 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -189,6 +189,15 @@ trait TraversableViewLike[+A,
}
final override protected[this] def viewIdentifier = "A"
}
+
+ trait Prepended[B >: A] extends Transformed[B] {
+ protected[this] val fst: GenTraversable[B]
+ def foreach[U](f: B => U) {
+ fst foreach f
+ self foreach f
+ }
+ final override protected[this] def viewIdentifier = "A"
+ }
trait Filtered extends Transformed[A] {
protected[this] val pred: A => Boolean
@@ -222,11 +231,15 @@ trait TraversableViewLike[+A,
final override protected[this] def viewIdentifier = "D"
}
- override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
+ override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That =
newAppended(xs.seq.toTraversable).asInstanceOf[That]
-// was: if (bf.isInstanceOf[ByPassCanBuildFrom]) newAppended(that).asInstanceOf[That]
-// else super.++[B, That](that)(bf)
- }
+
+ override def ++:[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That =
+ newPrepended(xs.seq.toTraversable).asInstanceOf[That]
+
+ // Need second one because of optimization in TraversableLike
+ override def ++:[B >: A, That](xs: Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That =
+ newPrepended(xs).asInstanceOf[That]
override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = {
newMapped(f).asInstanceOf[That]
@@ -253,6 +266,7 @@ trait TraversableViewLike[+A,
*/
protected def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val fst = that } with AbstractTransformed[B] with Prepended[B]
protected def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
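
The `Prepended` transformer now carries a whole `GenTraversable[B]` in `fst` rather than a single element, which lets `+:` (via `elem :: Nil` above) and both `++:` overloads share one code path. A rough standalone sketch of the traversal order that `Prepended.foreach` implements, not the real view machinery:

object PrependedSketch {
  // Visit the prepended elements first, then the underlying collection.
  def prependedForeach[B](fst: Traversable[B], self: Traversable[B])(f: B => Unit): Unit = {
    fst foreach f
    self foreach f
  }

  def main(args: Array[String]): Unit = {
    prependedForeach(List(-1, 0), List(1, 2, 3))(x => print(x + " "))  // -1 0 1 2 3
    println()
  }
}
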
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index e4b7371ed4..837143784b 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -194,7 +194,7 @@ class HashSet[A] extends AbstractSet[A]
protected def get0(key: A, hash: Int, level: Int): Boolean = false
- def updated0(key: A, hash: Int, level: Int): HashSet[A] =
+ private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] =
new HashSet.HashSet1(key, hash)
protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = this
@@ -256,10 +256,10 @@ object HashSet extends ImmutableSetFactory[HashSet] {
class HashSet1[A](private[HashSet] val key: A, private[HashSet] val hash: Int) extends LeafHashSet[A] {
override def size = 1
- override def get0(key: A, hash: Int, level: Int): Boolean =
+ override protected def get0(key: A, hash: Int, level: Int): Boolean =
(hash == this.hash && key == this.key)
- override def subsetOf0(that: HashSet[A], level: Int) = {
+ override protected def subsetOf0(that: HashSet[A], level: Int) = {
// check if that contains this.key
// we use get0 with our key and hash at the correct level instead of calling contains,
// which would not work since that might not be a top-level HashSet
@@ -267,7 +267,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
that.get0(key, hash, level)
}
- override def updated0(key: A, hash: Int, level: Int): HashSet[A] =
+ override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash && key == this.key) this
else {
if (hash != this.hash) {
@@ -312,7 +312,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
override private[immutable] def diff0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] =
if (that.get0(key, hash, level)) null else this
- override def removed0(key: A, hash: Int, level: Int): HashSet[A] =
+ override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash && key == this.key) null else this
override protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] =
@@ -326,10 +326,10 @@ object HashSet extends ImmutableSetFactory[HashSet] {
override def size = ks.size
- override def get0(key: A, hash: Int, level: Int): Boolean =
+ override protected def get0(key: A, hash: Int, level: Int): Boolean =
if (hash == this.hash) ks.contains(key) else false
- override def subsetOf0(that: HashSet[A], level: Int) = {
+ override protected def subsetOf0(that: HashSet[A], level: Int) = {
// we have to check each element
// we use get0 with our hash at the correct level instead of calling contains,
// which would not work since that might not be a top-level HashSet
@@ -337,11 +337,11 @@ object HashSet extends ImmutableSetFactory[HashSet] {
ks.forall(key => that.get0(key, hash, level))
}
- override def updated0(key: A, hash: Int, level: Int): HashSet[A] =
+ override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash) new HashSetCollision1(hash, ks + key)
else makeHashTrieSet(this.hash, this, hash, new HashSet1(key, hash), level)
- override def union0(that: LeafHashSet[A], level: Int): HashSet[A] = that match {
+ override private[immutable] def union0(that: LeafHashSet[A], level: Int): HashSet[A] = that match {
case that if that.hash != this.hash =>
// different hash code, so there is no need to investigate further.
// Just create a branch node containing the two.
@@ -374,7 +374,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
}
}
- override def union0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match {
+ override private[immutable] def union0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match {
case that: LeafHashSet[A] =>
// switch to the simpler Tree/Leaf implementation
this.union0(that, level)
@@ -431,7 +431,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
}
}
- override def removed0(key: A, hash: Int, level: Int): HashSet[A] =
+ override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash) {
val ks1 = ks - key
ks1.size match {
@@ -528,7 +528,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
override def size = size0
- override def get0(key: A, hash: Int, level: Int): Boolean = {
+ override protected def get0(key: A, hash: Int, level: Int): Boolean = {
val index = (hash >>> level) & 0x1f
val mask = (1 << index)
if (bitmap == - 1) {
@@ -540,7 +540,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
false
}
- override def updated0(key: A, hash: Int, level: Int): HashSet[A] = {
+ override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] = {
val index = (hash >>> level) & 0x1f
val mask = (1 << index)
val offset = Integer.bitCount(bitmap & (mask-1))
@@ -842,7 +842,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
case _ => this
}
- override def removed0(key: A, hash: Int, level: Int): HashSet[A] = {
+ override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = {
val index = (hash >>> level) & 0x1f
val mask = (1 << index)
val offset = Integer.bitCount(bitmap & (mask-1))
@@ -879,7 +879,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
}
}
- override def subsetOf0(that: HashSet[A], level: Int): Boolean = if (that eq this) true else that match {
+ override protected def subsetOf0(that: HashSet[A], level: Int): Boolean = if (that eq this) true else that match {
case that: HashTrieSet[A] if this.size0 <= that.size0 =>
// create local mutable copies of members
var abm = this.bitmap
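
Most of this hunk only tightens visibility (`updated0` becomes `private[collection]` so parallel combiners can still reach it, the other helpers become `protected`), but the index arithmetic it keeps relying on is visible above: five hash bits select a slot per level, and `Integer.bitCount` over the bitmap prefix turns that slot into an offset in the compressed child array. A small standalone illustration of that computation, not the `HashSet` code itself:

object HashTrieIndexSketch {
  // For a hash and trie level, compute the 0-31 slot and, given a node's bitmap,
  // the slot's position in the compressed child array.
  def slotAndOffset(hash: Int, level: Int, bitmap: Int): (Int, Int) = {
    val index  = (hash >>> level) & 0x1f          // 5 bits per level
    val mask   = 1 << index
    val offset = Integer.bitCount(bitmap & (mask - 1))
    (index, offset)
  }

  def main(args: Array[String]): Unit = {
    val bitmap = (1 << 3) | (1 << 7) | (1 << 20)  // three occupied slots
    println(slotAndOffset(7, 0, bitmap))          // (7,1): one occupied slot (3) below it
    println(slotAndOffset(20, 0, bitmap))         // (20,2)
  }
}
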
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index aa9dec2761..a8f1149615 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -13,7 +13,7 @@ package immutable
import generic._
import mutable.{Builder, ListBuffer}
import scala.annotation.tailrec
-import java.io._
+import java.io.{ObjectOutputStream, ObjectInputStream}
/** A class for immutable linked lists representing ordered collections
 *  of elements of type `A`.
@@ -85,7 +85,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
with Product
with GenericTraversableTemplate[A, List]
with LinearSeqOptimized[A, List[A]]
- with Serializable {
+ with scala.Serializable {
override def companion: GenericCompanion[List] = List
import scala.collection.{Iterable, Traversable, Seq, IndexedSeq}
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index 7c40e84280..c5773338f5 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -29,7 +29,11 @@ object ListMap extends ImmutableMapFactory[ListMap] {
new MapCanBuildFrom[A, B]
def empty[A, B]: ListMap[A, B] = EmptyListMap.asInstanceOf[ListMap[A, B]]
- private object EmptyListMap extends ListMap[Any, Nothing] { }
+ @SerialVersionUID(-8256686706655863282L)
+ private object EmptyListMap extends ListMap[Any, Nothing] {
+ override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: Any) = false
+ }
}
/** This class implements immutable maps using a list-based data structure.
@@ -159,7 +163,6 @@ extends AbstractMap[A, B]
*/
override def apply(k: A): B1 = apply0(this, k)
-
@tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 =
if (cur.isEmpty) throw new NoSuchElementException("key not found: "+k)
else if (k == cur.key) cur.value
@@ -176,7 +179,16 @@ extends AbstractMap[A, B]
@tailrec private def get0(cur: ListMap[A, B1], k: A): Option[B1] =
if (k == cur.key) Some(cur.value)
else if (cur.next.nonEmpty) get0(cur.next, k) else None
-
+
+
+ override def contains(key: A): Boolean = contains0(this, key)
+
+ @tailrec private def contains0(cur: ListMap[A, B1], k: A): Boolean =
+ if (k == cur.key) true
+ else if (cur.next.nonEmpty) contains0(cur.next, k)
+ else false
+
+
/** This method allows one to create a new map with an additional mapping
* from `key` to `value`. If the map contains already a mapping for `key`,
* it will be overridden by this function.
@@ -186,6 +198,7 @@ extends AbstractMap[A, B]
new m.Node[B2](k, v)
}
+
/** Creates a new mapping without the given `key`.
* If the map does not contain a mapping for the given key, the
* method returns the same map.
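
`contains` now walks the backing list directly with a tail-recursive loop, like the existing `apply0`/`get0`, so a membership test no longer allocates an `Option` per probe. The same linear scan over a plain association list, as a self-contained sketch (the `containsKey` helper is hypothetical):

import scala.annotation.tailrec

object LinearLookupSketch {
  // Tail-recursive key scan over an association list, mirroring ListMap.contains0.
  @tailrec
  def containsKey[K](kvs: List[(K, Any)], k: K): Boolean = kvs match {
    case Nil            => false
    case (key, _) :: tl => if (key == k) true else containsKey(tl, k)
  }

  def main(args: Array[String]): Unit = {
    val kvs = List("a" -> 1, "b" -> 2, "c" -> 3)
    println(containsKey(kvs, "b"))   // true
    println(containsKey(kvs, "z"))   // false
  }
}
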
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index 5178d5a862..63ddcb18cf 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -94,6 +94,8 @@ object Map extends ImmutableMapFactory[Map] {
private object EmptyMap extends AbstractMap[Any, Nothing] with Map[Any, Nothing] with Serializable {
override def size: Int = 0
+ override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: Any) = false
def get(key: Any): Option[Nothing] = None
def iterator: Iterator[(Any, Nothing)] = Iterator.empty
override def updated [B1] (key: Any, value: B1): Map[Any, B1] = new Map1(key, value)
@@ -103,6 +105,8 @@ object Map extends ImmutableMapFactory[Map] {
class Map1[A, +B](key1: A, value1: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 1
+ override def apply(key: A) = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: A) = key == key1
def get(key: A): Option[B] =
if (key == key1) Some(value1) else None
def iterator = Iterator((key1, value1))
@@ -119,6 +123,11 @@ object Map extends ImmutableMapFactory[Map] {
class Map2[A, +B](key1: A, value1: B, key2: A, value2: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 2
+ override def apply(key: A) =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: A) = (key == key1) || (key == key2)
def get(key: A): Option[B] =
if (key == key1) Some(value1)
else if (key == key2) Some(value2)
@@ -140,6 +149,12 @@ object Map extends ImmutableMapFactory[Map] {
class Map3[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 3
+ override def apply(key: A) =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else if (key == key3) value3
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: A) = (key == key1) || (key == key2) || (key == key3)
def get(key: A): Option[B] =
if (key == key1) Some(value1)
else if (key == key2) Some(value2)
@@ -164,6 +179,13 @@ object Map extends ImmutableMapFactory[Map] {
class Map4[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B, key4: A, value4: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 4
+ override def apply(key: A) =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else if (key == key3) value3
+ else if (key == key4) value4
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: A) = (key == key1) || (key == key2) || (key == key3) || (key == key4)
def get(key: A): Option[B] =
if (key == key1) Some(value1)
else if (key == key2) Some(value2)
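
`Map1` through `Map4` now answer `apply` and `contains` with straight key comparisons instead of delegating to `get`, which avoids the `Option` wrapper on the hot path. A stripped-down sketch of the same idea for a hypothetical two-entry class outside the real hierarchy:

object SmallMapSketch {
  // Two fixed entries: apply/contains compare keys directly, only get pays for the Option.
  final class Pair2[K, +V](k1: K, v1: V, k2: K, v2: V) {
    def apply(key: K): V =
      if (key == k1) v1
      else if (key == k2) v2
      else throw new NoSuchElementException("key not found: " + key)
    def contains(key: K): Boolean = (key == k1) || (key == k2)
    def get(key: K): Option[V] =
      if (key == k1) Some(v1) else if (key == k2) Some(v2) else None
  }

  def main(args: Array[String]): Unit = {
    val m = new Pair2("a", 1, "b", 2)
    println(m("b"))            // 2
    println(m.contains("c"))   // false
  }
}
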
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 3a64820be6..fb9f6703a9 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -12,7 +12,7 @@ package scala
package collection
package immutable
-import java.io._
+import java.io.{File, FileReader, Reader}
import scala.util.matching.Regex
import scala.reflect.ClassTag
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 3ae8a2c342..0b380517f8 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -202,7 +202,24 @@ extends scala.collection.AbstractSeq[Int]
copy(locationAfterN(n), end, step)
}
)
-
+
+ /** Creates a new range containing the elements starting at `from` up to but not including `until`.
+ *
+ * $doesNotUseBuilders
+ *
+ * @param from the element at which to start
+ * @param until the element at which to end (not included in the range)
+ * @return a new range consisting of a contiguous interval of values in the old range
+ */
+ override def slice(from: Int, until: Int): Range =
+ if (from <= 0) take(until)
+ else if (until >= numRangeElements && numRangeElements >= 0) drop(from)
+ else {
+ val fromValue = locationAfterN(from)
+ if (from >= until) newEmptyRange(fromValue)
+ else new Range.Inclusive(fromValue, locationAfterN(until-1), step)
+ }
+
/** Creates a new range containing all the elements of this range except the last one.
*
* $doesNotUseBuilders
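
The `slice` override keeps the result a `Range`, computed by bound arithmetic (`locationAfterN`) rather than through a builder. A quick usage check of the intended semantics against the patched method:

object RangeSliceSketch {
  def main(args: Array[String]): Unit = {
    val r = 1 to 19 by 2                        // 1, 3, 5, ..., 19 (10 elements)
    println(r.slice(2, 5))                      // Range(5, 7, 9)
    println(r.slice(-3, 2))                     // from <= 0, so take(2): Range(1, 3)
    println(r.slice(8, 100))                    // until >= length, so drop(8): Range(17, 19)
    println(r.slice(2, 5).isInstanceOf[Range])  // true: no intermediate collection is built
  }
}
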
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 714d5117d3..91a4e1c43d 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -499,6 +499,16 @@ self =>
)
else super.flatMap(f)(bf)
+ override private[scala] def filterImpl(p: A => Boolean, isFlipped: Boolean): Stream[A] = {
+ // optimization: drop leading prefix of elems for which f returns false
+ // var rest = this dropWhile (!p(_)) - forget DRY principle - GC can't collect otherwise
+ var rest = this
+ while (!rest.isEmpty && p(rest.head) == isFlipped) rest = rest.tail
+ // private utility func to avoid `this` on stack (would be needed for the lazy arg)
+ if (rest.nonEmpty) Stream.filteredTail(rest, p, isFlipped)
+ else Stream.Empty
+ }
+
/** Returns all the elements of this `Stream` that satisfy the predicate `p`
* in a new `Stream` - i.e., it is still a lazy data structure. The order of
* the elements is preserved
@@ -512,15 +522,7 @@ self =>
* // produces
* }}}
*/
- override def filter(p: A => Boolean): Stream[A] = {
- // optimization: drop leading prefix of elems for which f returns false
- // var rest = this dropWhile (!p(_)) - forget DRY principle - GC can't collect otherwise
- var rest = this
- while (!rest.isEmpty && !p(rest.head)) rest = rest.tail
- // private utility func to avoid `this` on stack (would be needed for the lazy arg)
- if (rest.nonEmpty) Stream.filteredTail(rest, p)
- else Stream.Empty
- }
+ override def filter(p: A => Boolean): Stream[A] = filterImpl(p, isFlipped = false) // This override is only left in 2.11 because of binary compatibility, see PR #3925
override final def withFilter(p: A => Boolean): StreamWithFilter = new StreamWithFilter(p)
@@ -1284,8 +1286,8 @@ object Stream extends SeqFactory[Stream] {
else cons(start, range(start + step, end, step))
}
- private[immutable] def filteredTail[A](stream: Stream[A], p: A => Boolean) = {
- cons(stream.head, stream.tail filter p)
+ private[immutable] def filteredTail[A](stream: Stream[A], p: A => Boolean, isFlipped: Boolean) = {
+ cons(stream.head, stream.tail.filterImpl(p, isFlipped))
}
private[immutable] def collectedTail[A, B, That](head: B, stream: Stream[A], pf: PartialFunction[A, B], bf: CanBuildFrom[Stream[A], B, That]) = {
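
With `filter` and `filterNot` both routed through `filterImpl(p, isFlipped)`, the old behaviour is preserved for both: the leading non-matching prefix is consumed eagerly (so it can be collected), and everything after it stays lazy via `filteredTail`. A small demonstration against the patched `Stream` API:

object StreamFilterSketch {
  def main(args: Array[String]): Unit = {
    val s = Stream.from(1)                 // infinite

    val odds = s.filter(_ % 2 == 1)        // filterImpl(p, isFlipped = false): lazy
    println(odds.take(3).toList)           // List(1, 3, 5)

    val big = s.filterNot(_ < 1000)        // filterImpl(p, isFlipped = true)
    println(big.head)                      // 1000: the prefix 1..999 was dropped eagerly
  }
}
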
diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala
index c2eb85815d..4d7eaeff2a 100644
--- a/src/library/scala/collection/immutable/StreamViewLike.scala
+++ b/src/library/scala/collection/immutable/StreamViewLike.scala
@@ -53,6 +53,7 @@ extends SeqView[A, Coll]
/** boilerplate */
protected override def newForced[B](xs: => scala.collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected override def newAppended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newPrepended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { protected[this] val fst = that } with AbstractTransformed[B] with Prepended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected override def newFlatMapped[B](f: A => scala.collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
@@ -67,7 +68,6 @@ extends SeqView[A, Coll]
protected override def newPatched[B >: A](_from: Int, _patch: scala.collection.GenSeq[B], _replaced: Int): Transformed[B] = {
new { val from = _from; val patch = _patch; val replaced = _replaced } with AbstractTransformed[B] with Patched[B]
}
- protected override def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with AbstractTransformed[B] with Prepended[B]
override def stringPrefix = "StreamView"
}
diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala
index 47fb66744e..2c7e76c5f5 100644
--- a/src/library/scala/collection/mutable/AnyRefMap.scala
+++ b/src/library/scala/collection/mutable/AnyRefMap.scala
@@ -335,6 +335,24 @@ extends AbstractMap[K, V]
arm
}
+ override def +[V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = {
+ val arm = clone().asInstanceOf[AnyRefMap[K, V1]]
+ arm += kv
+ arm
+ }
+
+ override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): AnyRefMap[K, V1] = {
+ val arm = clone().asInstanceOf[AnyRefMap[K, V1]]
+ xs.foreach(kv => arm += kv)
+ arm
+ }
+
+ override def updated[V1 >: V](key: K, value: V1): AnyRefMap[K, V1] = {
+ val arm = clone().asInstanceOf[AnyRefMap[K, V1]]
+ arm += (key, value)
+ arm
+ }
+
private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B) {
var i,j = 0
while (i < _hashes.length & j < _size) {
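
These overrides keep the result typed as `AnyRefMap` by cloning and mutating the clone, where the inherited versions would widen the result to a plain `Map`. Usage sketch:

import scala.collection.mutable.AnyRefMap

object AnyRefMapOpsSketch {
  def main(args: Array[String]): Unit = {
    val m = AnyRefMap("a" -> 1, "b" -> 2)

    val m2 = m + ("c" -> 3)                 // still an AnyRefMap[String, Int]
    val m3 = m ++ List("d" -> 4, "e" -> 5)
    val m4 = m.updated("a", 10)

    println(m2("c"))    // 3
    println(m3.size)    // 4
    println(m4("a"))    // 10
    println(m("a"))     // 1: the receiver is left untouched
  }
}
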
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 00491ef20e..2bc41b5802 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -40,9 +40,8 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
arrayElementClass(repr.getClass)
override def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) {
- var l = math.min(len, repr.length)
- if (xs.length - start < l) l = xs.length - start max 0
- Array.copy(repr, 0, xs, start, l)
+ val l = len min repr.length min (xs.length - start)
+ if (l > 0) Array.copy(repr, 0, xs, start, l)
}
override def toArray[U >: T : ClassTag]: Array[U] = {
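
The rewritten `copyToArray` clamps the length once and only calls `Array.copy` when there is something to copy, so a destination with no remaining room no longer triggers a negative-length copy. The same guard in isolation, as a hypothetical `safeCopy` helper:

object CopyClampSketch {
  // Copy at most `len` elements of `src` into `dest` at `start`, clamped to what fits.
  def safeCopy[T](src: Array[T], dest: Array[T], start: Int, len: Int): Int = {
    val l = len min src.length min (dest.length - start)
    if (l > 0) Array.copy(src, 0, dest, start, l)
    math.max(l, 0)                                     // elements actually copied
  }

  def main(args: Array[String]): Unit = {
    val dest = new Array[Int](3)
    println(safeCopy(Array(1, 2, 3, 4), dest, 1, 10))  // 2: clamped by the destination
    println(safeCopy(Array(1, 2, 3), dest, 5, 2))      // 0: no room, and no exception
    println(dest.toList)                               // List(0, 1, 2)
  }
}
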
diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala
index 577a838315..5a50f4fb27 100644
--- a/src/library/scala/collection/mutable/ArraySeq.scala
+++ b/src/library/scala/collection/mutable/ArraySeq.scala
@@ -87,7 +87,7 @@ extends AbstractSeq[A]
*/
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
val len1 = len min (xs.length - start) min length
- Array.copy(array, 0, xs, start, len1)
+ if (len1 > 0) Array.copy(array, 0, xs, start, len1)
}
override def clone(): ArraySeq[A] = {
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index 3c57387c03..8d24538620 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -211,13 +211,6 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
*/
override def stringPrefix: String = "Buffer"
- /** Returns the current evolving(!) state of this buffer as a read-only sequence.
- *
- * @return A sequence that forwards to this buffer for all its operations.
- */
- @deprecated("The returned sequence changes as this buffer is mutated. For an immutable copy, use, e.g., toList.", "2.11.0")
- def readOnly: scala.collection.Seq[A] = toSeq
-
/** Creates a new collection containing both the elements of this collection and the provided
* traversable object.
*
diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala
index d9632cce91..2d52831d37 100644
--- a/src/library/scala/collection/mutable/BufferProxy.scala
+++ b/src/library/scala/collection/mutable/BufferProxy.scala
@@ -43,8 +43,6 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
*/
def +=(elem: A): this.type = { self.+=(elem); this }
- override def readOnly = self.readOnly
-
/** Appends a number of elements provided by a traversable object.
*
* @param xs the traversable object.
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index 5e838d0d88..a611048da2 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -12,7 +12,7 @@ package mutable
import generic._
import immutable.{List, Nil, ::}
-import java.io._
+import java.io.{ObjectOutputStream, ObjectInputStream}
import scala.annotation.migration
/** A `Buffer` implementation backed by a list. It provides constant time
@@ -408,9 +408,6 @@ final class ListBuffer[A]
}
}
- @deprecated("The result of this method will change along with this buffer, which is often not what's expected.", "2.11.0")
- override def readOnly: List[A] = start
-
// Private methods
/** Copy contents of this buffer */
@@ -426,7 +423,7 @@ final class ListBuffer[A]
}
override def equals(that: Any): Boolean = that match {
- case that: ListBuffer[_] => this.readOnly equals that.readOnly
+ case that: ListBuffer[_] => this.start equals that.start
case _ => super.equals(that)
}
diff --git a/src/library/scala/collection/mutable/LongMap.scala b/src/library/scala/collection/mutable/LongMap.scala
index ef488a3697..eea33e3044 100644
--- a/src/library/scala/collection/mutable/LongMap.scala
+++ b/src/library/scala/collection/mutable/LongMap.scala
@@ -415,6 +415,24 @@ extends AbstractMap[Long, V]
lm
}
+ override def +[V1 >: V](kv: (Long, V1)): LongMap[V1] = {
+ val lm = clone().asInstanceOf[LongMap[V1]]
+ lm += kv
+ lm
+ }
+
+ override def ++[V1 >: V](xs: GenTraversableOnce[(Long, V1)]): LongMap[V1] = {
+ val lm = clone().asInstanceOf[LongMap[V1]]
+ xs.foreach(kv => lm += kv)
+ lm
+ }
+
+ override def updated[V1 >: V](key: Long, value: V1): LongMap[V1] = {
+ val lm = clone().asInstanceOf[LongMap[V1]]
+ lm += (key, value)
+ lm
+ }
+
/** Applies a function to all keys of this map. */
def foreachKey[A](f: Long => A) {
if ((extraKeys & 1) == 1) f(0L)
@@ -541,7 +559,7 @@ object LongMap {
/** Creates a new `LongMap` from keys and values.
* Equivalent to but more efficient than `LongMap((keys zip values): _*)`.
*/
- def fromZip[V](keys: Iterable[Long], values: Iterable[V]): LongMap[V] = {
+ def fromZip[V](keys: collection.Iterable[Long], values: collection.Iterable[V]): LongMap[V] = {
val sz = math.min(keys.size, values.size)
val lm = new LongMap[V](sz * 2)
val ki = keys.iterator
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index 6230fc23aa..8ba31d47b6 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -59,6 +59,18 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
override protected[this] def newBuilder: Builder[(A, B), This] = empty
protected[this] override def parCombiner = ParMap.newCombiner[A, B]
+
+ /** Converts this $coll to a sequence.
+ *
+ * ```Note```: assumes a fast `size` method. Subclasses should override if this is not true.
+ */
+ override def toSeq: collection.Seq[(A, B)] = {
+ // ArrayBuffer for efficiency, preallocated to the right size.
+ val result = new ArrayBuffer[(A, B)](size)
+ foreach(result += _)
+ result
+ }
+
/** Adds a new key/value pair to this map and optionally returns previously bound value.
* If the map already contains a
diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala
index c3047522e2..85a299216e 100644
--- a/src/library/scala/collection/mutable/ResizableArray.scala
+++ b/src/library/scala/collection/mutable/ResizableArray.scala
@@ -74,7 +74,7 @@ trait ResizableArray[A] extends IndexedSeq[A]
*/
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
val len1 = len min (xs.length - start) min length
- Array.copy(array, 0, xs, start, len1)
+ if (len1 > 0) Array.copy(array, 0, xs, start, len1)
}
//##########################################################################
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index a377b03124..cbe7a639dd 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -72,6 +72,17 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
protected[this] override def parCombiner = ParSet.newCombiner[A]
+ /** Converts this $coll to a sequence.
+ *
+ * ```Note```: assumes a fast `size` method. Subclasses should override if this is not true.
+ */
+ override def toSeq: collection.Seq[A] = {
+ // ArrayBuffer for efficiency, preallocated to the right size.
+ val result = new ArrayBuffer[A](size)
+ foreach(result += _)
+ result
+ }
+
/** Adds an element to this $coll.
*
* @param elem the element to be added
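
For mutable sets (and, symmetrically, the mutable `MapLike` above), `toSeq` copies into an `ArrayBuffer` presized via `size`, so the returned sequence is a snapshot that is unaffected by later mutation of the set. A quick check:

import scala.collection.mutable

object MutableToSeqSketch {
  def main(args: Array[String]): Unit = {
    val s = mutable.Set(1, 2, 3)
    val snapshot = s.toSeq       // copied into a buffer sized to s.size
    s += 4
    println(snapshot.size)       // 3: independent of the later mutation
    println(s.size)              // 4
  }
}
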
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index 5f2ceac0e0..7bb278b038 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -456,6 +456,15 @@ self =>
}
it
}
+ /** Drop implemented as simple eager consumption. */
+ override def drop(n: Int): IterableSplitter[T] = {
+ var i = 0
+ while (i < n && hasNext) {
+ next()
+ i += 1
+ }
+ this
+ }
override def take(n: Int): IterableSplitter[T] = newTaken(n)
override def slice(from1: Int, until1: Int): IterableSplitter[T] = newSliceInternal(newTaken(until1), from1)
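
The new `drop` on the splitter just consumes up to `n` elements and returns the same splitter. The identical eager pattern on an ordinary `Iterator`, for reference (a plain sketch, not the parallel-collections type):

object EagerDropSketch {
  // Advance the iterator by at most n elements and hand back the same instance.
  def dropEagerly[T](it: Iterator[T], n: Int): Iterator[T] = {
    var i = 0
    while (i < n && it.hasNext) {
      it.next()
      i += 1
    }
    it
  }

  def main(args: Array[String]): Unit =
    println(dropEagerly(Iterator(1, 2, 3, 4, 5), 2).toList)   // List(3, 4, 5)
}
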
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index 65a632470e..3a1ec7fff8 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -197,7 +197,7 @@ extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetC
while (i < chunksz) {
val v = chunkarr(i).asInstanceOf[T]
val hc = trie.computeHash(v)
- trie = trie.updated0(v, hc, rootbits)
+ trie = trie.updated0(v, hc, rootbits) // internal API, private[collection]
i += 1
}
i = 0
diff --git a/src/library/scala/concurrent/BlockContext.scala b/src/library/scala/concurrent/BlockContext.scala
index 747cc393c3..2b8ed4c7ca 100644
--- a/src/library/scala/concurrent/BlockContext.scala
+++ b/src/library/scala/concurrent/BlockContext.scala
@@ -41,7 +41,7 @@ package scala.concurrent
trait BlockContext {
/** Used internally by the framework;
- * Designates (and eventually executes) a thunk which potentially blocks the calling `Thread`.
+ * Designates (and eventually executes) a thunk which potentially blocks the calling `java.lang.Thread`.
*
* Clients must use `scala.concurrent.blocking` or `scala.concurrent.Await` instead.
*/
@@ -53,9 +53,16 @@ object BlockContext {
override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = thunk
}
+ /**
+ * @return the `BlockContext` that will be used if no other is found.
+ **/
+ def defaultBlockContext: BlockContext = DefaultBlockContext
+
private val contextLocal = new ThreadLocal[BlockContext]()
- /** Obtain the current thread's current `BlockContext`. */
+ /**
+ @return the `BlockContext` that would be used for the current `java.lang.Thread` at this point
+ **/
def current: BlockContext = contextLocal.get match {
case null => Thread.currentThread match {
case ctx: BlockContext => ctx
@@ -64,7 +71,9 @@ object BlockContext {
case some => some
}
- /** Pushes a current `BlockContext` while executing `body`. */
+ /**
+ * Installs a current `BlockContext` around executing `body`.
+ **/
def withBlockContext[T](blockContext: BlockContext)(body: => T): T = {
val old = contextLocal.get // can be null
try {
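
`defaultBlockContext` exposes the fallback context, `withBlockContext` installs one around `body`, and `scala.concurrent.blocking` consults `BlockContext.current` when it runs. A usage sketch; the logging context here is purely illustrative and assumes the `defaultBlockContext` accessor added above:

import scala.concurrent.{BlockContext, CanAwait, blocking}

object BlockContextSketch {
  // An illustrative BlockContext that logs before delegating to the default one.
  val logging: BlockContext = new BlockContext {
    def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = {
      println("about to block the current thread")
      BlockContext.defaultBlockContext.blockOn(thunk)
    }
  }

  def main(args: Array[String]): Unit =
    BlockContext.withBlockContext(logging) {
      blocking { Thread.sleep(10) }   // routed through logging.blockOn
    }
}
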
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index 11d3bb8b02..d728a7f97a 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -72,22 +72,24 @@ trait ExecutionContext {
*/
def reportFailure(@deprecatedName('t) cause: Throwable): Unit
- /** Prepares for the execution of a task. Returns the prepared execution context.
- *
- * `prepare` should be called at the site where an `ExecutionContext` is received (for
- * example, through an implicit method parameter). The returned execution context may
- * then be used to execute tasks. The role of `prepare` is to save any context relevant
- * to an execution's ''call site'', so that this context may be restored at the
- * ''execution site''. (These are often different: for example, execution may be
- * suspended through a `Promise`'s future until the `Promise` is completed, which may
- * be done in another thread, on another stack.)
- *
- * Note: a valid implementation of `prepare` is one that simply returns `this`.
- *
- * @return the prepared execution context
- */
+ /** Prepares for the execution of a task. Returns the prepared
+ * execution context. The recommended implementation of
+ * `prepare` is to return `this`.
+ *
+ * This method should no longer be overridden or called. It was
+ * originally expected that `prepare` would be called by
+ * all libraries that consume ExecutionContexts, in order to
+ * capture thread local context. However, this usage has proven
+ * difficult to implement in practice and instead it is
+ * now better to avoid using `prepare` entirely.
+ *
+ * Instead, if an `ExecutionContext` needs to capture thread
+ * local context, it should capture that context when it is
+ * constructed, so that it doesn't need any additional
+ * preparation later.
+ */
+ @deprecated("Preparation of ExecutionContexts will be removed.", "2.12")
def prepare(): ExecutionContext = this
-
}
/**
@@ -115,7 +117,7 @@ object ExecutionContext {
*
* @return the global `ExecutionContext`
*/
- def global: ExecutionContextExecutor = Implicits.global
+ def global: ExecutionContextExecutor = Implicits.global.asInstanceOf[ExecutionContextExecutor]
object Implicits {
/**
@@ -125,7 +127,7 @@ object ExecutionContext {
* The default `ExecutionContext` implementation is backed by a port of
* [[http://gee.cs.oswego.edu/dl/jsr166/dist/jsr166-4jdk7docs/java/util/concurrent/ForkJoinPool.html java.util.concurrent.ForkJoinPool]].
*/
- implicit lazy val global: ExecutionContextExecutor = impl.ExecutionContextImpl.fromExecutor(null: Executor)
+ implicit lazy val global: ExecutionContext = impl.ExecutionContextImpl.fromExecutor(null: Executor)
}
/** Creates an `ExecutionContext` from the given `ExecutorService`.
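
With `prepare()` deprecated, the guidance above is to capture any thread-local state when the `ExecutionContext` is constructed rather than at each call site. A rough sketch of that shape, wrapping an existing context; the `requestId` thread-local and the class name are invented for illustration:

import scala.concurrent.ExecutionContext

object CapturingContextSketch {
  // Hypothetical thread-local state that submitted tasks should inherit.
  val requestId = new ThreadLocal[String]

  // Snapshot the value at construction time instead of relying on prepare().
  final class CapturingContext(underlying: ExecutionContext) extends ExecutionContext {
    private[this] val captured = requestId.get()

    def execute(runnable: Runnable): Unit = underlying.execute(new Runnable {
      def run(): Unit = {
        val saved = requestId.get()
        requestId.set(captured)
        try runnable.run() finally requestId.set(saved)
      }
    })

    def reportFailure(cause: Throwable): Unit = underlying.reportFailure(cause)
  }
}
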
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index e93a3284dc..2c7f0879ab 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -10,26 +10,22 @@ package scala.concurrent
import scala.language.higherKinds
-import java.util.concurrent.{ ConcurrentLinkedQueue, TimeUnit, Callable }
+import java.util.concurrent.{ CountDownLatch, TimeUnit, Callable }
import java.util.concurrent.TimeUnit.{ NANOSECONDS => NANOS, MILLISECONDS ⇒ MILLIS }
-import java.lang.{ Iterable => JIterable }
-import java.util.{ LinkedList => JLinkedList }
-import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicLong, AtomicBoolean }
+import java.util.concurrent.atomic.AtomicInteger
import scala.util.control.NonFatal
-import scala.Option
import scala.util.{Try, Success, Failure}
-
+import scala.concurrent.duration._
import scala.annotation.tailrec
import scala.collection.mutable.Builder
import scala.collection.generic.CanBuildFrom
import scala.reflect.ClassTag
-
/** The trait that represents futures.
*
- * Asynchronous computations that yield futures are created with the `Future` call:
+ * Asynchronous computations that yield futures are created with the `Future.apply` call:
*
* {{{
* val s = "Hello"
@@ -60,6 +56,10 @@ import scala.reflect.ClassTag
* If a future is failed with a `scala.runtime.NonLocalReturnControl`,
* it is completed with a value from that throwable instead.
*
+ * @define swallowsExceptions
+ * Since this method executes asynchronously and does not produce a return value,
+ * any non-fatal exceptions thrown will be reported to the `ExecutionContext`.
+ *
* @define nonDeterministic
* Note: using this method yields nondeterministic dataflow programs.
*
@@ -91,14 +91,7 @@ import scala.reflect.ClassTag
* `execute()` either immediately or asynchronously.
*/
trait Future[+T] extends Awaitable[T] {
-
- // The executor within the lexical scope
- // of the Future trait. Note that this will
- // (modulo bugs) _never_ execute a callback
- // other than those below in this same file.
- //
- // See the documentation on `InternalCallbackExecutor` for more details.
- private def internalExecutor = Future.InternalCallbackExecutor
+ import Future.{ InternalCallbackExecutor => internalExecutor }
/* Callbacks */
@@ -109,9 +102,11 @@ trait Future[+T] extends Awaitable[T] {
* If the future has already been completed with a value,
* this will either be applied immediately or be scheduled asynchronously.
*
+ * $swallowsExceptions
* $multipleCallbacks
* $callbackInContext
*/
+ @deprecated("use `foreach` or `onComplete` instead (keep in mind that they take total rather than partial functions)", "2.12")
def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = onComplete {
case Success(v) =>
pf.applyOrElse[T, Any](v, Predef.conforms[T]) // Exploiting the cached function to avoid MatchError
@@ -128,9 +123,11 @@ trait Future[+T] extends Awaitable[T] {
*
* Will not be called in case that the future is completed with a value.
*
+ * $swallowsExceptions
* $multipleCallbacks
* $callbackInContext
*/
+ @deprecated("use `onComplete` or `failed.foreach` instead (keep in mind that they take total rather than partial functions)", "2.12")
def onFailure[U](@deprecatedName('callback) pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete {
case Failure(t) =>
pf.applyOrElse[Throwable, Any](t, Predef.conforms[Throwable]) // Exploiting the cached function to avoid MatchError
@@ -143,8 +140,12 @@ trait Future[+T] extends Awaitable[T] {
* If the future has already been completed,
* this will either be applied immediately or be scheduled asynchronously.
*
+ * $swallowsExceptions
* $multipleCallbacks
* $callbackInContext
+ *
+ * @tparam U only used to accept any return type of the given callback function
+ * @param f the function to be executed when this `Future` completes
*/
def onComplete[U](@deprecatedName('func) f: Try[T] => U)(implicit executor: ExecutionContext): Unit
@@ -160,46 +161,47 @@ trait Future[+T] extends Awaitable[T] {
*/
def isCompleted: Boolean
- /** The value of this `Future`.
+ /** The current value of this `Future`.
+ *
+ * $nonDeterministic
*
* If the future is not completed the returned value will be `None`.
* If the future is completed the value will be `Some(Success(t))`
* if it contains a valid result, or `Some(Failure(error))` if it contains
* an exception.
+ *
+ * @return `None` if the `Future` wasn't completed, `Some` if it was.
*/
def value: Option[Try[T]]
/* Projections */
- /** Returns a failed projection of this future.
- *
- * The failed projection is a future holding a value of type `Throwable`.
+ /** The returned `Future` will be successfully completed with the `Throwable` of the original `Future`
+ * if the original `Future` fails.
*
- * It is completed with a value which is the throwable of the original future
- * in case the original future is failed.
+ * If the original `Future` is successful, the returned `Future` is failed with a `NoSuchElementException`.
*
- * It is failed with a `NoSuchElementException` if the original future is completed successfully.
- *
- * Blocking on this future returns a value if the original future is completed with an exception
- * and throws a corresponding exception if the original future fails.
+ * @return a failed projection of this `Future`.
*/
- def failed: Future[Throwable] = {
- implicit val ec = internalExecutor
- val p = Promise[Throwable]()
- onComplete {
- case Failure(t) => p success t
- case Success(v) => p failure (new NoSuchElementException("Future.failed not completed with a throwable."))
- }
- p.future
- }
+ def failed: Future[Throwable] =
+ transform({
+ case Failure(t) => Success(t)
+ case Success(v) => Failure(new NoSuchElementException("Future.failed not completed with a throwable."))
+ })(internalExecutor)
/* Monadic operations */
/** Asynchronously processes the value in the future once the value becomes available.
*
- * Will not be called if the future fails.
+ * WARNING: Will not be called if this future is never completed or if it is completed with a failure.
+ *
+ * $swallowsExceptions
+ *
+ * @tparam U only used to accept any return type of the given callback function
+ * @param f the function which will be executed if this `Future` completes with a result,
+ * the return value of `f` will be discarded.
*/
def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = onComplete { _ foreach f }
@@ -208,33 +210,49 @@ trait Future[+T] extends Awaitable[T] {
* exception thrown when 's' or 'f' is applied, that exception will be propagated
* to the resulting future.
*
- * @param s function that transforms a successful result of the receiver into a
- * successful result of the returned future
- * @param f function that transforms a failure of the receiver into a failure of
- * the returned future
- * @return a future that will be completed with the transformed value
- */
- def transform[S](s: T => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = {
- val p = Promise[S]()
- // transform on Try has the wrong shape for us here
- onComplete {
- case Success(r) => p complete Try(s(r))
- case Failure(t) => p complete Try(throw f(t)) // will throw fatal errors!
+ * @tparam S the type of the returned `Future`
+ * @param s function that transforms a successful result of the receiver into a successful result of the returned future
+ * @param f function that transforms a failure of the receiver into a failure of the returned future
+ * @return a `Future` that will be completed with the transformed value
+ */
+ def transform[S](s: T => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] =
+ transform {
+ case Success(r) => Try(s(r))
+ case Failure(t) => Try(throw f(t)) // will throw fatal errors!
}
- p.future
- }
+
+ /** Creates a new Future by applying the specified function to the result
+ * of this Future. If there is any non-fatal exception thrown when 'f'
+ * is applied then that exception will be propagated to the resulting future.
+ *
+ * @tparam S the type of the returned `Future`
+ * @param f function that transforms the result of this future
+ * @return a `Future` that will be completed with the transformed value
+ */
+ def transform[S](f: Try[T] => Try[S])(implicit executor: ExecutionContext): Future[S]
+
+ /** Creates a new Future by applying the specified function, which produces a Future, to the result
+ * of this Future. If there is any non-fatal exception thrown when 'f'
+ * is applied then that exception will be propagated to the resulting future.
+ *
+ * @tparam S the type of the returned `Future`
+ * @param f function that transforms the result of this future
+ * @return a `Future` that will be completed with the transformed value
+ */
+ def transformWith[S](f: Try[T] => Future[S])(implicit executor: ExecutionContext): Future[S]
+
/** Creates a new future by applying a function to the successful result of
* this future. If this future is completed with an exception then the new
* future will also contain this exception.
*
* $forComprehensionExamples
+ *
+ * @tparam S the type of the returned `Future`
+ * @param f the function which will be applied to the successful result of this `Future`
+ * @return a `Future` which will be completed with the result of the application of the function
*/
- def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = { // transform(f, identity)
- val p = Promise[S]()
- onComplete { v => p complete (v map f) }
- p.future
- }
+ def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = transform(_.map(f))
/** Creates a new future by applying a function to the successful result of
* this future, and returns the result of the function as the new future.
@@ -242,21 +260,23 @@ trait Future[+T] extends Awaitable[T] {
* also contain this exception.
*
* $forComprehensionExamples
+ *
+ * @tparam S the type of the returned `Future`
+ * @param f the function which will be applied to the successful result of this `Future`
+ * @return a `Future` which will be completed with the result of the application of the function
*/
- def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = {
- import impl.Promise.DefaultPromise
- val p = new DefaultPromise[S]()
- onComplete {
- case f: Failure[_] => p complete f.asInstanceOf[Failure[S]]
- case Success(v) => try f(v) match {
- // If possible, link DefaultPromises to avoid space leaks
- case dp: DefaultPromise[_] => dp.asInstanceOf[DefaultPromise[S]].linkRootOf(p)
- case fut => fut.onComplete(p.complete)(internalExecutor)
- } catch { case NonFatal(t) => p failure t }
- }
- p.future
+ def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = transformWith {
+ case Success(s) => f(s)
+ case Failure(_) => this.asInstanceOf[Future[S]]
}
+ /** Creates a new future with one level of nesting flattened, this method is equivalent
+ * to `flatMap(identity)`.
+ *
+ * @tparam S the type of the returned `Future`
+ */
+ def flatten[S](implicit ev: T <:< Future[S]): Future[S] = flatMap(ev)(internalExecutor)
+
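
`transform(Try[T] => Try[S])` and `transformWith(Try[T] => Future[S])` become the primitives here, with `failed`, `map`, `flatMap`, `flatten`, and (below) `recover`/`recoverWith`/`andThen` reduced to thin wrappers over them. A small usage sketch of the same style in user code; the `mapTry` helper is invented, and the global execution context plus `Await` are used only to make the results visible:

import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.util.{Failure, Success, Try}

object TransformSketch {
  // A user-level combinator written exactly like the new map: a thin layer over transform.
  def mapTry[T, S](f: Future[T])(g: T => S): Future[S] =
    f.transform((t: Try[T]) => t.map(g))

  def main(args: Array[String]): Unit = {
    println(Await.result(mapTry(Future.successful(21))(_ * 2), 1.second))   // 42

    // transformWith chains onto another Future, like the new flatMap/recoverWith.
    val recovered = Future.failed[Int](new RuntimeException("boom")).transformWith {
      case Failure(_)     => Future.successful(0)
      case s @ Success(_) => Future.fromTry(s)
    }
    println(Await.result(recovered, 1.second))                              // 0
  }
}
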
/** Creates a new future by filtering the value of the current future with a predicate.
*
* If the current future contains a value which satisfies the predicate, the new future will also hold that value.
@@ -269,14 +289,15 @@ trait Future[+T] extends Awaitable[T] {
* val f = Future { 5 }
* val g = f filter { _ % 2 == 1 }
* val h = f filter { _ % 2 == 0 }
- * Await.result(g, Duration.Zero) // evaluates to 5
+ * g foreach println // Eventually prints 5
* Await.result(h, Duration.Zero) // throw a NoSuchElementException
* }}}
+ *
+ * @param p the predicate to apply to the successful result of this `Future`
+ * @return a `Future` which will hold the successful result of this `Future` if it matches the predicate or a `NoSuchElementException`
*/
def filter(@deprecatedName('pred) p: T => Boolean)(implicit executor: ExecutionContext): Future[T] =
- map {
- r => if (p(r)) r else throw new NoSuchElementException("Future.filter predicate is not satisfied")
- }
+ map { r => if (p(r)) r else throw new NoSuchElementException("Future.filter predicate is not satisfied") }
/** Used by for-comprehensions.
*/
@@ -298,9 +319,13 @@ trait Future[+T] extends Awaitable[T] {
* val h = f collect {
* case x if x > 0 => x * 2
* }
- * Await.result(g, Duration.Zero) // evaluates to 5
+ * g foreach println // Eventually prints 5
* Await.result(h, Duration.Zero) // throw a NoSuchElementException
* }}}
+ *
+ * @tparam S the type of the returned `Future`
+   * @param pf the `PartialFunction` to apply to the successful result of this `Future`
+ * @return a `Future` holding the result of application of the `PartialFunction` or a `NoSuchElementException`
*/
def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] =
map {
@@ -318,12 +343,13 @@ trait Future[+T] extends Awaitable[T] {
* Future (6 / 0) recover { case e: NotFoundException => 0 } // result: exception
* Future (6 / 2) recover { case e: ArithmeticException => 0 } // result: 3
* }}}
+ *
+ * @tparam U the type of the returned `Future`
+ * @param pf the `PartialFunction` to apply if this `Future` fails
+ * @return a `Future` with the successful value of this `Future` or the result of the `PartialFunction`
*/
- def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = {
- val p = Promise[U]()
- onComplete { v => p complete (v recover pf) }
- p.future
- }
+ def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] =
+ transform { _ recover pf }
/** Creates a new future that will handle any matching throwable that this
* future might contain by assigning it a value of another future.
@@ -337,15 +363,16 @@ trait Future[+T] extends Awaitable[T] {
* val f = Future { Int.MaxValue }
* Future (6 / 0) recoverWith { case e: ArithmeticException => f } // result: Int.MaxValue
* }}}
+ *
+ * @tparam U the type of the returned `Future`
+ * @param pf the `PartialFunction` to apply if this `Future` fails
+ * @return a `Future` with the successful value of this `Future` or the outcome of the `Future` returned by the `PartialFunction`
*/
- def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = {
- val p = Promise[U]()
- onComplete {
- case Failure(t) => try pf.applyOrElse(t, (_: Throwable) => this).onComplete(p.complete)(internalExecutor) catch { case NonFatal(t) => p failure t }
- case other => p complete other
+ def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] =
+ transformWith {
+ case Failure(t) => pf.applyOrElse(t, (_: Throwable) => this)
+ case Success(_) => this
}
- p.future
- }
/** Zips the values of `this` and `that` future, and creates
* a new future holding the tuple of their results.
@@ -354,17 +381,35 @@ trait Future[+T] extends Awaitable[T] {
* with the throwable stored in `this`.
* Otherwise, if `that` future fails, the resulting future is failed
* with the throwable stored in `that`.
+ *
+ * @tparam U the type of the other `Future`
+ * @param that the other `Future`
+ * @return a `Future` with the results of both futures or the failure of the first of them that failed
*/
def zip[U](that: Future[U]): Future[(T, U)] = {
implicit val ec = internalExecutor
- val p = Promise[(T, U)]()
- onComplete {
- case f: Failure[_] => p complete f.asInstanceOf[Failure[(T, U)]]
- case Success(s) => that onComplete { c => p.complete(c map { s2 => (s, s2) }) }
- }
- p.future
+ flatMap { r1 => that.map(r2 => (r1, r2)) }
}
+ /** Zips the values of `this` and `that` future using a function `f`,
+ * and creates a new future holding the result.
+ *
+ * If `this` future fails, the resulting future is failed
+ * with the throwable stored in `this`.
+ * Otherwise, if `that` future fails, the resulting future is failed
+ * with the throwable stored in `that`.
+ * If the application of `f` throws a throwable, the resulting future
+ * is failed with that throwable if it is non-fatal.
+ *
+ * @tparam U the type of the other `Future`
+ * @tparam R the type of the resulting `Future`
+ * @param that the other `Future`
+ * @param f the function to apply to the results of `this` and `that`
+ * @return a `Future` with the result of the application of `f` to the results of `this` and `that`
+ */
+ def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] =
+ flatMap(r1 => that.map(r2 => f(r1, r2)))(internalExecutor)
+
/** Creates a new future which holds the result of this future if it was completed successfully, or, if not,
* the result of the `that` future if `that` is completed successfully.
* If both futures are failed, the resulting future holds the throwable object of the first future.
@@ -376,24 +421,26 @@ trait Future[+T] extends Awaitable[T] {
* val f = Future { sys.error("failed") }
* val g = Future { 5 }
* val h = f fallbackTo g
- * Await.result(h, Duration.Zero) // evaluates to 5
+ * h foreach println // Eventually prints 5
* }}}
+ *
+ * @tparam U the type of the other `Future` and the resulting `Future`
+ * @param that the `Future` whose result we want to use if this `Future` fails.
+ * @return a `Future` with the successful result of this or that `Future` or the failure of this `Future` if both fail
*/
- def fallbackTo[U >: T](that: Future[U]): Future[U] = {
- implicit val ec = internalExecutor
- val p = Promise[U]()
- onComplete {
- case s @ Success(_) => p complete s
- case f @ Failure(_) => that onComplete {
- case s2 @ Success(_) => p complete s2
- case _ => p complete f // Use the first failure as the failure
- }
+ def fallbackTo[U >: T](that: Future[U]): Future[U] =
+ if (this eq that) this
+ else {
+ implicit val ec = internalExecutor
+ recoverWith { case _ => that } recoverWith { case _ => this }
}
- p.future
- }
/** Creates a new `Future[S]` which is completed with this `Future`'s result if
* that conforms to `S`'s erased type or a `ClassCastException` otherwise.
+ *
+ * @tparam S the type of the returned `Future`
+ * @param tag the `ClassTag` which will be used to cast the result of this `Future`
+ * @return a `Future` holding the casted result of this `Future` or a `ClassCastException` otherwise
*/
def mapTo[S](implicit tag: ClassTag[S]): Future[S] = {
implicit val ec = internalExecutor
@@ -427,15 +474,19 @@ trait Future[+T] extends Awaitable[T] {
* case Success(v) => println(v)
* }
* }}}
+ *
+ * @tparam U only used to accept any return type of the given `PartialFunction`
+ * @param pf a `PartialFunction` which will be conditionally applied to the outcome of this `Future`
+ * @return a `Future` which will be completed with the exact same outcome as this `Future` but after the `PartialFunction` has been executed.
*/
- def andThen[U](pf: PartialFunction[Try[T], U])(implicit executor: ExecutionContext): Future[T] = {
- val p = Promise[T]()
- onComplete {
- case r => try pf.applyOrElse[Try[T], Any](r, Predef.conforms[Try[T]]) finally p complete r
- }
- p.future
- }
+ def andThen[U](pf: PartialFunction[Try[T], U])(implicit executor: ExecutionContext): Future[T] =
+ transform {
+ result =>
+ try pf.applyOrElse[Try[T], Any](result, Predef.conforms[Try[T]])
+ catch { case NonFatal(t) => executor reportFailure t }
+ result
+ }
}
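
`zipWith` is new, `zip` is now expressed through `flatMap`/`map`, and `fallbackTo` short-circuits when both sides are the same future and otherwise composes two `recoverWith` calls. A usage sketch, again forcing results with `Await` only so the output is visible:

import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

object ZipFallbackSketch {
  def main(args: Array[String]): Unit = {
    val a = Future.successful(2)
    val b = Future.successful(20)
    println(Await.result(a.zipWith(b)(_ + _ * 2), 1.second))   // 42

    val failing = Future.failed[Int](new RuntimeException("boom"))
    println(Await.result(failing.fallbackTo(a), 1.second))     // 2

    // When both futures fail, the first failure wins.
    val both = failing.fallbackTo(Future.failed[Int](new IllegalStateException))
    println(Await.result(both.failed, 1.second).getMessage)    // boom
  }
}
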
@@ -459,40 +510,102 @@ object Future {
classOf[Unit] -> classOf[scala.runtime.BoxedUnit]
)
+ /** A Future which is never completed.
+ */
+ final object never extends Future[Nothing] {
+
+ @throws(classOf[TimeoutException])
+ @throws(classOf[InterruptedException])
+ override def ready(atMost: Duration)(implicit permit: CanAwait): this.type = {
+ atMost match {
+ case e if e eq Duration.Undefined => throw new IllegalArgumentException("cannot wait for Undefined period")
+ case Duration.Inf => new CountDownLatch(1).await()
+ case Duration.MinusInf => // Drop out
+ case f: FiniteDuration =>
+ if (f > Duration.Zero) new CountDownLatch(1).await(f.toNanos, TimeUnit.NANOSECONDS)
+ }
+ throw new TimeoutException(s"Future timed out after [$atMost]")
+ }
+
+ @throws(classOf[Exception])
+ override def result(atMost: Duration)(implicit permit: CanAwait): Nothing = {
+ ready(atMost)
+ throw new TimeoutException(s"Future timed out after [$atMost]")
+ }
+
+ override def onSuccess[U](pf: PartialFunction[Nothing, U])(implicit executor: ExecutionContext): Unit = ()
+ override def onFailure[U](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = ()
+ override def onComplete[U](f: Try[Nothing] => U)(implicit executor: ExecutionContext): Unit = ()
+ override def isCompleted: Boolean = false
+ override def value: Option[Try[Nothing]] = None
+ override def failed: Future[Throwable] = this
+ override def foreach[U](f: Nothing => U)(implicit executor: ExecutionContext): Unit = ()
+ override def transform[S](s: Nothing => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = this
+ override def transform[S](f: Try[Nothing] => Try[S])(implicit executor: ExecutionContext): Future[S] = this
+ override def transformWith[S](f: Try[Nothing] => Future[S])(implicit executor: ExecutionContext): Future[S] = this
+ override def map[S](f: Nothing => S)(implicit executor: ExecutionContext): Future[S] = this
+ override def flatMap[S](f: Nothing => Future[S])(implicit executor: ExecutionContext): Future[S] = this
+ override def flatten[S](implicit ev: Nothing <:< Future[S]): Future[S] = this
+ override def filter(p: Nothing => Boolean)(implicit executor: ExecutionContext): Future[Nothing] = this
+ override def collect[S](pf: PartialFunction[Nothing, S])(implicit executor: ExecutionContext): Future[S] = this
+ override def recover[U >: Nothing](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = this
+ override def recoverWith[U >: Nothing](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = this
+ override def zip[U](that: Future[U]): Future[(Nothing, U)] = this
+ override def zipWith[U, R](that: Future[U])(f: (Nothing, U) => R)(implicit executor: ExecutionContext): Future[R] = this
+ override def fallbackTo[U >: Nothing](that: Future[U]): Future[U] = this
+ override def mapTo[S](implicit tag: ClassTag[S]): Future[S] = this
+ override def andThen[U](pf: PartialFunction[Try[Nothing], U])(implicit executor: ExecutionContext): Future[Nothing] = this
+
+ override def toString: String = "Future(<never>)"
+ }
+
+ /** A Future which is always completed with the Unit value.
+ */
+ val unit: Future[Unit] = successful(())
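
`Future.never` is a future that is never completed (every combinator on it returns `this`), and `Future.unit` is a cached, already-completed `Future[Unit]` that `Future.apply` maps over further down. A short usage sketch:

import java.util.concurrent.TimeoutException
import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

object NeverUnitSketch {
  def main(args: Array[String]): Unit = {
    // unit is completed up front; mapping it is how Future.apply now runs a body.
    println(Await.result(Future.unit.map(_ => 42), 1.second))   // 42

    // never is a placeholder that will not complete; awaiting it times out.
    println(Future.never.isCompleted)                           // false
    try Await.result(Future.never, 100.millis)
    catch { case _: TimeoutException => println("timed out, as expected") }
  }
}
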
+
/** Creates an already completed Future with the specified exception.
*
- * @tparam T the type of the value in the future
- * @return the newly created `Future` object
+ * @tparam T the type of the value in the future
+ * @param exception the non-null instance of `Throwable`
+ * @return the newly created `Future` instance
*/
def failed[T](exception: Throwable): Future[T] = Promise.failed(exception).future
/** Creates an already completed Future with the specified result.
*
* @tparam T the type of the value in the future
- * @return the newly created `Future` object
+ * @param result the given successful value
+ * @return the newly created `Future` instance
*/
def successful[T](result: T): Future[T] = Promise.successful(result).future
/** Creates an already completed Future with the specified result or exception.
*
- * @tparam T the type of the value in the promise
- * @return the newly created `Future` object
+ * @tparam T the type of the value in the `Future`
+ * @param result the result of the returned `Future` instance
+ * @return the newly created `Future` instance
*/
def fromTry[T](result: Try[T]): Future[T] = Promise.fromTry(result).future
- /** Starts an asynchronous computation and returns a `Future` object with the result of that computation.
+ /** Starts an asynchronous computation and returns a `Future` instance with the result of that computation.
*
* The result becomes available once the asynchronous computation is completed.
*
- * @tparam T the type of the result
- * @param body the asychronous computation
+ * @tparam T the type of the result
+ * @param body the asynchronous computation
* @param executor the execution context on which the future is run
- * @return the `Future` holding the result of the computation
+ * @return the `Future` holding the result of the computation
*/
- def apply[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = impl.Future(body)
+ def apply[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] =
+ unit.map(_ => body)
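A small usage sketch of `Future.apply`, which after this change is just `unit.map(_ => body)` (assumes the global execution context):

  import scala.concurrent.Future
  import scala.concurrent.ExecutionContext.Implicits.global

  // The body runs asynchronously on the implicit executor; the Future completes with
  // its result, or with a Failure if the body throws a non-fatal exception.
  val parsed: Future[Int] = Future("42".toInt)
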
- /** Simple version of `Future.traverse`. Transforms a `TraversableOnce[Future[A]]` into a `Future[TraversableOnce[A]]`.
- * Useful for reducing many `Future`s into a single `Future`.
+ /** Simple version of `Future.traverse`. Asynchronously and non-blockingly transforms a `TraversableOnce[Future[A]]`
+ * into a `Future[TraversableOnce[A]]`. Useful for reducing many `Future`s into a single `Future`.
+ *
+ * @tparam A the type of the value inside the Futures
+ * @tparam M the type of the `TraversableOnce` of Futures
+ * @param in the `TraversableOnce` of Futures which will be sequenced
+ * @return the `Future` of the `TraversableOnce` of results
*/
def sequence[A, M[X] <: TraversableOnce[X]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = {
in.foldLeft(successful(cbf(in))) {
@@ -500,7 +613,12 @@ object Future {
} map (_.result())
}
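For illustration, a minimal `sequence` example (assuming the global execution context):

  import scala.concurrent.Future
  import scala.concurrent.ExecutionContext.Implicits.global

  val futures: List[Future[Int]] = List(Future(1), Future(2), Future(3))

  // List[Future[Int]] becomes Future[List[Int]]; if any element fails, the result fails.
  val all: Future[List[Int]] = Future.sequence(futures)
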
- /** Returns a new `Future` to the result of the first future in the list that is completed.
+ /** Asynchronously and non-blockingly returns a new `Future` to the result of the first future
+ * in the list that is completed, whether that future completed as a success or as a failure.
+ *
+ * @tparam T the type of the value in the future
+ * @param futures the `TraversableOnce` of Futures in which to find the first completed
+ * @return the `Future` holding the result of the future that is first to be completed
*/
def firstCompletedOf[T](futures: TraversableOnce[Future[T]])(implicit executor: ExecutionContext): Future[T] = {
val p = Promise[T]()
@@ -509,8 +627,15 @@ object Future {
p.future
}
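A sketch of `firstCompletedOf`; the `after` helper below is hypothetical, standing in for any asynchronous work with different latencies:

  import scala.concurrent.Future
  import scala.concurrent.ExecutionContext.Implicits.global
  import scala.concurrent.duration._

  // Hypothetical helper: completes with `value` after a crude, blocking delay.
  def after[T](delay: FiniteDuration)(value: T): Future[T] =
    Future { Thread.sleep(delay.toMillis); value }

  // Completes with whichever future finishes first, success or failure alike.
  val winner: Future[String] =
    Future.firstCompletedOf(Seq(after(10.millis)("fast"), after(1.second)("slow")))
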
- /** Returns a `Future` that will hold the optional result of the first `Future` with a result that matches the predicate.
+ /** Asynchronously and non-blockingly returns a `Future` that will hold the optional result
+ * of the first `Future` with a result that matches the predicate.
+ *
+ * @tparam T the type of the value in the future
+ * @param futures the `TraversableOnce` of Futures to search
+ * @param p the predicate which indicates if it's a match
+ * @return the `Future` holding the optional result of the search
*/
+ @deprecated("Use the overloaded version of this method that takes a scala.collection.immutable.Iterable instead", "2.12")
def find[T](@deprecatedName('futurestravonce) futures: TraversableOnce[Future[T]])(@deprecatedName('predicate) p: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = {
val futuresBuffer = futures.toBuffer
if (futuresBuffer.isEmpty) successful[Option[T]](None)
@@ -534,40 +659,127 @@ object Future {
}
}
- /** A non-blocking fold over the specified futures, with the start value of the given zero.
+
+ /** Asynchronously and non-blockingly returns a `Future` that will hold the optional result
+ * of the first `Future` with a result that matches the predicate; failed `Future`s are ignored.
+ *
+ * @tparam T the type of the value in the future
+ * @param futures the `scala.collection.immutable.Iterable` of Futures to search
+ * @param p the predicate which indicates if it's a match
+ * @return the `Future` holding the optional result of the search
+ */
+ def find[T](futures: scala.collection.immutable.Iterable[Future[T]])(p: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = {
+ def searchNext(i: Iterator[Future[T]]): Future[Option[T]] =
+ if (!i.hasNext) successful[Option[T]](None)
+ else {
+ i.next().transformWith {
+ case Success(r) if p(r) => successful(Some(r))
+ case other => searchNext(i)
+ }
+ }
+ searchNext(futures.iterator)
+ }
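A minimal sketch of the new `find` overload (a `List` is an `immutable.Iterable`, so it selects this overload):

  import scala.concurrent.Future
  import scala.concurrent.ExecutionContext.Implicits.global

  val candidates: List[Future[Int]] = List(Future(1), Future(3), Future(42))

  // Walks the futures in order, non-blockingly, skipping failed ones,
  // and yields the first successful result matching the predicate, if any.
  val firstEven: Future[Option[Int]] = Future.find(candidates)(_ % 2 == 0)
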
+
+ /** A non-blocking, asynchronous left fold over the specified futures,
+ * with the start value of the given zero.
+ * The fold is performed asynchronously in left-to-right order as the futures become completed.
+ * The result will be the first failure of any of the futures, or any failure in the actual fold,
+ * or the result of the fold.
+ *
+ * Example:
+ * {{{
+ * val futureSum = Future.foldLeft(futures)(0)(_ + _)
+ * }}}
+ *
+ * @tparam T the type of the value of the input Futures
+ * @tparam R the type of the value of the returned `Future`
+ * @param futures the `scala.collection.immutable.Iterable` of Futures to be folded
+ * @param zero the start value of the fold
+ * @param op the fold operation to be applied to the zero and futures
+ * @return the `Future` holding the result of the fold
+ */
+ def foldLeft[T, R](futures: scala.collection.immutable.Iterable[Future[T]])(zero: R)(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] =
+ foldNext(futures.iterator, zero, op)
+
+ private[this] def foldNext[T, R](i: Iterator[Future[T]], prevValue: R, op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] =
+ if (!i.hasNext) successful(prevValue)
+ else i.next().flatMap { value => foldNext(i, op(prevValue, value), op) }
+
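Expanding the scaladoc example into a self-contained sketch (assuming the global execution context):

  import scala.concurrent.Future
  import scala.concurrent.ExecutionContext.Implicits.global

  val futures: List[Future[Int]] = List(Future(1), Future(2), Future(3))

  // Folds the results left to right as the futures complete; any failure fails the whole fold.
  val futureSum: Future[Int] = Future.foldLeft(futures)(0)(_ + _)
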
+ /** A non-blocking, asynchronous fold over the specified futures, with the start value of the given zero.
* The fold is performed on the thread where the last future is completed,
* the result will be the first failure of any of the futures, or any failure in the actual fold,
* or the result of the fold.
*
* Example:
* {{{
- * val result = Await.result(Future.fold(futures)(0)(_ + _), 5 seconds)
+ * val futureSum = Future.fold(futures)(0)(_ + _)
* }}}
+ *
+ * @tparam T the type of the value of the input Futures
+ * @tparam R the type of the value of the returned `Future`
+ * @param futures the `TraversableOnce` of Futures to be folded
+ * @param zero the start value of the fold
+ * @param op the fold operation to be applied to the zero and futures
+ * @return the `Future` holding the result of the fold
*/
+ @deprecated("Use Future.foldLeft instead", "2.12")
def fold[T, R](futures: TraversableOnce[Future[T]])(zero: R)(@deprecatedName('foldFun) op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
if (futures.isEmpty) successful(zero)
else sequence(futures).map(_.foldLeft(zero)(op))
}
- /** Initiates a fold over the supplied futures where the fold-zero is the result value of the `Future` that's completed first.
+ /** Initiates a non-blocking, asynchronous fold over the supplied futures
+ * where the fold-zero is the result value of the `Future` that's completed first.
*
* Example:
* {{{
- * val result = Await.result(Future.reduce(futures)(_ + _), 5 seconds)
+ * val futureSum = Future.reduce(futures)(_ + _)
* }}}
+ * @tparam T the type of the value of the input Futures
+ * @tparam R the type of the value of the returned `Future`
+ * @param futures the `TraversableOnce` of Futures to be reduced
+ * @param op the reduce operation which is applied to the results of the futures
+ * @return the `Future` holding the result of the reduce
*/
+ @deprecated("Use Future.reduceLeft instead", "2.12")
def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
if (futures.isEmpty) failed(new NoSuchElementException("reduce attempted on empty collection"))
else sequence(futures).map(_ reduceLeft op)
}
- /** Transforms a `TraversableOnce[A]` into a `Future[TraversableOnce[B]]` using the provided function `A => Future[B]`.
+ /** Initiates a non-blocking, asynchronous left reduction over the supplied futures
+ * where the zero is the result value of the first `Future`.
+ *
+ * Example:
+ * {{{
+ * val futureSum = Future.reduceLeft(futures)(_ + _)
+ * }}}
+ * @tparam T the type of the value of the input Futures
+ * @tparam R the type of the value of the returned `Future`
+ * @param futures the `scala.collection.immutable.Iterable` of Futures to be reduced
+ * @param op the reduce operation which is applied to the results of the futures
+ * @return the `Future` holding the result of the reduce
+ */
+ def reduceLeft[T, R >: T](futures: scala.collection.immutable.Iterable[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
+ val i = futures.iterator
+ if (!i.hasNext) failed(new NoSuchElementException("reduceLeft attempted on empty collection"))
+ else i.next() flatMap { v => foldNext(i, v, op) }
+ }
+
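And a corresponding `reduceLeft` sketch, where the first future's result serves as the zero:

  import scala.concurrent.Future
  import scala.concurrent.ExecutionContext.Implicits.global

  val futures: List[Future[Int]] = List(Future(2), Future(7), Future(4))

  // Fails with NoSuchElementException if the collection is empty.
  val futureMax: Future[Int] = Future.reduceLeft(futures)(_ max _)
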
+ /** Asynchronously and non-blockingly transforms a `TraversableOnce[A]` into a `Future[TraversableOnce[B]]`
+ * using the provided function `A => Future[B]`.
* This is useful for performing a parallel map. For example, to apply a function to all items of a list
* in parallel:
*
* {{{
* val myFutureList = Future.traverse(myList)(x => Future(myFunc(x)))
* }}}
+ * @tparam A the type of the value inside the Futures in the `TraversableOnce`
+ * @tparam B the type of the value of the returned `Future`
+ * @tparam M the type of the `TraversableOnce` of Futures
+ * @param in the `TraversableOnce` of Futures which will be sequenced
+ * @param fn the function to apply to the `TraversableOnce` of Futures to produce the results
+ * @return the `Future` of the `TraversableOnce` of results
*/
def traverse[A, B, M[X] <: TraversableOnce[X]](in: M[A])(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] =
in.foldLeft(successful(cbf(in))) { (fr, a) =>
@@ -575,6 +787,7 @@ object Future {
for (r <- fr; b <- fb) yield (r += b)
}.map(_.result())
+
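A runnable sketch of `traverse`; `fetchLength` is a hypothetical stand-in for real asynchronous work per element:

  import scala.concurrent.Future
  import scala.concurrent.ExecutionContext.Implicits.global

  // Hypothetical per-element work.
  def fetchLength(url: String): Future[Int] = Future(url.length)

  // Starts the work for every element and collects the results in the original order.
  val lengths: Future[List[Int]] = Future.traverse(List("a", "bb", "ccc"))(fetchLength)
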
// This is used to run callbacks which are internal
// to scala.concurrent; our own callbacks are only
// ever used to eventually run another callback,
diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala
index eb8044ed3b..dc4376eba4 100644
--- a/src/library/scala/concurrent/Promise.scala
+++ b/src/library/scala/concurrent/Promise.scala
@@ -26,12 +26,6 @@ import scala.util.{ Try, Success, Failure }
* Note: Using this method may result in non-deterministic concurrent programs.
*/
trait Promise[T] {
-
- // used for internal callbacks defined in
- // the lexical scope of this trait;
- // _never_ for application callbacks.
- private implicit def internalExecutor: ExecutionContext = Future.InternalCallbackExecutor
-
/** Future containing the value of this promise.
*/
def future: Future[T]
@@ -67,7 +61,9 @@ trait Promise[T] {
* @return This promise
*/
final def completeWith(other: Future[T]): this.type = {
- other onComplete { this complete _ }
+ if (other ne this.future) { // this completeWith this doesn't make much sense
+ other.onComplete(this complete _)(Future.InternalCallbackExecutor)
+ }
this
}
@@ -76,7 +72,9 @@ trait Promise[T] {
* @return This promise
*/
final def tryCompleteWith(other: Future[T]): this.type = {
- other onComplete { this tryComplete _ }
+ if (other ne this.future) { // this tryCompleteWith this doesn't make much sense
+ other.onComplete(this tryComplete _)(Future.InternalCallbackExecutor)
+ }
this
}
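A brief sketch of the new guard: completing a promise with its own future is now a no-op rather than registering a useless callback:

  import scala.concurrent.{ Future, Promise }

  val p = Promise[Int]()

  // `p completeWith p.future` is ignored; the promise stays incomplete.
  p.completeWith(p.future)
  assert(!p.isCompleted)

  // Completing with any other future behaves as before.
  p.completeWith(Future.successful(42))
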
@@ -142,5 +140,5 @@ object Promise {
* @tparam T the type of the value in the promise
* @return the newly created `Promise` object
*/
- def fromTry[T](result: Try[T]): Promise[T] = new impl.Promise.KeptPromise[T](result)
+ def fromTry[T](result: Try[T]): Promise[T] = impl.Promise.KeptPromise[T](result)
}
diff --git a/src/library/scala/concurrent/impl/AbstractPromise.java b/src/library/scala/concurrent/impl/AbstractPromise.java
deleted file mode 100644
index b8165b6cde..0000000000
--- a/src/library/scala/concurrent/impl/AbstractPromise.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent.impl;
-
-
-import scala.concurrent.util.Unsafe;
-import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
-
-
-
-abstract class AbstractPromise {
- private volatile Object _ref;
-
- final static long _refoffset;
-
- static {
- try {
- _refoffset = Unsafe.instance.objectFieldOffset(AbstractPromise.class.getDeclaredField("_ref"));
- } catch (Throwable t) {
- throw new ExceptionInInitializerError(t);
- }
- }
-
- protected final boolean updateState(Object oldState, Object newState) {
- return Unsafe.instance.compareAndSwapObject(this, _refoffset, oldState, newState);
- }
-
- protected final Object getState() {
- return _ref;
- }
-
- protected final static AtomicReferenceFieldUpdater<AbstractPromise, Object> updater =
- AtomicReferenceFieldUpdater.newUpdater(AbstractPromise.class, Object.class, "_ref");
-} \ No newline at end of file
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index 479720287c..0c7f98ce5a 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -11,52 +11,88 @@ package scala.concurrent.impl
import java.util.concurrent.{ LinkedBlockingQueue, Callable, Executor, ExecutorService, Executors, ThreadFactory, TimeUnit, ThreadPoolExecutor }
+import java.util.concurrent.atomic.AtomicInteger
import java.util.Collection
import scala.concurrent.forkjoin._
import scala.concurrent.{ BlockContext, ExecutionContext, Awaitable, CanAwait, ExecutionContextExecutor, ExecutionContextExecutorService }
import scala.util.control.NonFatal
+import scala.annotation.tailrec
+private[scala] class ExecutionContextImpl private[impl] (val executor: Executor, val reporter: Throwable => Unit) extends ExecutionContextExecutor {
+ require(executor ne null, "Executor must not be null")
+ override def execute(runnable: Runnable) = executor execute runnable
+ override def reportFailure(t: Throwable) = reporter(t)
+}
-private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter: Throwable => Unit) extends ExecutionContextExecutor {
- // Placed here since the creation of the executor needs to read this val
- private[this] val uncaughtExceptionHandler: Thread.UncaughtExceptionHandler = new Thread.UncaughtExceptionHandler {
- def uncaughtException(thread: Thread, cause: Throwable): Unit = reporter(cause)
- }
- val executor: Executor = es match {
- case null => createExecutorService
- case some => some
- }
+private[concurrent] object ExecutionContextImpl {
// Implement BlockContext on FJP threads
- class DefaultThreadFactory(daemonic: Boolean) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory {
+ final class DefaultThreadFactory(
+ daemonic: Boolean,
+ maxThreads: Int,
+ prefix: String,
+ uncaught: Thread.UncaughtExceptionHandler) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory {
+
+ require(prefix ne null, "DefaultThreadFactory.prefix must be non null")
+ require(maxThreads > 0, "DefaultThreadFactory.maxThreads must be greater than 0")
+
+ private final val currentNumberOfThreads = new AtomicInteger(0)
+
+ @tailrec private final def reserveThread(): Boolean = currentNumberOfThreads.get() match {
+ case `maxThreads` | Int.`MaxValue` => false
+ case other => currentNumberOfThreads.compareAndSet(other, other + 1) || reserveThread()
+ }
+
+ @tailrec private final def deregisterThread(): Boolean = currentNumberOfThreads.get() match {
+ case 0 => false
+ case other => currentNumberOfThreads.compareAndSet(other, other - 1) || deregisterThread()
+ }
+
def wire[T <: Thread](thread: T): T = {
thread.setDaemon(daemonic)
- thread.setUncaughtExceptionHandler(uncaughtExceptionHandler)
+ thread.setUncaughtExceptionHandler(uncaught)
+ thread.setName(prefix + "-" + thread.getId())
thread
}
- def newThread(runnable: Runnable): Thread = wire(new Thread(runnable))
-
- def newThread(fjp: ForkJoinPool): ForkJoinWorkerThread = wire(new ForkJoinWorkerThread(fjp) with BlockContext {
- override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = {
- var result: T = null.asInstanceOf[T]
- ForkJoinPool.managedBlock(new ForkJoinPool.ManagedBlocker {
- @volatile var isdone = false
- override def block(): Boolean = {
- result = try thunk finally { isdone = true }
- true
+ // As per the ThreadFactory contract, newThread should return `null` if it cannot create a new thread.
+ def newThread(runnable: Runnable): Thread =
+ if (reserveThread())
+ wire(new Thread(new Runnable {
+ // We have to decrement the current thread count when the thread exits
+ override def run() = try runnable.run() finally deregisterThread()
+ })) else null
+
+ def newThread(fjp: ForkJoinPool): ForkJoinWorkerThread =
+ if (reserveThread()) {
+ wire(new ForkJoinWorkerThread(fjp) with BlockContext {
+ // We have to decrement the current thread count when the thread exits
+ final override def onTermination(exception: Throwable): Unit = deregisterThread()
+ final override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = {
+ var result: T = null.asInstanceOf[T]
+ ForkJoinPool.managedBlock(new ForkJoinPool.ManagedBlocker {
+ @volatile var isdone = false
+ override def block(): Boolean = {
+ result = try {
+ // When we block, switch out the BlockContext temporarily so that nested blocking does not create N new Threads
+ BlockContext.withBlockContext(BlockContext.defaultBlockContext) { thunk }
+ } finally {
+ isdone = true
+ }
+
+ true
+ }
+ override def isReleasable = isdone
+ })
+ result
}
- override def isReleasable = isdone
})
- result
- }
- })
+ } else null
}
- def createExecutorService: ExecutorService = {
-
+ def createDefaultExecutorService(reporter: Throwable => Unit): ExecutorService = {
def getInt(name: String, default: String) = (try System.getProperty(name, default) catch {
case e: SecurityException => default
}) match {
@@ -65,20 +101,42 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
}
def range(floor: Int, desired: Int, ceiling: Int) = scala.math.min(scala.math.max(floor, desired), ceiling)
+ val numThreads = getInt("scala.concurrent.context.numThreads", "x1")
+ // The hard limit on the number of active threads that the thread factory will produce
+ // SI-8955 Deadlocks can happen if maxNoOfThreads is too low, although we're currently not sure
+ // about what the exact threshold is. numThreads + 256 is conservatively high.
+ val maxNoOfThreads = getInt("scala.concurrent.context.maxThreads", "x1")
val desiredParallelism = range(
getInt("scala.concurrent.context.minThreads", "1"),
- getInt("scala.concurrent.context.numThreads", "x1"),
- getInt("scala.concurrent.context.maxThreads", "x1"))
+ numThreads,
+ maxNoOfThreads)
+
+ // The thread factory must provide additional threads to support managed blocking.
+ val maxExtraThreads = getInt("scala.concurrent.context.maxExtraThreads", "256")
+
+ val uncaughtExceptionHandler: Thread.UncaughtExceptionHandler = new Thread.UncaughtExceptionHandler {
+ override def uncaughtException(thread: Thread, cause: Throwable): Unit = reporter(cause)
+ }
- val threadFactory = new DefaultThreadFactory(daemonic = true)
+ val threadFactory = new ExecutionContextImpl.DefaultThreadFactory(daemonic = true,
+ maxThreads = maxNoOfThreads + maxExtraThreads,
+ prefix = "scala-execution-context-global",
+ uncaught = uncaughtExceptionHandler)
try {
- new ForkJoinPool(
- desiredParallelism,
- threadFactory,
- uncaughtExceptionHandler,
- true) // Async all the way baby
+ new ForkJoinPool(desiredParallelism, threadFactory, uncaughtExceptionHandler, true) {
+ override def execute(runnable: Runnable): Unit = {
+ val fjt: ForkJoinTask[_] = runnable match {
+ case t: ForkJoinTask[_] => t
+ case r => new ExecutionContextImpl.AdaptedForkJoinTask(r)
+ }
+ Thread.currentThread match {
+ case fjw: ForkJoinWorkerThread if fjw.getPool eq this => fjt.fork()
+ case _ => super.execute(fjt)
+ }
+ }
+ }
} catch {
case NonFatal(t) =>
System.err.println("Failed to create ForkJoinPool for the default ExecutionContext, falling back to ThreadPoolExecutor")
@@ -96,56 +154,42 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
}
}
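For reference, a hedged sketch of how these knobs are typically supplied: they are JVM system properties, read once when the default pool is created, and the "xN" form is assumed here to be a multiplier of the available processors:

  // Typically set on the JVM command line, e.g.
  //   -Dscala.concurrent.context.minThreads=1
  //   -Dscala.concurrent.context.numThreads=x1
  //   -Dscala.concurrent.context.maxThreads=x1
  //   -Dscala.concurrent.context.maxExtraThreads=256
  // and readable from Scala via sys.props:
  val configuredMax = sys.props.getOrElse("scala.concurrent.context.maxThreads", "x1")
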
- def execute(runnable: Runnable): Unit = executor match {
- case fj: ForkJoinPool =>
- val fjt: ForkJoinTask[_] = runnable match {
- case t: ForkJoinTask[_] => t
- case r => new ExecutionContextImpl.AdaptedForkJoinTask(r)
- }
- Thread.currentThread match {
- case fjw: ForkJoinWorkerThread if fjw.getPool eq fj => fjt.fork()
- case _ => fj execute fjt
- }
- case generic => generic execute runnable
- }
-
- def reportFailure(t: Throwable) = reporter(t)
-}
-
-
-private[concurrent] object ExecutionContextImpl {
-
final class AdaptedForkJoinTask(runnable: Runnable) extends ForkJoinTask[Unit] {
- final override def setRawResult(u: Unit): Unit = ()
- final override def getRawResult(): Unit = ()
- final override def exec(): Boolean = try { runnable.run(); true } catch {
- case anything: Throwable ⇒
- val t = Thread.currentThread
- t.getUncaughtExceptionHandler match {
- case null ⇒
- case some ⇒ some.uncaughtException(t, anything)
- }
- throw anything
- }
+ final override def setRawResult(u: Unit): Unit = ()
+ final override def getRawResult(): Unit = ()
+ final override def exec(): Boolean = try { runnable.run(); true } catch {
+ case anything: Throwable =>
+ val t = Thread.currentThread
+ t.getUncaughtExceptionHandler match {
+ case null =>
+ case some => some.uncaughtException(t, anything)
}
+ throw anything
+ }
+ }
- def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl = new ExecutionContextImpl(e, reporter)
- def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl with ExecutionContextExecutorService =
- new ExecutionContextImpl(es, reporter) with ExecutionContextExecutorService {
- final def asExecutorService: ExecutorService = executor.asInstanceOf[ExecutorService]
- override def execute(command: Runnable) = executor.execute(command)
- override def shutdown() { asExecutorService.shutdown() }
- override def shutdownNow() = asExecutorService.shutdownNow()
- override def isShutdown = asExecutorService.isShutdown
- override def isTerminated = asExecutorService.isTerminated
- override def awaitTermination(l: Long, timeUnit: TimeUnit) = asExecutorService.awaitTermination(l, timeUnit)
- override def submit[T](callable: Callable[T]) = asExecutorService.submit(callable)
- override def submit[T](runnable: Runnable, t: T) = asExecutorService.submit(runnable, t)
- override def submit(runnable: Runnable) = asExecutorService.submit(runnable)
- override def invokeAll[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAll(callables)
- override def invokeAll[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAll(callables, l, timeUnit)
- override def invokeAny[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAny(callables)
- override def invokeAny[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAny(callables, l, timeUnit)
+ def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl =
+ new ExecutionContextImpl(Option(e).getOrElse(createDefaultExecutorService(reporter)), reporter)
+
+ def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter):
+ ExecutionContextImpl with ExecutionContextExecutorService = {
+ new ExecutionContextImpl(Option(es).getOrElse(createDefaultExecutorService(reporter)), reporter)
+ with ExecutionContextExecutorService {
+ final def asExecutorService: ExecutorService = executor.asInstanceOf[ExecutorService]
+ override def execute(command: Runnable) = executor.execute(command)
+ override def shutdown() { asExecutorService.shutdown() }
+ override def shutdownNow() = asExecutorService.shutdownNow()
+ override def isShutdown = asExecutorService.isShutdown
+ override def isTerminated = asExecutorService.isTerminated
+ override def awaitTermination(l: Long, timeUnit: TimeUnit) = asExecutorService.awaitTermination(l, timeUnit)
+ override def submit[T](callable: Callable[T]) = asExecutorService.submit(callable)
+ override def submit[T](runnable: Runnable, t: T) = asExecutorService.submit(runnable, t)
+ override def submit(runnable: Runnable) = asExecutorService.submit(runnable)
+ override def invokeAll[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAll(callables)
+ override def invokeAll[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAll(callables, l, timeUnit)
+ override def invokeAny[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAny(callables)
+ override def invokeAny[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAny(callables, l, timeUnit)
+ }
}
}
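A usage sketch for the public wrappers around these factories (via `scala.concurrent.ExecutionContext`); note that, per the change above, a `null` executor now falls back to a freshly created default executor service instead of failing:

  import java.util.concurrent.Executors
  import scala.concurrent.{ ExecutionContext, ExecutionContextExecutorService }

  val pool = Executors.newFixedThreadPool(4)

  // Wraps a plain ExecutorService so it can be used as an implicit ExecutionContext.
  implicit val ec: ExecutionContextExecutorService = ExecutionContext.fromExecutorService(pool)

  // ... run Futures on `ec`, then shut it down when done.
  ec.shutdown()
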
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
deleted file mode 100644
index 042d32c234..0000000000
--- a/src/library/scala/concurrent/impl/Future.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent.impl
-
-
-
-import scala.concurrent.ExecutionContext
-import scala.util.control.NonFatal
-import scala.util.{ Success, Failure }
-
-
-private[concurrent] object Future {
- class PromiseCompletingRunnable[T](body: => T) extends Runnable {
- val promise = new Promise.DefaultPromise[T]()
-
- override def run() = {
- promise complete {
- try Success(body) catch { case NonFatal(e) => Failure(e) }
- }
- }
- }
-
- def apply[T](body: =>T)(implicit executor: ExecutionContext): scala.concurrent.Future[T] = {
- val runnable = new PromiseCompletingRunnable(body)
- executor.prepare.execute(runnable)
- runnable.promise.future
- }
-}
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index b15601058e..078ad45be9 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -16,14 +16,42 @@ import scala.util.control.NonFatal
import scala.util.{ Try, Success, Failure }
import java.io.ObjectInputStream
import java.util.concurrent.locks.AbstractQueuedSynchronizer
+import java.util.concurrent.atomic.AtomicReference
private[concurrent] trait Promise[T] extends scala.concurrent.Promise[T] with scala.concurrent.Future[T] {
def future: this.type = this
+
+ import scala.concurrent.Future
+ import scala.concurrent.impl.Promise.DefaultPromise
+
+ override def transform[S](f: Try[T] => Try[S])(implicit executor: ExecutionContext): Future[S] = {
+ val p = new DefaultPromise[S]()
+ onComplete { result => p.complete(try f(result) catch { case NonFatal(t) => Failure(t) }) }
+ p.future
+ }
+
+ // If possible, link DefaultPromises to avoid space leaks
+ override def transformWith[S](f: Try[T] => Future[S])(implicit executor: ExecutionContext): Future[S] = {
+ val p = new DefaultPromise[S]()
+ onComplete {
+ v => try f(v) match {
+ case fut if fut eq this => p complete v.asInstanceOf[Try[S]]
+ case dp: DefaultPromise[_] => dp.asInstanceOf[DefaultPromise[S]].linkRootOf(p)
+ case fut => p completeWith fut
+ } catch { case NonFatal(t) => p failure t }
+ }
+ p.future
+ }
+
+ override def toString: String = value match {
+ case Some(result) => "Future("+result+")"
+ case None => "Future(<not completed>)"
+ }
}
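A small sketch of the two combinators implemented here, as seen from user code (assuming the global execution context):

  import scala.concurrent.Future
  import scala.concurrent.ExecutionContext.Implicits.global
  import scala.util.{ Success, Failure }

  val f: Future[Int] = Future(6 * 7)

  // transform sees the completed Try and may map successes and failures independently.
  val described: Future[String] = f.transform {
    case Success(v) => Success("got " + v)
    case Failure(e) => Success("failed: " + e.getMessage)
  }

  // transformWith chains into another Future; when that Future is itself a DefaultPromise,
  // the implementation above links the two promises to avoid accumulating garbage.
  val chained: Future[Int] = f.transformWith {
    case Success(v) => Future.successful(v + 1)
    case Failure(_) => Future.successful(0)
  }
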
/* Precondition: `executor` is prepared, i.e., `executor` has been returned from invocation of `prepare` on some other `ExecutionContext`.
*/
-private class CallbackRunnable[T](val executor: ExecutionContext, val onComplete: Try[T] => Any) extends Runnable with OnCompleteRunnable {
+private final class CallbackRunnable[T](val executor: ExecutionContext, val onComplete: Try[T] => Any) extends Runnable with OnCompleteRunnable {
// must be filled in before running it
var value: Try[T] = null
@@ -89,7 +117,7 @@ private[concurrent] object Promise {
* incomplete, or as complete with the same result value.
*
* A DefaultPromise stores its state entirely in the AnyRef cell exposed by
- * AbstractPromise. The type of object stored in the cell fully describes the
+ * AtomicReference. The type of object stored in the cell fully describes the
* current state of the promise.
*
* 1. List[CallbackRunnable] - The promise is incomplete and has zero or more callbacks
@@ -150,8 +178,7 @@ private[concurrent] object Promise {
* DefaultPromises, and `linkedRootOf` is currently only designed to be called
* by Future.flatMap.
*/
- class DefaultPromise[T] extends AbstractPromise with Promise[T] { self =>
- updateState(null, Nil) // The promise is incomplete and has no callbacks
+ final class DefaultPromise[T] extends AtomicReference[AnyRef](Nil) with Promise[T] {
/** Get the root promise for this promise, compressing the link chain to that
* promise if necessary.
@@ -167,14 +194,23 @@ private[concurrent] object Promise {
* be garbage collected. Also, subsequent calls to this method should be
* faster as the link chain will be shorter.
*/
- @tailrec
- private def compressedRoot(): DefaultPromise[T] = {
- getState match {
- case linked: DefaultPromise[_] =>
- val target = linked.asInstanceOf[DefaultPromise[T]].root
- if (linked eq target) target else if (updateState(linked, target)) target else compressedRoot()
+ private def compressedRoot(): DefaultPromise[T] =
+ get() match {
+ case linked: DefaultPromise[_] => compressedRoot(linked)
case _ => this
}
+
+ @tailrec
+ private[this] final def compressedRoot(linked: DefaultPromise[_]): DefaultPromise[T] = {
+ val target = linked.asInstanceOf[DefaultPromise[T]].root
+ if (linked eq target) target
+ else if (compareAndSet(linked, target)) target
+ else {
+ get() match {
+ case newLinked: DefaultPromise[_] => compressedRoot(newLinked)
+ case _ => this
+ }
+ }
}
/** Get the promise at the root of the chain of linked promises. Used by `compressedRoot()`.
@@ -182,18 +218,16 @@ private[concurrent] object Promise {
* to compress the link chain whenever possible.
*/
@tailrec
- private def root: DefaultPromise[T] = {
- getState match {
+ private def root: DefaultPromise[T] =
+ get() match {
case linked: DefaultPromise[_] => linked.asInstanceOf[DefaultPromise[T]].root
case _ => this
}
- }
/** Try waiting for this promise to be completed.
*/
protected final def tryAwait(atMost: Duration): Boolean = if (!isCompleted) {
import Duration.Undefined
- import scala.concurrent.Future.InternalCallbackExecutor
atMost match {
case e if e eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period")
case Duration.Inf =>
@@ -225,18 +259,18 @@ private[concurrent] object Promise {
def value: Option[Try[T]] = value0
@tailrec
- private def value0: Option[Try[T]] = getState match {
+ private def value0: Option[Try[T]] = get() match {
case c: Try[_] => Some(c.asInstanceOf[Try[T]])
- case _: DefaultPromise[_] => compressedRoot().value0
+ case dp: DefaultPromise[_] => compressedRoot(dp).value0
case _ => None
}
override def isCompleted: Boolean = isCompleted0
@tailrec
- private def isCompleted0: Boolean = getState match {
+ private def isCompleted0: Boolean = get() match {
case _: Try[_] => true
- case _: DefaultPromise[_] => compressedRoot().isCompleted0
+ case dp: DefaultPromise[_] => compressedRoot(dp).isCompleted0
case _ => false
}
@@ -254,21 +288,17 @@ private[concurrent] object Promise {
*/
@tailrec
private def tryCompleteAndGetListeners(v: Try[T]): List[CallbackRunnable[T]] = {
- getState match {
+ get() match {
case raw: List[_] =>
val cur = raw.asInstanceOf[List[CallbackRunnable[T]]]
- if (updateState(cur, v)) cur else tryCompleteAndGetListeners(v)
- case _: DefaultPromise[_] =>
- compressedRoot().tryCompleteAndGetListeners(v)
+ if (compareAndSet(cur, v)) cur else tryCompleteAndGetListeners(v)
+ case dp: DefaultPromise[_] => compressedRoot(dp).tryCompleteAndGetListeners(v)
case _ => null
}
}
- def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = {
- val preparedEC = executor.prepare()
- val runnable = new CallbackRunnable[T](preparedEC, func)
- dispatchOrAddCallback(runnable)
- }
+ def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit =
+ dispatchOrAddCallback(new CallbackRunnable[T](executor.prepare(), func))
/** Tries to add the callback, if already completed, it dispatches the callback to be executed.
* Used by `onComplete()` to add callbacks to a promise and by `link()` to transfer callbacks
@@ -276,15 +306,16 @@ private[concurrent] object Promise {
*/
@tailrec
private def dispatchOrAddCallback(runnable: CallbackRunnable[T]): Unit = {
- getState match {
+ get() match {
case r: Try[_] => runnable.executeWithValue(r.asInstanceOf[Try[T]])
- case _: DefaultPromise[_] => compressedRoot().dispatchOrAddCallback(runnable)
- case listeners: List[_] => if (updateState(listeners, runnable :: listeners)) () else dispatchOrAddCallback(runnable)
+ case dp: DefaultPromise[_] => compressedRoot(dp).dispatchOrAddCallback(runnable)
+ case listeners: List[_] => if (compareAndSet(listeners, runnable :: listeners)) ()
+ else dispatchOrAddCallback(runnable)
}
}
/** Link this promise to the root of another promise using `link()`. Should only be
- * be called by Future.flatMap.
+ * called by transformWith.
*/
protected[concurrent] final def linkRootOf(target: DefaultPromise[T]): Unit = link(target.compressedRoot())
@@ -299,18 +330,17 @@ private[concurrent] object Promise {
*/
@tailrec
private def link(target: DefaultPromise[T]): Unit = if (this ne target) {
- getState match {
+ get() match {
case r: Try[_] =>
- if (!target.tryComplete(r.asInstanceOf[Try[T]])) {
- // Currently linking is done from Future.flatMap, which should ensure only
- // one promise can be completed. Therefore this situation is unexpected.
+ if (!target.tryComplete(r.asInstanceOf[Try[T]]))
throw new IllegalStateException("Cannot link completed promises together")
- }
- case _: DefaultPromise[_] =>
- compressedRoot().link(target)
- case listeners: List[_] => if (updateState(listeners, target)) {
- if (!listeners.isEmpty) listeners.asInstanceOf[List[CallbackRunnable[T]]].foreach(target.dispatchOrAddCallback(_))
- } else link(target)
+ case dp: DefaultPromise[_] =>
+ compressedRoot(dp).link(target)
+ case listeners: List[_] if compareAndSet(listeners, target) =>
+ if (listeners.nonEmpty)
+ listeners.asInstanceOf[List[CallbackRunnable[T]]].foreach(target.dispatchOrAddCallback(_))
+ case _ =>
+ link(target)
}
}
}
@@ -319,23 +349,58 @@ private[concurrent] object Promise {
*
* Useful in Future-composition when a value to contribute is already available.
*/
- final class KeptPromise[T](suppliedValue: Try[T]) extends Promise[T] {
+ object KeptPromise {
+ import scala.concurrent.Future
+ import scala.reflect.ClassTag
+
+ private[this] sealed trait Kept[T] extends Promise[T] {
+ def result: Try[T]
+
+ override def value: Option[Try[T]] = Some(result)
- val value = Some(resolveTry(suppliedValue))
+ override def isCompleted: Boolean = true
- override def isCompleted: Boolean = true
+ override def tryComplete(value: Try[T]): Boolean = false
- def tryComplete(value: Try[T]): Boolean = false
+ override def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit =
+ (new CallbackRunnable(executor.prepare(), func)).executeWithValue(result)
- def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = {
- val completedAs = value.get
- val preparedEC = executor.prepare()
- (new CallbackRunnable(preparedEC, func)).executeWithValue(completedAs)
+ override def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this
+
+ override def result(atMost: Duration)(implicit permit: CanAwait): T = result.get
}
- def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this
+ private[this] final class Successful[T](val result: Success[T]) extends Kept[T] {
+ override def onFailure[U](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = ()
+ override def failed: Future[Throwable] = KeptPromise(Failure(new NoSuchElementException("Future.failed not completed with a throwable."))).future
+ override def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = this
+ override def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = this
+ override def fallbackTo[U >: T](that: Future[U]): Future[U] = this
+ }
- def result(atMost: Duration)(implicit permit: CanAwait): T = value.get.get
+ private[this] final class Failed[T](val result: Failure[T]) extends Kept[T] {
+ private[this] final def thisAs[S]: Future[S] = future.asInstanceOf[Future[S]]
+
+ override def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = ()
+ override def failed: Future[Throwable] = thisAs[Throwable]
+ override def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = ()
+ override def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = thisAs[S]
+ override def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = thisAs[S]
+ override def flatten[S](implicit ev: T <:< Future[S]): Future[S] = thisAs[S]
+ override def filter(p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = this
+ override def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = thisAs[S]
+ override def zip[U](that: Future[U]): Future[(T, U)] = thisAs[(T,U)]
+ override def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = thisAs[R]
+ override def fallbackTo[U >: T](that: Future[U]): Future[U] =
+ if (this eq that) this else that.recoverWith({ case _ => this })(InternalCallbackExecutor)
+ override def mapTo[S](implicit tag: ClassTag[S]): Future[S] = thisAs[S]
+ }
+
+ def apply[T](result: Try[T]): scala.concurrent.Promise[T] =
+ resolveTry(result) match {
+ case s @ Success(_) => new Successful(s)
+ case f @ Failure(_) => new Failed(f)
+ }
}
}
diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala
index 74c3e06839..dbd6a5f6f2 100644
--- a/src/library/scala/io/Source.scala
+++ b/src/library/scala/io/Source.scala
@@ -10,7 +10,7 @@ package scala
package io
import scala.collection.AbstractIterator
-import java.io.{ FileInputStream, InputStream, PrintStream, File => JFile }
+import java.io.{ FileInputStream, InputStream, PrintStream, File => JFile, Closeable }
import java.net.{ URI, URL }
/** This object provides convenience methods to create an iterable
@@ -176,7 +176,7 @@ object Source {
* @author Burak Emir
* @version 1.0
*/
-abstract class Source extends Iterator[Char] {
+abstract class Source extends Iterator[Char] with Closeable {
/** the actual iterator */
protected val iter: Iterator[Char]
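Since `Source` now implements `java.io.Closeable`, it can be handed to any resource-management helper that expects a `Closeable`; a minimal sketch with a hypothetical file path:

  import scala.io.Source

  val src = Source.fromFile("/tmp/example.txt") // hypothetical path
  try src.getLines().foreach(println)
  finally src.close()
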
diff --git a/src/library/scala/runtime/BoxesRunTime.java b/src/library/scala/runtime/BoxesRunTime.java
index 82a3b00ac4..a6df20165d 100644
--- a/src/library/scala/runtime/BoxesRunTime.java
+++ b/src/library/scala/runtime/BoxesRunTime.java
@@ -183,7 +183,7 @@ public final class BoxesRunTime
return xc.equals(y);
}
- private static boolean equalsNumChar(java.lang.Number xn, java.lang.Character yc) {
+ public static boolean equalsNumChar(java.lang.Number xn, java.lang.Character yc) {
if (yc == null)
return xn == null;
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index f50059ce54..6c69ebae9b 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -13,6 +13,7 @@ import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator }
import scala.collection.mutable.WrappedArray
import scala.collection.immutable.{ StringLike, NumericRange, List, Stream, Nil, :: }
import scala.collection.generic.{ Sorted, IsTraversableLike }
+import scala.collection.parallel.ParIterable
import scala.reflect.{ ClassTag, classTag }
import scala.util.control.ControlThrowable
import java.lang.{ Class => jClass }
@@ -326,6 +327,7 @@ object ScalaRunTime {
case x: AnyRef if isArray(x) => arrayToString(x)
case x: scala.collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")")
case x: Iterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
+ case x: ParIterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma
case x: Product if isTuple(x) => x.productIterator map inner mkString ("(", ",", ")")
diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala
index b31bbf0540..866dac4458 100644
--- a/src/library/scala/sys/process/BasicIO.scala
+++ b/src/library/scala/sys/process/BasicIO.scala
@@ -221,7 +221,7 @@ object BasicIO {
*/
def transferFully(in: InputStream, out: OutputStream): Unit =
try transferFullyImpl(in, out)
- catch onInterrupt(())
+ catch onIOInterrupt(())
private[this] def appendLine(buffer: Appendable): String => Unit = line => {
buffer append line
diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala
index 2b7fcdeb73..d15f1a2b3d 100644
--- a/src/library/scala/sys/process/ProcessImpl.scala
+++ b/src/library/scala/sys/process/ProcessImpl.scala
@@ -109,45 +109,46 @@ private[process] trait ProcessImpl {
}
private[process] class PipedProcesses(a: ProcessBuilder, b: ProcessBuilder, defaultIO: ProcessIO, toError: Boolean) extends CompoundProcess {
- protected[this] override def runAndExitValue() = {
- val currentSource = new SyncVar[Option[InputStream]]
- val pipeOut = new PipedOutputStream
- val source = new PipeSource(currentSource, pipeOut, a.toString)
+ protected[this] override def runAndExitValue() = runAndExitValue(new PipeSource(a.toString), new PipeSink(b.toString))
+ protected[this] def runAndExitValue(source: PipeSource, sink: PipeSink): Option[Int] = {
+ source connectOut sink
source.start()
-
- val pipeIn = new PipedInputStream(pipeOut)
- val currentSink = new SyncVar[Option[OutputStream]]
- val sink = new PipeSink(pipeIn, currentSink, b.toString)
sink.start()
- def handleOutOrError(fromOutput: InputStream) = currentSource put Some(fromOutput)
+ /** Release PipeSource, PipeSink and Process in the correct order.
+ * Once a Process has been connected to a Source or a Sink, they must be released
+ * in the order Source -> Sink -> Process, otherwise an IOException will be thrown. */
+ def releaseResources(so: PipeSource, sk: PipeSink, p: Process *) = {
+ so.release()
+ sk.release()
+ p foreach( _.destroy() )
+ }
val firstIO =
- if (toError)
- defaultIO.withError(handleOutOrError)
- else
- defaultIO.withOutput(handleOutOrError)
- val secondIO = defaultIO.withInput(toInput => currentSink put Some(toInput))
-
- val second = b.run(secondIO)
- val first = a.run(firstIO)
- try {
- runInterruptible {
- val exit1 = first.exitValue()
- currentSource put None
- currentSink put None
- val exit2 = second.exitValue()
- // Since file redirection (e.g. #>) is implemented as a piped process,
- // we ignore its exit value so cmd #> file doesn't always return 0.
- if (b.hasExitValue) exit2 else exit1
- } {
- first.destroy()
- second.destroy()
+ if (toError) defaultIO.withError(source.connectIn)
+ else defaultIO.withOutput(source.connectIn)
+ val secondIO = defaultIO.withInput(sink.connectOut)
+
+ val second =
+ try b.run(secondIO)
+ catch onError { err =>
+ releaseResources(source, sink)
+ throw err
}
- }
- finally {
- BasicIO close pipeIn
- BasicIO close pipeOut
+ val first =
+ try a.run(firstIO)
+ catch onError { err =>
+ releaseResources(source, sink, second)
+ throw err
+ }
+ runInterruptible {
+ val exit1 = first.exitValue()
+ val exit2 = second.exitValue()
+ // Since file redirection (e.g. #>) is implemented as a piped process,
+ // we ignore its exit value so cmd #> file doesn't always return 0.
+ if (b.hasExitValue) exit2 else exit1
+ } {
+ releaseResources(source, sink, first, second)
}
}
}
@@ -168,37 +169,46 @@ private[process] trait ProcessImpl {
}
}
- private[process] class PipeSource(
- currentSource: SyncVar[Option[InputStream]],
- pipe: PipedOutputStream,
- label: => String
- ) extends PipeThread(false, () => label) {
-
- final override def run(): Unit = currentSource.get match {
- case Some(source) =>
- try runloop(source, pipe)
- finally currentSource.unset()
-
- run()
- case None =>
- currentSource.unset()
- BasicIO close pipe
+ private[process] class PipeSource(label: => String) extends PipeThread(false, () => label) {
+ protected[this] val pipe = new PipedOutputStream
+ protected[this] val source = new LinkedBlockingQueue[Option[InputStream]]
+ override def run(): Unit = {
+ try {
+ source.take match {
+ case Some(in) => runloop(in, pipe)
+ case None =>
+ }
+ }
+ catch onInterrupt(())
+ finally BasicIO close pipe
+ }
+ def connectIn(in: InputStream): Unit = source add Some(in)
+ def connectOut(sink: PipeSink): Unit = sink connectIn pipe
+ def release(): Unit = {
+ interrupt()
+ source add None
+ join()
}
}
- private[process] class PipeSink(
- pipe: PipedInputStream,
- currentSink: SyncVar[Option[OutputStream]],
- label: => String
- ) extends PipeThread(true, () => label) {
-
- final override def run(): Unit = currentSink.get match {
- case Some(sink) =>
- try runloop(pipe, sink)
- finally currentSink.unset()
-
- run()
- case None =>
- currentSink.unset()
+ private[process] class PipeSink(label: => String) extends PipeThread(true, () => label) {
+ protected[this] val pipe = new PipedInputStream
+ protected[this] val sink = new LinkedBlockingQueue[Option[OutputStream]]
+ override def run(): Unit = {
+ try {
+ sink.take match {
+ case Some(out) => runloop(pipe, out)
+ case None =>
+ }
+ }
+ catch onInterrupt(())
+ finally BasicIO close pipe
+ }
+ def connectOut(out: OutputStream): Unit = sink add Some(out)
+ def connectIn(pipeOut: PipedOutputStream): Unit = pipe connect pipeOut
+ def release(): Unit = {
+ interrupt()
+ sink add None
+ join()
}
}
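For context, the rewritten plumbing above is what backs ordinary pipelines such as the sketch below; if either process fails to start, the PipeSource and PipeSink are released in the Source -> Sink -> Process order described in the comment:

  import scala.sys.process._

  // `#|` builds a PipedProcesses under the hood; `!!` runs it and captures stdout.
  val shouted: String = ("echo hello" #| "tr a-z A-Z").!!
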
diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala
index 1340a6c415..91fa99e3df 100644
--- a/src/library/scala/sys/process/package.scala
+++ b/src/library/scala/sys/process/package.scala
@@ -224,16 +224,26 @@ package scala.sys {
final val processDebug = props contains "scala.process.debug"
dbg("Initializing process package.")
- type =?>[-A, +B] = PartialFunction[A, B]
- type Closeable = java.io.Closeable
- type File = java.io.File
- type IOException = java.io.IOException
- type InputStream = java.io.InputStream
- type JProcess = java.lang.Process
- type JProcessBuilder = java.lang.ProcessBuilder
- type OutputStream = java.io.OutputStream
- type SyncVar[T] = scala.concurrent.SyncVar[T]
- type URL = java.net.URL
+ type =?>[-A, +B] = PartialFunction[A, B]
+ type Closeable = java.io.Closeable
+ type File = java.io.File
+ type IOException = java.io.IOException
+ type InterruptedIOException = java.io.InterruptedIOException
+ type InputStream = java.io.InputStream
+ type JProcess = java.lang.Process
+ type JProcessBuilder = java.lang.ProcessBuilder
+ type LinkedBlockingQueue[T] = java.util.concurrent.LinkedBlockingQueue[T]
+ type OutputStream = java.io.OutputStream
+ type SyncVar[T] = scala.concurrent.SyncVar[T]
+ type URL = java.net.URL
+
+ def onError[T](handler: Throwable => T): Throwable =?> T = {
+ case e @ _ => handler(e)
+ }
+
+ def onIOInterrupt[T](handler: => T): Throwable =?> T = {
+ case _: InterruptedIOException => handler
+ }
def onInterrupt[T](handler: => T): Throwable =?> T = {
case _: InterruptedException => handler
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index b0cf122f2a..f08cc18f7d 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -74,16 +74,11 @@ sealed abstract class Try[+T] {
*
* ''Note:'': This will throw an exception if it is not a success and default throws an exception.
*/
- def getOrElse[U >: T](default: => U): U =
- if (isSuccess) get else default
+ def getOrElse[U >: T](default: => U): U
/** Returns this `Try` if it's a `Success` or the given `default` argument if this is a `Failure`.
*/
- def orElse[U >: T](default: => Try[U]): Try[U] =
- try if (isSuccess) this else default
- catch {
- case NonFatal(e) => Failure(e)
- }
+ def orElse[U >: T](default: => Try[U]): Try[U]
/** Returns the value from this `Success` or throws the exception if this is a `Failure`.
*/
@@ -107,6 +102,11 @@ sealed abstract class Try[+T] {
def map[U](f: T => U): Try[U]
/**
+ * Applies the given partial function to the value from this `Success` or returns this if this is a `Failure`.
+ */
+ def collect[U](pf: PartialFunction[T, U]): Try[U]
+
+ /**
* Converts this to a `Failure` if the predicate is not satisfied.
*/
def filter(p: T => Boolean): Try[T]
@@ -133,6 +133,7 @@ sealed abstract class Try[+T] {
* collection" contract even though it seems unlikely to matter much in a
* collection with max size 1.
*/
+ @deprecatedInheritance("You were never supposed to be able to extend this class.", "2.12")
class WithFilter(p: T => Boolean) {
def map[U](f: T => U): Try[U] = Try.this filter p map f
def flatMap[U](f: T => Try[U]): Try[U] = Try.this filter p flatMap f
@@ -144,18 +145,18 @@ sealed abstract class Try[+T] {
* Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`.
* This is like `flatMap` for the exception.
*/
- def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U]
+ def recoverWith[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, Try[U]]): Try[U]
/**
* Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`.
* This is like map for the exception.
*/
- def recover[U >: T](f: PartialFunction[Throwable, U]): Try[U]
+ def recover[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, U]): Try[U]
/**
* Returns `None` if this is a `Failure` or a `Some` containing the value if this is a `Success`.
*/
- def toOption: Option[T] = if (isSuccess) Some(get) else None
+ def toOption: Option[T]
/**
* Transforms a nested `Try`, ie, a `Try` of type `Try[Try[T]]`,
@@ -172,14 +173,7 @@ sealed abstract class Try[+T] {
/** Completes this `Try` by applying the function `f` to this if this is of type `Failure`, or conversely, by applying
* `s` if this is a `Success`.
*/
- def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] =
- try this match {
- case Success(v) => s(v)
- case Failure(e) => f(e)
- } catch {
- case NonFatal(e) => Failure(e)
- }
-
+ def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U]
}
object Try {
@@ -191,57 +185,55 @@ object Try {
try Success(r) catch {
case NonFatal(e) => Failure(e)
}
-
}
final case class Failure[+T](exception: Throwable) extends Try[T] {
- def isFailure: Boolean = true
- def isSuccess: Boolean = false
- def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] =
- try {
- if (f isDefinedAt exception) f(exception) else this
- } catch {
- case NonFatal(e) => Failure(e)
- }
- def get: T = throw exception
- def flatMap[U](f: T => Try[U]): Try[U] = this.asInstanceOf[Try[U]]
- def flatten[U](implicit ev: T <:< Try[U]): Try[U] = this.asInstanceOf[Try[U]]
- def foreach[U](f: T => U): Unit = ()
- def map[U](f: T => U): Try[U] = this.asInstanceOf[Try[U]]
- def filter(p: T => Boolean): Try[T] = this
- def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] =
- try {
- if (rescueException isDefinedAt exception) {
- Try(rescueException(exception))
- } else this
- } catch {
- case NonFatal(e) => Failure(e)
- }
- def failed: Try[Throwable] = Success(exception)
+ override def isFailure: Boolean = true
+ override def isSuccess: Boolean = false
+ override def get: T = throw exception
+ override def getOrElse[U >: T](default: => U): U = default
+ override def orElse[U >: T](default: => Try[U]): Try[U] =
+ try default catch { case NonFatal(e) => Failure(e) }
+ override def flatMap[U](f: T => Try[U]): Try[U] = this.asInstanceOf[Try[U]]
+ override def flatten[U](implicit ev: T <:< Try[U]): Try[U] = this.asInstanceOf[Try[U]]
+ override def foreach[U](f: T => U): Unit = ()
+ override def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] =
+ try f(exception) catch { case NonFatal(e) => Failure(e) }
+ override def map[U](f: T => U): Try[U] = this.asInstanceOf[Try[U]]
+ override def collect[U](pf: PartialFunction[T, U]): Try[U] = this.asInstanceOf[Try[U]]
+ override def filter(p: T => Boolean): Try[T] = this
+ override def recover[U >: T](@deprecatedName('rescueException) pf: PartialFunction[Throwable, U]): Try[U] =
+ try { if (pf isDefinedAt exception) Success(pf(exception)) else this } catch { case NonFatal(e) => Failure(e) }
+ override def recoverWith[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, Try[U]]): Try[U] =
+ try { if (pf isDefinedAt exception) pf(exception) else this } catch { case NonFatal(e) => Failure(e) }
+ override def failed: Try[Throwable] = Success(exception)
+ override def toOption: Option[T] = None
}
final case class Success[+T](value: T) extends Try[T] {
- def isFailure: Boolean = false
- def isSuccess: Boolean = true
- def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] = this
- def get = value
- def flatMap[U](f: T => Try[U]): Try[U] =
- try f(value)
- catch {
- case NonFatal(e) => Failure(e)
- }
- def flatten[U](implicit ev: T <:< Try[U]): Try[U] = value
- def foreach[U](f: T => U): Unit = f(value)
- def map[U](f: T => U): Try[U] = Try[U](f(value))
- def filter(p: T => Boolean): Try[T] = {
+ override def isFailure: Boolean = false
+ override def isSuccess: Boolean = true
+ override def get = value
+ override def getOrElse[U >: T](default: => U): U = get
+ override def orElse[U >: T](default: => Try[U]): Try[U] = this
+ override def flatMap[U](f: T => Try[U]): Try[U] =
+ try f(value) catch { case NonFatal(e) => Failure(e) }
+ override def flatten[U](implicit ev: T <:< Try[U]): Try[U] = value
+ override def foreach[U](f: T => U): Unit = f(value)
+ override def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] = this flatMap s
+ override def map[U](f: T => U): Try[U] = Try[U](f(value))
+ override def collect[U](pf: PartialFunction[T, U]): Try[U] =
try {
- if (p(value)) this
+ if (pf isDefinedAt value) Success(pf(value))
else Failure(new NoSuchElementException("Predicate does not hold for " + value))
- } catch {
- case NonFatal(e) => Failure(e)
- }
- }
- def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] = this
- def failed: Try[Throwable] = Failure(new UnsupportedOperationException("Success.failed"))
+ } catch { case NonFatal(e) => Failure(e) }
+ override def filter(p: T => Boolean): Try[T] =
+ try {
+ if (p(value)) this else Failure(new NoSuchElementException("Predicate does not hold for " + value))
+ } catch { case NonFatal(e) => Failure(e) }
+ override def recover[U >: T](@deprecatedName('rescueException) pf: PartialFunction[Throwable, U]): Try[U] = this
+ override def recoverWith[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, Try[U]]): Try[U] = this
+ override def failed: Try[Throwable] = Failure(new UnsupportedOperationException("Success.failed"))
+ override def toOption: Option[T] = Some(value)
}
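A brief sketch of the user-visible behaviour, including the new `collect`, which the concrete overrides above preserve:

  import scala.util.{ Try, Success, Failure }

  val parsed: Try[Int] = Try("42".toInt)                    // Success(42)

  // collect: an unmatched value becomes a Failure(NoSuchElementException).
  val small: Try[Int] = parsed.collect { case n if n < 100 => n }

  // recover and getOrElse behave as before, now implemented directly in Success/Failure.
  val fallback: Int = Try("oops".toInt).recover { case _: NumberFormatException => -1 }.getOrElse(0)
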
diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala
index 4a35e024de..2fca99aff9 100644
--- a/src/reflect/scala/reflect/internal/Mirrors.scala
+++ b/src/reflect/scala/reflect/internal/Mirrors.scala
@@ -180,7 +180,7 @@ trait Mirrors extends api.Mirrors {
def getPackageObject(fullname: String): ModuleSymbol = getPackageObject(newTermName(fullname))
def getPackageObject(fullname: TermName): ModuleSymbol =
- (getPackage(fullname).info member nme.PACKAGE) match {
+ (getPackage(fullname).packageObject) match {
case x: ModuleSymbol => x
case _ => MissingRequirementError.notFound("package object " + fullname)
}
@@ -191,15 +191,6 @@ trait Mirrors extends api.Mirrors {
def getPackageObjectIfDefined(fullname: TermName): Symbol =
wrapMissing(getPackageObject(fullname))
- final def getPackageObjectWithMember(pre: Type, sym: Symbol): Symbol = {
- // The owner of a symbol which requires package qualification may be the
- // package object iself, but it also could be any superclass of the package
- // object. In the latter case, we must go through the qualifier's info
- // to obtain the right symbol.
- if (sym.owner.isModuleClass) sym.owner.sourceModule // fast path, if the member is owned by a module class, that must be linked to the package object
- else pre member nme.PACKAGE // otherwise we have to findMember
- }
-
override def staticPackage(fullname: String): ModuleSymbol =
try ensurePackageSymbol(fullname.toString, getModuleOrClass(newTermNameCached(fullname)), allowModules = false)
catch { case mre: MissingRequirementError => throw new ScalaReflectionException(mre.msg) }
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index 99ff6a10b4..b79036724d 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -111,6 +111,7 @@ trait StdNames {
val PACKAGE: NameType = "package"
val ROOT: NameType = "<root>"
val SPECIALIZED_SUFFIX: NameType = "$sp"
+ val CASE_ACCESSOR: NameType = "$access"
// value types (and AnyRef) are all used as terms as well
// as (at least) arguments to the @specialize annotation.
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index ed5c68fe82..7539b6e046 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -332,7 +332,7 @@ abstract class SymbolTable extends macros.Universe
/** if there's a `package` member object in `pkgClass`, enter its members into it. */
def openPackageModule(pkgClass: Symbol) {
- val pkgModule = pkgClass.info.decl(nme.PACKAGEkw)
+ val pkgModule = pkgClass.packageObject
def fromSource = pkgModule.rawInfo match {
case ltp: SymLoader => ltp.fromSource
case _ => false
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 2c039ab5a7..aa62df7093 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -810,6 +810,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def skipPackageObject: Symbol = this
+ /** The package object symbol corresponding to this package or package class symbol, or NoSymbol otherwise */
+ def packageObject: Symbol =
+ if (isPackageClass) tpe.packageObject
+ else if (isPackage) moduleClass.packageObject
+ else NoSymbol
+
/** If this is a constructor, its owner: otherwise this.
*/
final def skipConstructor: Symbol = if (isConstructor) owner else this
@@ -3364,13 +3370,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def implicitMembers: Scope = {
val tp = info
if ((implicitMembersCacheKey1 ne tp) || (implicitMembersCacheKey2 ne tp.decls.elems)) {
- // Skip a package object class, because the members are also in
- // the package and we wish to avoid spurious ambiguities as in pos/t3999.
- if (!isPackageObjectClass) {
- implicitMembersCacheValue = tp.implicitMembers
- implicitMembersCacheKey1 = tp
- implicitMembersCacheKey2 = tp.decls.elems
- }
+ implicitMembersCacheValue = tp.membersBasedOnFlags(BridgeFlags, IMPLICIT)
+ implicitMembersCacheKey1 = tp
+ implicitMembersCacheKey2 = tp.decls.elems
}
implicitMembersCacheValue
}
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index 6e8e992d16..6ddd49045c 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -191,8 +191,8 @@ abstract class TreeGen {
)
val pkgQualifier =
if (needsPackageQualifier) {
- val packageObject = rootMirror.getPackageObjectWithMember(qual.tpe, sym)
- Select(qual, nme.PACKAGE) setSymbol packageObject setType singleType(qual.tpe, packageObject)
+ val packageObject = qualsym.packageObject
+ Select(qual, nme.PACKAGE) setSymbol packageObject setType packageObject.typeOfThis
}
else qual
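// Illustrative only: an assumed, user-level sketch (not taken from the patch) of the shape of code
// the package-object qualifier logic above deals with. A member declared in a package object is
// referenced through the package name, and the typer qualifies the selection through the synthetic
// `package` module. The names q, answer and UseQ are hypothetical.
package object q {
  def answer: Int = 42
}

object UseQ {
  // Typed as a selection through q.`package`, which is what the rewritten pkgQualifier constructs.
  val x: Int = q.answer
}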
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index a95f626a0b..325de013d7 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -589,7 +589,12 @@ trait Types
def nonPrivateMembersAdmitting(admit: Long): Scope = membersBasedOnFlags(BridgeAndPrivateFlags & ~admit, 0)
/** A list of all implicit symbols of this type (defined or inherited) */
- def implicitMembers: Scope = membersBasedOnFlags(BridgeFlags, IMPLICIT)
+ def implicitMembers: Scope = {
+ typeSymbolDirect match {
+ case sym: ModuleClassSymbol => sym.implicitMembers
+ case _ => membersBasedOnFlags(BridgeFlags, IMPLICIT)
+ }
+ }
/** A list of all deferred symbols of this type (defined or inherited) */
def deferredMembers: Scope = membersBasedOnFlags(BridgeFlags, DEFERRED)
@@ -606,6 +611,8 @@ trait Types
def nonPrivateMember(name: Name): Symbol =
memberBasedOnName(name, BridgeAndPrivateFlags)
+ def packageObject: Symbol = member(nme.PACKAGE)
+
/** The non-private member with given name, admitting members with given flags `admit`.
* "Admitting" refers to the fact that members with a PRIVATE, BRIDGE, or VBRIDGE
* flag are usually excluded from findMember results, but supplying any of those flags
@@ -1600,7 +1607,14 @@ trait Types
private var normalized: Type = _
private def normalizeImpl = {
// TODO see comments around def intersectionType and def merge
- def flatten(tps: List[Type]): List[Type] = tps flatMap { case RefinedType(parents, ds) if ds.isEmpty => flatten(parents) case tp => List(tp) }
+ // SI-8575 The dealias is needed here to keep subtyping transitive, example in run/t8575b.scala
+ def flatten(tps: List[Type]): List[Type] = {
+ def dealiasRefinement(tp: Type) = if (tp.dealias.isInstanceOf[RefinedType]) tp.dealias else tp
+ tps map dealiasRefinement flatMap {
+ case RefinedType(parents, ds) if ds.isEmpty => flatten(parents)
+ case tp => List(tp)
+ }
+ }
val flattened = flatten(parents).distinct
if (decls.isEmpty && hasLength(flattened, 1)) {
flattened.head
diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala
index d5b5967145..ac7839bcfd 100644
--- a/src/reflect/scala/reflect/internal/transform/Erasure.scala
+++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala
@@ -280,8 +280,17 @@ trait Erasure {
}
object boxingErasure extends ScalaErasureMap {
+ private var boxPrimitives = true
+
+ override def applyInArray(tp: Type): Type = {
+ val saved = boxPrimitives
+ boxPrimitives = false
+ try super.applyInArray(tp)
+ finally boxPrimitives = saved
+ }
+
override def eraseNormalClassRef(tref: TypeRef) =
- if (isPrimitiveValueClass(tref.sym)) boxedClass(tref.sym).tpe
+ if (boxPrimitives && isPrimitiveValueClass(tref.sym)) boxedClass(tref.sym).tpe
else super.eraseNormalClassRef(tref)
override def eraseDerivedValueClassRef(tref: TypeRef) =
super.eraseNormalClassRef(tref)
diff --git a/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
index cc217d2f80..f853df0484 100755
--- a/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
@@ -62,15 +62,15 @@ trait MemberLookupBase {
syms.flatMap { case (sym, owner) =>
// reconstruct the original link
def linkName(sym: Symbol) = {
- def nameString(s: Symbol) = s.nameString + (if ((s.isModule || s.isModuleClass) && !s.isPackage) "$" else "")
- val packageSuffix = if (sym.isPackage) ".package" else ""
+ def nameString(s: Symbol) = s.nameString + (if ((s.isModule || s.isModuleClass) && !s.hasPackageFlag) "$" else "")
+ val packageSuffix = if (sym.hasPackageFlag) ".package" else ""
sym.ownerChain.reverse.filterNot(isRoot(_)).map(nameString(_)).mkString(".") + packageSuffix
}
- if (sym.isClass || sym.isModule || sym.isTrait || sym.isPackage)
+ if (sym.isClass || sym.isModule || sym.isTrait || sym.hasPackageFlag)
findExternalLink(sym, linkName(sym))
- else if (owner.isClass || owner.isModule || owner.isTrait || owner.isPackage)
+ else if (owner.isClass || owner.isModule || owner.isTrait || owner.hasPackageFlag)
findExternalLink(sym, linkName(owner) + "@" + externalSignature(sym))
else
None
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
index b5a8d1ac36..45cef88f7a 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
@@ -143,7 +143,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
{ if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty) NodeSeq.Empty else
{
if (!tpl.linearizationTemplates.isEmpty)
- <div id="ancestors">
+ <div class="ancestors">
<span class="filtertype">Inherited<br/>
</span>
<ol id="linearization">
@@ -153,7 +153,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
else NodeSeq.Empty
} ++ {
if (!tpl.conversions.isEmpty)
- <div id="ancestors">
+ <div class="ancestors">
<span class="filtertype">Implicitly<br/>
</span>
<ol id="implicits"> {
@@ -167,7 +167,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
</div>
else NodeSeq.Empty
} ++
- <div id="ancestors">
+ <div class="ancestors">
<span class="filtertype"></span>
<ol>
<li class="hideall out"><span>Hide All</span></li>
@@ -201,28 +201,28 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
}
{ if (absValueMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="values members">
+ <div class="values members">
<h3>Abstract Value Members</h3>
<ol>{ absValueMembers map (memberToHtml(_, tpl)) }</ol>
</div>
}
{ if (concValueMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="values members">
+ <div class="values members">
<h3>{ if (absValueMembers.isEmpty) "Value Members" else "Concrete Value Members" }</h3>
<ol>{ concValueMembers map (memberToHtml(_, tpl)) }</ol>
</div>
}
{ if (shadowedImplicitMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="values members">
+ <div class="values members">
<h3>Shadowed Implicit Value Members</h3>
<ol>{ shadowedImplicitMembers map (memberToHtml(_, tpl)) }</ol>
</div>
}
{ if (deprValueMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="values members">
+ <div class="values members">
<h3>Deprecated Value Members</h3>
<ol>{ deprValueMembers map (memberToHtml(_, tpl)) }</ol>
</div>
@@ -287,13 +287,19 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
}
def memberToHtml(mbr: MemberEntity, inTpl: DocTemplateEntity): NodeSeq = {
+ // Sometimes it's the same; do we still need signatureCompat?
+ val sig = if (mbr.signature == mbr.signatureCompat) {
+ <a id={ mbr.signature }/>
+ } else {
+ <a id={ mbr.signature }/><a id={ mbr.signatureCompat }/>
+ }
+
val memberComment = memberToCommentHtml(mbr, inTpl, isSelf = false)
<li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }
data-isabs={ mbr.isAbstract.toString }
fullComment={ if(memberComment.filter(_.label=="div").isEmpty) "no" else "yes" }
group={ mbr.group }>
- <a id={ mbr.signature }/>
- <a id={ mbr.signatureCompat }/>
+ { sig }
{ signature(mbr, isSelf = false) }
{ memberComment }
</li>
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
index 4ff436bdc6..dc823ab1e5 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
@@ -364,7 +364,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
// add an id and class attribute to the SVG element
case Elem(prefix, "svg", attribs, scope, child @ _*) => {
val klass = if (isInheritanceDiagram) "class-diagram" else "package-diagram"
- Elem(prefix, "svg", attribs, scope, child map(x => transform(x)) : _*) %
+ Elem(prefix, "svg", attribs, scope, true, child map(x => transform(x)) : _*) %
new UnprefixedAttribute("id", "graph" + counter, Null) %
new UnprefixedAttribute("class", klass, Null)
}
@@ -378,7 +378,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
// assign id and class attributes to edges and nodes:
// the id attribute generated by dot has the format: "{class}|{id}"
case g @ Elem(prefix, "g", attribs, scope, children @ _*) if (List("edge", "node").contains((g \ "@class").toString)) => {
- var res = new Elem(prefix, "g", attribs, scope, (children map(x => transform(x))): _*)
+ var res = new Elem(prefix, "g", attribs, scope, true, (children map(x => transform(x))): _*)
val dotId = (g \ "@id").toString
if (dotId.count(_ == '|') == 1) {
val Array(klass, id) = dotId.toString.split("\\|")
@@ -395,11 +395,11 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
val imageNode = <image xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href={ ("./lib/" + kind + "_diagram.png") } width="16px" height="16px" preserveAspectRatio="xMinYMin meet" x={ xposition.get.toString } y={ yposition.get.toString }/>
val anchorNode = (g \ "a") match {
case Seq(Elem(prefix, "a", attribs, scope, children @ _*)) =>
- transform(new Elem(prefix, "a", attribs, scope, (children ++ imageNode): _*))
+ transform(new Elem(prefix, "a", attribs, scope, true, (children ++ imageNode): _*))
case _ =>
g \ "a"
}
- res = new Elem(prefix, "g", attribs, scope, anchorNode: _*)
+ res = new Elem(prefix, "g", attribs, scope, true, anchorNode: _*)
DiagramStats.addFixedImage()
}
}
@@ -413,7 +413,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
scala.xml.Text("")
// apply recursively
case Elem(prefix, label, attribs, scope, child @ _*) =>
- Elem(prefix, label, attribs, scope, child map(x => transform(x)) : _*)
+ Elem(prefix, label, attribs, scope, true, child map(x => transform(x)) : _*)
case x => x
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
index 35f66cd5df..6d94452f3a 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
@@ -217,7 +217,7 @@ dl.attributes > dd {
height: 18px;
}
-#values ol li:last-child {
+.values ol li:last-child {
margin-bottom: 5px;
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
index 1ebcb67f04..5ef03848b2 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
@@ -147,19 +147,19 @@ $(document).ready(function(){
filter();
});
- $("#mbrsel > div[id=ancestors] > ol > li.hideall").click(function() {
+ $("#mbrsel > div.ancestors > ol > li.hideall").click(function() {
$("#linearization li.in").removeClass("in").addClass("out");
$("#linearization li:first").removeClass("out").addClass("in");
$("#implicits li.in").removeClass("in").addClass("out");
- if ($(this).hasClass("out") && $("#mbrsel > div[id=ancestors] > ol > li.showall").hasClass("in")) {
+ if ($(this).hasClass("out") && $("#mbrsel > div.ancestors > ol > li.showall").hasClass("in")) {
$(this).removeClass("out").addClass("in");
- $("#mbrsel > div[id=ancestors] > ol > li.showall").removeClass("in").addClass("out");
+ $("#mbrsel > div.ancestors > ol > li.showall").removeClass("in").addClass("out");
}
filter();
})
- $("#mbrsel > div[id=ancestors] > ol > li.showall").click(function() {
+ $("#mbrsel > div.ancestors > ol > li.showall").click(function() {
var filteredLinearization =
$("#linearization li.out").filter(function() {
return ! isHiddenClass($(this).attr("name"));
@@ -172,9 +172,9 @@ $(document).ready(function(){
});
filteredImplicits.removeClass("out").addClass("in");
- if ($(this).hasClass("out") && $("#mbrsel > div[id=ancestors] > ol > li.hideall").hasClass("in")) {
+ if ($(this).hasClass("out") && $("#mbrsel > div.ancestors > ol > li.hideall").hasClass("in")) {
$(this).removeClass("out").addClass("in");
- $("#mbrsel > div[id=ancestors] > ol > li.hideall").removeClass("in").addClass("out");
+ $("#mbrsel > div.ancestors > ol > li.hideall").removeClass("in").addClass("out");
}
filter();
@@ -275,7 +275,7 @@ function orderAlpha() {
$("#order > ol > li.group").removeClass("in").addClass("out");
$("#template > div.parent").hide();
$("#template > div.conversion").hide();
- $("#mbrsel > div[id=ancestors]").show();
+ $("#mbrsel > div.ancestors").show();
filter();
};
@@ -285,7 +285,7 @@ function orderInherit() {
$("#order > ol > li.group").removeClass("in").addClass("out");
$("#template > div.parent").show();
$("#template > div.conversion").show();
- $("#mbrsel > div[id=ancestors]").hide();
+ $("#mbrsel > div.ancestors").hide();
filter();
};
@@ -295,7 +295,7 @@ function orderGroup() {
$("#order > ol > li.inherit").removeClass("in").addClass("out");
$("#template > div.parent").hide();
$("#template > div.conversion").hide();
- $("#mbrsel > div[id=ancestors]").show();
+ $("#mbrsel > div.ancestors").show();
filter();
};
@@ -350,7 +350,7 @@ function initInherit() {
}
});
- $("#values > ol > li").each(function(){
+ $(".values > ol > li").each(function(){
var mbr = $(this);
this.mbrText = mbr.find("> .fullcomment .cmt").text();
var qualName = mbr.attr("name");
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
index 64eb1adbea..20aaab29fc 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
@@ -40,7 +40,7 @@ trait MemberLookup extends base.MemberLookupBase {
override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = {
val sym1 =
if (sym == AnyClass || sym == AnyRefClass || sym == AnyValClass || sym == NothingClass) ListClass
- else if (sym.isPackage)
+ else if (sym.hasPackageFlag)
/* Get package object which has associatedFile ne null */
sym.info.member(newTermName("package"))
else sym
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
index ef84ac42ba..2993c4c4b9 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -89,10 +89,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
trait TemplateImpl extends EntityImpl with TemplateEntity {
override def qualifiedName: String =
if (inTemplate == null || inTemplate.isRootPackage) name else optimize(inTemplate.qualifiedName + "." + name)
- def isPackage = sym.isPackage
+ def isPackage = sym.hasPackageFlag
def isTrait = sym.isTrait
def isClass = sym.isClass && !sym.isTrait
- def isObject = sym.isModule && !sym.isPackage
+ def isObject = sym.isModule && !sym.hasPackageFlag
def isCaseClass = sym.isCaseClass
def isRootPackage = false
def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this))
@@ -250,7 +250,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def valueParams: List[List[ValueParam]] = Nil /** TODO, these are now only computed for DocTemplates */
def parentTypes =
- if (sym.isPackage || sym == AnyClass) List() else {
+ if (sym.hasPackageFlag || sym == AnyClass) List() else {
val tps = (this match {
case a: AliasType => sym.tpe.dealias.parents
case a: AbstractType => sym.info.bounds match {
@@ -661,7 +661,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
s != EmptyPackage && s != RootPackage
}
})
- else if (bSym.isPackage) // (2)
+ else if (bSym.hasPackageFlag) // (2)
if (settings.skipPackage(makeQualifiedName(bSym)))
None
else
@@ -772,7 +772,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
Some(new MemberTemplateImpl(bSym, inTpl) with AliasImpl with AliasType {
override def isAliasType = true
})
- else if (!modelFinished && (bSym.isPackage || templateShouldDocument(bSym, inTpl)))
+ else if (!modelFinished && (bSym.hasPackageFlag || templateShouldDocument(bSym, inTpl)))
modelCreation.createTemplate(bSym, inTpl)
else
None
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
index 2b7e2506d4..ea72fa6095 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
@@ -94,7 +94,7 @@ trait ModelFactoryTypeSupport {
LinkToMember(bMbr, oTpl)
case _ =>
val name = makeQualifiedName(bSym)
- if (!bSym.owner.isPackage)
+ if (!bSym.owner.hasPackageFlag)
Tooltip(name)
else
findExternalLink(bSym, name).getOrElse (
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
index 86a7a67160..c1228e8735 100755
--- a/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
@@ -49,7 +49,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
case _ =>
}
else if (asym.isTerm && asym.owner.isClass){
- if (asym.isSetter) asym = asym.getter(asym.owner)
+ if (asym.isSetter) asym = asym.getterIn(asym.owner)
makeTemplate(asym.owner) match {
case docTmpl: DocTemplateImpl =>
val mbrs: Option[MemberImpl] = findMember(asym, docTmpl)
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
index 44d8886e4e..b300752a34 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
@@ -177,7 +177,7 @@ trait DiagramDirectiveParser {
def warning(message: String) = {
// we need the position from the package object (well, ideally its comment, but yeah ...)
- val sym = if (template.sym.isPackage) template.sym.info.member(global.nme.PACKAGE) else template.sym
+ val sym = if (template.sym.hasPackageFlag) template.sym.packageObject else template.sym
assert((sym != global.NoSymbol) || (sym == global.rootMirror.RootPackage))
global.reporter.warning(sym.pos, message)
}
diff --git a/test/files/jvm/future-spec.check b/test/files/jvm/future-spec.check
index df1629dd7e..5c80aa5586 100644
--- a/test/files/jvm/future-spec.check
+++ b/test/files/jvm/future-spec.check
@@ -1 +1 @@
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there were 21 deprecation warnings; re-run with -deprecation for details
diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala
index a290af9cd3..6b34d5bfaa 100644
--- a/test/files/jvm/future-spec/FutureTests.scala
+++ b/test/files/jvm/future-spec/FutureTests.scala
@@ -17,6 +17,19 @@ class FutureTests extends MinimalScalaTest {
case "NoReply" => Promise[String]().future
}
+ def fail(msg: String): Nothing = throw new AssertionError(msg)
+
+ def ECNotUsed[T](f: ExecutionContext => T): T = {
+ val p = Promise[Runnable]()
+ val unusedEC: ExecutionContext = new ExecutionContext {
+ def execute(r: Runnable) = p.success(r)
+ def reportFailure(t: Throwable): Unit = p.failure(t)
+ }
+ val t = f(unusedEC)
+ assert(p.future.value == None, "Future executed logic!")
+ t
+ }
+
val defaultTimeout = 5 seconds
/* future specification */
@@ -68,6 +81,60 @@ class FutureTests extends MinimalScalaTest {
}
}
+ "Futures" should {
+ "have proper toString representations" in {
+ import ExecutionContext.Implicits.global
+ val s = 5
+ val f = new Exception("foo")
+ val t = Try(throw f)
+
+ val expectFailureString = "Future(Failure("+f+"))"
+ val expectSuccessString = "Future(Success(5))"
+ val expectNotCompleteString = "Future(<not completed>)"
+
+ Future.successful(s).toString mustBe expectSuccessString
+ Future.failed(f).toString mustBe expectFailureString
+ Future.fromTry(t).toString mustBe expectFailureString
+ val p = Promise[Int]()
+ p.toString mustBe expectNotCompleteString
+ Promise[Int]().success(s).toString mustBe expectSuccessString
+ Promise[Int]().failure(f).toString mustBe expectFailureString
+ Await.ready(Future { throw f }, 2000 millis).toString mustBe expectFailureString
+ Await.ready(Future { s }, 2000 millis).toString mustBe expectSuccessString
+
+ Future.never.toString mustBe "Future(<never>)"
+ Future.unit.toString mustBe "Future(Success(()))"
+ }
+
+ "have proper const representation for success" in {
+ val s = "foo"
+ val f = Future.successful(s)
+
+ ECNotUsed(ec => f.onFailure({ case _ => fail("onFailure should not have been called") })(ec))
+ assert( ECNotUsed(ec => f.recover({ case _ => fail("recover should not have been called")})(ec)) eq f)
+ assert( ECNotUsed(ec => f.recoverWith({ case _ => fail("recoverWith should not have been called")})(ec)) eq f)
+ assert(f.fallbackTo(f) eq f, "Future.fallbackTo must be the same instance as Future.fallbackTo")
+ }
+
+ "have proper const representation for failure" in {
+ val e = new Exception("foo")
+ val f = Future.failed[Future[String]](e)
+
+ assert(f.mapTo[String] eq f, "Future.mapTo must be the same instance as Future.mapTo")
+ assert(f.zip(f) eq f, "Future.zip must be the same instance as Future.zip")
+ assert(f.flatten eq f, "Future.flatten must be the same instance as Future.flatten")
+ assert(f.failed eq f, "Future.failed must be the same instance as Future.failed")
+
+ ECNotUsed(ec => f.foreach(_ => fail("foreach should not have been called"))(ec))
+ ECNotUsed(ec => f.onSuccess({ case _ => fail("onSuccess should not have been called") })(ec))
+ assert( ECNotUsed(ec => f.map(_ => fail("map should not have been called"))(ec)) eq f)
+ assert( ECNotUsed(ec => f.flatMap(_ => fail("flatMap should not have been called"))(ec)) eq f)
+ assert( ECNotUsed(ec => f.filter(_ => fail("filter should not have been called"))(ec)) eq f)
+ assert( ECNotUsed(ec => f.collect({ case _ => fail("collect should not have been called")})(ec)) eq f)
+ assert( ECNotUsed(ec => f.zipWith(f)({ (_,_) => fail("zipWith should not have been called")})(ec)) eq f)
+ }
+ }
+
"The Future companion object" should {
"call ExecutionContext.prepare on apply" in {
val p = Promise[Boolean]()
@@ -85,6 +152,49 @@ class FutureTests extends MinimalScalaTest {
Await.result(f, defaultTimeout) mustBe ("foo")
Await.result(p.future, defaultTimeout) mustBe (true)
}
+
+ "have a unit member representing an already completed Future containing Unit" in {
+ assert(Future.unit ne null, "Future.unit must not be null")
+ assert(Future.unit eq Future.unit, "Future.unit must be the same instance as Future.unit")
+ assert(Future.unit.isCompleted, "Future.unit must already be completed")
+ assert(Future.unit.value.get == Success(()), "Future.unit must contain a Success(())")
+ }
+
+ "have a never member representing a never completed Future of Nothing" in {
+
+ val test: Future[Nothing] = Future.never
+
+ //Verify stable identifier
+ test match {
+ case Future.`never` =>
+ case _ => fail("Future.never did not match Future.`never`")
+ }
+
+ assert(test eq Future.never, "Future.never must be the same instance as Future.never")
+ assert(test ne null, "Future.never must not be null")
+ assert(!test.isCompleted && test.value.isEmpty, "Future.never must never be completed")
+ assert(test.failed eq test)
+ assert(test.asInstanceOf[Future[Future[Nothing]]].flatten eq test)
+ assert(test.zip(test) eq test)
+ assert(test.fallbackTo(test) eq test)
+ assert(test.mapTo[String] eq test)
+
+ ECNotUsed(ec => test.foreach(_ => fail("foreach should not have been called"))(ec))
+ ECNotUsed(ec => test.onSuccess({ case _ => fail("onSuccess should not have been called") })(ec))
+ ECNotUsed(ec => test.onFailure({ case _ => fail("onFailure should not have been called") })(ec))
+ ECNotUsed(ec => test.onComplete({ case _ => fail("onComplete should not have been called") })(ec))
+ ECNotUsed(ec => test.transform(identity, identity)(ec) eq test)
+ ECNotUsed(ec => test.transform(identity)(ec) eq test)
+ ECNotUsed(ec => test.transformWith(_ => fail("transformWith should not have been called"))(ec) eq test)
+ ECNotUsed(ec => test.map(identity)(ec) eq test)
+ ECNotUsed(ec => test.flatMap(_ => fail("flatMap should not have been called"))(ec) eq test)
+ ECNotUsed(ec => test.filter(_ => fail("filter should not have been called"))(ec) eq test)
+ ECNotUsed(ec => test.collect({ case _ => fail("collect should not have been called")})(ec) eq test)
+ ECNotUsed(ec => test.recover({ case _ => fail("recover should not have been called")})(ec) eq test)
+ ECNotUsed(ec => test.recoverWith({ case _ => fail("recoverWith should not have been called")})(ec) eq test)
+ ECNotUsed(ec => test.andThen({ case _ => fail("andThen should not have been called")})(ec) eq test)
+ ECNotUsed(ec => test.zipWith(test)({ (_,_) => fail("zipWith should not have been called")})(ec) eq test)
+ }
}
"The default ExecutionContext" should {
@@ -218,6 +328,142 @@ class FutureTests extends MinimalScalaTest {
} mustBe (r)
}
+ "transform results to results" in {
+ val f1 = Future.successful("foo").transform(_.map(_.toUpperCase))
+ val f2 = Future("bar").transform(_.map(_.toUpperCase))
+ Await.result(f1, defaultTimeout) mustBe "FOO"
+ Await.result(f2, defaultTimeout) mustBe "BAR"
+ }
+
+ "transform failures to failures" in {
+ val initial = new Exception("Initial")
+ val expected1 = new Exception("Expected1")
+ val expected2 = new Exception("Expected2")
+ val f1 = Future(throw initial) transform {
+ case Failure(`initial`) => Failure(expected1)
+ case x => x
+ }
+ val f2 = Future.failed(initial) transform {
+ case Failure(`initial`) => Failure(expected2)
+ case x => x
+ }
+
+ intercept[Exception] { Await.result(f1, defaultTimeout) } mustBe expected1
+ intercept[Exception] { Await.result(f2, defaultTimeout) } mustBe expected2
+ }
+
+ "transform failures to results" in {
+ val initial1 = new Exception("Initial1")
+ val initial2 = new Exception("Initial2")
+ val f1 = Future.failed[String](initial1) transform {
+ case Failure(`initial1`) => Success("foo")
+ case x => x
+ }
+ val f2 = Future[String](throw initial2) transform {
+ case Failure(`initial2`) => Success("bar")
+ case x => x
+ }
+ Await.result(f1, defaultTimeout) mustBe "foo"
+ Await.result(f2, defaultTimeout) mustBe "bar"
+ }
+
+ "transform results to failures" in {
+ val expected1 = new Exception("Expected1")
+ val expected2 = new Exception("Expected2")
+ val expected3 = new Exception("Expected3")
+ val f1 = Future.successful("foo") transform {
+ case Success("foo") => Failure(expected1)
+ case x => x
+ }
+ val f2 = Future("bar") transform {
+ case Success("bar") => Failure(expected2)
+ case x => x
+ }
+ val f3 = Future("bar") transform {
+ case Success("bar") => throw expected3
+ case x => x
+ }
+ intercept[Exception] { Await.result(f1, defaultTimeout) } mustBe expected1
+ intercept[Exception] { Await.result(f2, defaultTimeout) } mustBe expected2
+ intercept[Exception] { Await.result(f3, defaultTimeout) } mustBe expected3
+ }
+
+ "transformWith results" in {
+ val f1 = Future.successful("foo").transformWith {
+ case Success(r) => Future(r.toUpperCase)
+ case f @ Failure(_) => Future.fromTry(f)
+ }
+ val f2 = Future("bar").transformWith {
+ case Success(r) => Future(r.toUpperCase)
+ case f @ Failure(_) => Future.fromTry(f)
+ }
+ Await.result(f1, defaultTimeout) mustBe "FOO"
+ Await.result(f2, defaultTimeout) mustBe "BAR"
+ }
+
+ "transformWith failures" in {
+ val initial = new Exception("Initial")
+ val expected1 = new Exception("Expected1")
+ val expected2 = new Exception("Expected2")
+ val expected3 = new Exception("Expected3")
+
+ val f1 = Future[Int](throw initial).transformWith {
+ case Failure(`initial`) => Future failed expected1
+ case x => Future fromTry x
+ }
+ val f2 = Future.failed[Int](initial).transformWith {
+ case Failure(`initial`) => Future failed expected2
+ case x => Future fromTry x
+ }
+ val f3 = Future[Int](throw initial).transformWith {
+ case Failure(`initial`) => throw expected3
+ case x => Future fromTry x
+ }
+
+ intercept[Exception] { Await.result(f1, defaultTimeout) } mustBe expected1
+ intercept[Exception] { Await.result(f2, defaultTimeout) } mustBe expected2
+ intercept[Exception] { Await.result(f3, defaultTimeout) } mustBe expected3
+ }
+
+ "transformWith failures to future success" in {
+ val initial = new Exception("Initial")
+ val f1 = Future.failed[String](initial).transformWith {
+ case Failure(`initial`) => Future("FOO")
+ case _ => Future failed initial
+ }
+ val f2 = Future[String](throw initial).transformWith {
+ case Failure(`initial`) => Future("BAR")
+ case _ => Future failed initial
+ }
+ Await.result(f1, defaultTimeout) mustBe "FOO"
+ Await.result(f2, defaultTimeout) mustBe "BAR"
+ }
+
+ "transformWith results to future failures" in {
+ val initial = new Exception("Initial")
+ val expected1 = new Exception("Expected1")
+ val expected2 = new Exception("Expected2")
+ val expected3 = new Exception("Expected3")
+
+ val f1 = Future[String]("FOO") transformWith {
+ case Success("FOO") => Future failed expected1
+ case _ => Future successful "FOO"
+ }
+ val f2 = Future.successful("FOO") transformWith {
+ case Success("FOO") => Future failed expected2
+ case _ => Future successful "FOO"
+ }
+ val f3 = Future.successful("FOO") transformWith {
+ case Success("FOO") => throw expected3
+ case _ => Future successful "FOO"
+ }
+
+
+ intercept[Exception] { Await.result(f1, defaultTimeout) } mustBe expected1
+ intercept[Exception] { Await.result(f2, defaultTimeout) } mustBe expected2
+ intercept[Exception] { Await.result(f3, defaultTimeout) } mustBe expected3
+ }
+
"andThen like a boss" in {
val q = new java.util.concurrent.LinkedBlockingQueue[Int]
for (i <- 1 to 1000) {
@@ -281,6 +527,33 @@ class FutureTests extends MinimalScalaTest {
Await.result(successful, timeout) mustBe (("foo", "foo"))
}
+ "zipWith" in {
+ val timeout = 10000 millis
+ val f = new IllegalStateException("test")
+ intercept[IllegalStateException] {
+ val failed = Future.failed[String](f).zipWith(Future.successful("foo")) { _ -> _ }
+ Await.result(failed, timeout)
+ } mustBe (f)
+
+ intercept[IllegalStateException] {
+ val failed = Future.successful("foo").zipWith(Future.failed[String](f)) { _ -> _ }
+ Await.result(failed, timeout)
+ } mustBe (f)
+
+ intercept[IllegalStateException] {
+ val failed = Future.failed[String](f).zipWith(Future.failed[String](f)) { _ -> _ }
+ Await.result(failed, timeout)
+ } mustBe (f)
+
+ val successful = Future.successful("foo").zipWith(Future.successful("foo")) { _ -> _ }
+ Await.result(successful, timeout) mustBe (("foo", "foo"))
+
+ val failure = Future.successful("foo").zipWith(Future.successful("foo")) { (_,_) => throw f }
+ intercept[IllegalStateException] {
+ Await.result(failure, timeout)
+ } mustBe (f)
+ }
+
"fold" in {
val timeout = 10000 millis
def async(add: Int, wait: Int) = Future {
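// Illustrative only: a compact, standalone sketch (assumptions: the Future API exercised by the
// tests above, i.e. transform(Try => Try), transformWith and Future.fromTry) showing the behaviour
// those tests verify. Object and value names are made up for the example.
import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.util.{Success, Failure}

object TransformSketch extends App {
  // transform maps the completed Try directly: result -> result here.
  val upper = Future("foo").transform {
    case Success(s)     => Success(s.toUpperCase)
    case f @ Failure(_) => f
  }
  println(Await.result(upper, 2.seconds))    // FOO

  // transformWith maps the completed Try to a new Future: failure -> success here.
  val rescued = Future[String](throw new Exception("boom")).transformWith {
    case Failure(_)     => Future.successful("recovered")
    case s @ Success(_) => Future.fromTry(s)
  }
  println(Await.result(rescued, 2.seconds))  // recovered
}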
diff --git a/test/files/jvm/scala-concurrent-tck.check b/test/files/jvm/scala-concurrent-tck.check
new file mode 100644
index 0000000000..bbe73c9982
--- /dev/null
+++ b/test/files/jvm/scala-concurrent-tck.check
@@ -0,0 +1 @@
+warning: there were 74 deprecation warnings; re-run with -deprecation for details
diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala
index ce86d4aef0..ba405e97bd 100644
--- a/test/files/jvm/scala-concurrent-tck.scala
+++ b/test/files/jvm/scala-concurrent-tck.scala
@@ -165,6 +165,100 @@ def testTransformFailure(): Unit = once {
g onFailure { case e => done(e eq transformed) }
}
+ def testTransformResultToResult(): Unit = once {
+ done =>
+ Future("foo").transform {
+ case Success(s) => Success(s.toUpperCase)
+ case Failure(f) => throw new Exception("test failed")
+ } onComplete {
+ case Success("FOO") => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformResultToFailure(): Unit = once {
+ done =>
+ val e = new Exception("expected")
+ Future("foo").transform {
+ case Success(s) => Failure(e)
+ case Failure(f) => throw new Exception("test failed")
+ } onComplete {
+ case Failure(`e`) => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformFailureToResult(): Unit = once {
+ done =>
+ val e = "foo"
+ Future(throw new Exception("initial")).transform {
+ case Success(s) => throw new Exception("test failed")
+ case Failure(f) => Success(e)
+ } onComplete {
+ case Success(`e`) => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformFailureToFailure(): Unit = once {
+ done =>
+ val e = new Exception("expected")
+ Future(throw new Exception("initial")).transform {
+ case Success(s) => throw new Exception("test failed")
+ case Failure(f) => Failure(e)
+ } onComplete {
+ case Failure(`e`) => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformWithResultToResult(): Unit = once {
+ done =>
+ Future("foo").transformWith {
+ case Success(s) => Future(s.toUpperCase)
+ case Failure(f) => throw new Exception("test failed")
+ } onComplete {
+ case Success("FOO") => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformWithResultToFailure(): Unit = once {
+ done =>
+ val e = new Exception("expected")
+ Future("foo").transformWith {
+ case Success(s) => Future(throw e)
+ case Failure(f) => throw new Exception("test failed")
+ } onComplete {
+ case Failure(`e`) => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformWithFailureToResult(): Unit = once {
+ done =>
+ val e = "foo"
+ Future(throw new Exception("initial")).transformWith {
+ case Success(s) => throw new Exception("test failed")
+ case Failure(f) => Future(e)
+ } onComplete {
+ case Success(`e`) => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformWithFailureToFailure(): Unit = once {
+ done =>
+ val e = new Exception("expected")
+ Future(throw new Exception("initial")).transformWith {
+ case Success(s) => throw new Exception("test failed")
+ case Failure(f) => Future(throw e)
+ } onComplete {
+ case Failure(`e`) => done(true)
+ case _ => done(false)
+ }
+ }
+
def testFoldFailure(): Unit = once {
done =>
val f = Future[Unit] { throw new Exception("expected") }
@@ -352,6 +446,14 @@ def testTransformFailure(): Unit = once {
h onFailure { case e => done(e eq cause) }
}
+ def testFallbackToThis(): Unit = {
+ def check(f: Future[Int]) = assert((f fallbackTo f) eq f)
+
+ check(Future { 1 })
+ check(Future.successful(1))
+ check(Future.failed[Int](new Exception))
+ }
+
testMapSuccess()
testMapFailure()
testFlatMapSuccess()
@@ -373,6 +475,16 @@ def testTransformFailure(): Unit = once {
testFallbackToFailure()
testTransformSuccess()
testTransformSuccessPF()
+ testTransformFailure()
+ testTransformFailurePF()
+ testTransformResultToResult()
+ testTransformResultToFailure()
+ testTransformFailureToResult()
+ testTransformFailureToFailure()
+ testTransformWithResultToResult()
+ testTransformWithResultToFailure()
+ testTransformWithFailureToResult()
+ testTransformWithFailureToFailure()
}
@@ -593,6 +705,17 @@ trait Exceptions extends TestBase {
}
+trait GlobalExecutionContext extends TestBase {
+ def testNameOfGlobalECThreads(): Unit = once {
+ done => Future({
+ val expectedName = "scala-execution-context-global-"+ Thread.currentThread.getId
+ done(expectedName == Thread.currentThread.getName)
+ })(ExecutionContext.global)
+ }
+
+ testNameOfGlobalECThreads()
+}
+
trait CustomExecutionContext extends TestBase {
import scala.concurrent.{ ExecutionContext, Awaitable }
@@ -772,6 +895,7 @@ with FutureProjections
with Promises
with BlockContexts
with Exceptions
+with GlobalExecutionContext
with CustomExecutionContext
with ExecutionContextPrepare
{
diff --git a/test/files/jvm/t7146.scala b/test/files/jvm/t7146.scala
index aaa3dc7ca4..ea734472d5 100644
--- a/test/files/jvm/t7146.scala
+++ b/test/files/jvm/t7146.scala
@@ -10,7 +10,7 @@ object Test {
ExecutionContext.global.toString.startsWith("scala.concurrent.impl.ExecutionContextImpl"))
val i = ExecutionContext.global.asInstanceOf[{ def executor: Executor }]
println("should be scala.concurrent.forkjoin.ForkJoinPool == " +
- i.executor.toString.startsWith("scala.concurrent.forkjoin.ForkJoinPool"))
+ (i.executor.getClass.getSuperclass.getName == "scala.concurrent.forkjoin.ForkJoinPool"))
val u = i.executor.
asInstanceOf[{ def getUncaughtExceptionHandler: Thread.UncaughtExceptionHandler }].
getUncaughtExceptionHandler
diff --git a/test/files/jvm/t8582.check b/test/files/jvm/t8582.check
index 564f482ff8..1d19f1d6a8 100644
--- a/test/files/jvm/t8582.check
+++ b/test/files/jvm/t8582.check
@@ -1,3 +1,6 @@
+t8582.scala:17: warning: class BeanInfo in package beans is deprecated: the generation of BeanInfo classes is no longer supported
+ class C1
+ ^
getClass on module gives module class
class p1.p2.Singleton$Singleton$
diff --git a/test/files/jvm/t8582.flags b/test/files/jvm/t8582.flags
new file mode 100644
index 0000000000..dcc59ebe32
--- /dev/null
+++ b/test/files/jvm/t8582.flags
@@ -0,0 +1 @@
+-deprecation
diff --git a/test/files/neg/beanInfoDeprecation.check b/test/files/neg/beanInfoDeprecation.check
new file mode 100644
index 0000000000..788b277818
--- /dev/null
+++ b/test/files/neg/beanInfoDeprecation.check
@@ -0,0 +1,6 @@
+beanInfoDeprecation.scala:2: warning: class BeanInfo in package beans is deprecated: the generation of BeanInfo classes is no longer supported
+class C
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/beanInfoDeprecation.flags b/test/files/neg/beanInfoDeprecation.flags
new file mode 100644
index 0000000000..c6bfaf1f64
--- /dev/null
+++ b/test/files/neg/beanInfoDeprecation.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings
diff --git a/test/files/neg/beanInfoDeprecation.scala b/test/files/neg/beanInfoDeprecation.scala
new file mode 100644
index 0000000000..c7e3a86202
--- /dev/null
+++ b/test/files/neg/beanInfoDeprecation.scala
@@ -0,0 +1,2 @@
+@scala.beans.BeanInfo
+class C
diff --git a/test/files/neg/t8764.check b/test/files/neg/t8764.check
deleted file mode 100644
index 6d89ebe106..0000000000
--- a/test/files/neg/t8764.check
+++ /dev/null
@@ -1,6 +0,0 @@
-t8764.scala:8: error: type mismatch;
- found : AnyVal
- required: Double
- val d: Double = a.productElement(0)
- ^
-one error found
diff --git a/test/files/neg/t8764.flags b/test/files/neg/t8764.flags
deleted file mode 100644
index 48fd867160..0000000000
--- a/test/files/neg/t8764.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
diff --git a/test/files/neg/t8764.scala b/test/files/neg/t8764.scala
deleted file mode 100644
index dc5bfb0160..0000000000
--- a/test/files/neg/t8764.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-object Main {
-
- case class IntAndDouble(i: Int, d: Double)
-
- // a.productElement used to be Int => Double
- // now: Int => AnyVal
- val a = IntAndDouble(1, 5.0)
- val d: Double = a.productElement(0)
-}
diff --git a/test/files/neg/t8849.check b/test/files/neg/t8849.check
new file mode 100644
index 0000000000..15b00aee8b
--- /dev/null
+++ b/test/files/neg/t8849.check
@@ -0,0 +1,7 @@
+t8849.scala:8: error: ambiguous implicit values:
+ both value global in object Implicits of type => scala.concurrent.ExecutionContext
+ and value dummy of type scala.concurrent.ExecutionContext
+ match expected type scala.concurrent.ExecutionContext
+ require(implicitly[ExecutionContext] eq dummy)
+ ^
+one error found
diff --git a/test/files/neg/t8849.scala b/test/files/neg/t8849.scala
new file mode 100644
index 0000000000..336f16b40f
--- /dev/null
+++ b/test/files/neg/t8849.scala
@@ -0,0 +1,10 @@
+import scala.concurrent.ExecutionContext
+import ExecutionContext.Implicits.global
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ implicit val dummy: ExecutionContext = null
+ require(scala.concurrent.ExecutionContext.Implicits.global ne null)
+ require(implicitly[ExecutionContext] eq dummy)
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/alladin763.scala b/test/files/pos/alladin763.scala
new file mode 100644
index 0000000000..29c9b25318
--- /dev/null
+++ b/test/files/pos/alladin763.scala
@@ -0,0 +1,37 @@
+// Test from http://lrytz.github.io/scala-aladdin-bugtracker/displayItem.do%3Fid=763.html
+// and expanded with package object variants
+
+
+trait Foo { type T; def apply() : T }
+object e extends Foo { type T = Int; def apply() = 42 }
+
+package p {
+ trait T[X] { def O : { def apply(): X } }
+ object `package` extends T[Int] {
+ def O: { def apply(): Int } = new { def apply(): Int = 42 }
+ }
+
+ object Test {
+ val x: Int = O()
+ }
+}
+
+object Test {
+
+ val f = new Foo { type T = Int; def apply() = 42 }
+
+ def main(args: Array[String]): Unit = {
+ val g = new Foo { type T = Int; def apply() = 42 }
+
+ (e: Foo)()
+ val ee: Int = e()
+
+ (f: Foo)()
+ val ff: Int = f()
+
+ (g: Foo)()
+ val gg: Int = g()
+
+ val pp: Int = p.O()
+ }
+}
diff --git a/test/files/pos/t8462.scala b/test/files/pos/t8462.scala
new file mode 100644
index 0000000000..6946cf8e5e
--- /dev/null
+++ b/test/files/pos/t8462.scala
@@ -0,0 +1,11 @@
+
+trait ConstantOps {
+ def exprs = (
+ 1 << 2L : Int, // was: error: type mismatch; found : Long(4L)
+ 64 >> 2L : Int, // was: error: type mismatch; found : Long(4L)
+ 64 >>> 2L : Int, // was: error: type mismatch; found : Long(4L)
+ 'a' << 2L : Int,
+ 'a' >> 2L : Int,
+ 'a'>>> 2L : Int
+ )
+}
diff --git a/test/files/pos/t8862a.scala b/test/files/pos/t8862a.scala
new file mode 100644
index 0000000000..f9576707ba
--- /dev/null
+++ b/test/files/pos/t8862a.scala
@@ -0,0 +1,47 @@
+package p {
+
+ abstract class C[A] {
+ def x: A
+ implicit def oops: A = x
+ implicit def oopso: Option[A] = None
+ }
+
+ package q {
+
+ class Oops
+
+ object `package` extends C[Oops] {
+ override def x = new Oops
+ }
+
+ object Blah {
+ oops
+ oopso
+
+ // implicits found in enclosing context
+ implicitly[Oops]
+ implicitly[Option[Oops]]
+ }
+ }
+}
+
+package other {
+
+ object Blah {
+ // implicits found through this import
+ import p.q._
+
+ oops
+ oopso
+
+ implicitly[Oops]
+ implicitly[Option[Oops]]
+ }
+
+
+ object Blee {
+ // implicits found through the companion implicits
+ implicitly[p.q.Oops]
+ implicitly[Option[p.q.Oops]]
+ }
+}
diff --git a/test/files/pos/t8862b.scala b/test/files/pos/t8862b.scala
new file mode 100644
index 0000000000..8be7fb5fab
--- /dev/null
+++ b/test/files/pos/t8862b.scala
@@ -0,0 +1,12 @@
+package p {
+ trait T[X] { def O : { def apply(): X } }
+ object `package` extends T[Int] {
+ def O: { def apply(): Int } = new { def apply(): Int = 42 }
+ }
+
+ object Test {
+ def main(args: Array[String]): Unit = {
+ val x: Int = O()
+ }
+ }
+}
diff --git a/test/files/run/future-flatmap-exec-count.check b/test/files/run/future-flatmap-exec-count.check
index dd9dce64ed..7065c133e0 100644
--- a/test/files/run/future-flatmap-exec-count.check
+++ b/test/files/run/future-flatmap-exec-count.check
@@ -1,3 +1,4 @@
+warning: there was one deprecation warning; re-run with -deprecation for details
mapping
execute()
flatmapping
diff --git a/test/files/run/inline-ex-handlers.check b/test/files/run/inline-ex-handlers.check
index 7c885d2cc9..27a277d314 100644
--- a/test/files/run/inline-ex-handlers.check
+++ b/test/files/run/inline-ex-handlers.check
@@ -123,12 +123,12 @@
300 RETURN(UNIT)
@@ -583,6 +603,6 @@
with finalizer: null
-- catch (Throwable) in ArrayBuffer(7, 9, 10) starting at: 6
-+ catch (Throwable) in ArrayBuffer(7, 9, 10, 11) starting at: 6
+- catch (Throwable) in Vector(7, 9, 10) starting at: 6
++ catch (Throwable) in Vector(7, 9, 10, 11) starting at: 6
consisting of blocks: List(6)
with finalizer: null
-- catch (Throwable) in ArrayBuffer(4, 6, 7, 9, 10) starting at: 3
-+ catch (Throwable) in ArrayBuffer(4, 6, 7, 9, 10, 11, 12) starting at: 3
+- catch (Throwable) in Vector(4, 6, 7, 9, 10) starting at: 3
++ catch (Throwable) in Vector(4, 6, 7, 9, 10, 11, 12) starting at: 3
consisting of blocks: List(3)
@@ -618,3 +638,3 @@
startBlock: 1
@@ -171,8 +171,8 @@
}
@@ -690,3 +730,3 @@
with finalizer: null
-- catch (<none>) in ArrayBuffer(4, 5, 6, 8) starting at: 3
-+ catch (<none>) in ArrayBuffer(4, 5, 6, 8, 10) starting at: 3
+- catch (<none>) in Vector(4, 5, 6, 8) starting at: 3
++ catch (<none>) in Vector(4, 5, 6, 8, 10) starting at: 3
consisting of blocks: List(3)
@@ -714,5 +754,5 @@
def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
@@ -276,12 +276,12 @@
}
@@ -852,6 +918,6 @@
with finalizer: null
-- catch (Throwable) in ArrayBuffer(13, 14, 15, 18, 20, 21, 23) starting at: 4
-+ catch (Throwable) in ArrayBuffer(13, 14, 15, 18, 20, 21, 23, 25) starting at: 4
+- catch (Throwable) in Vector(13, 14, 15, 18, 20, 21, 23) starting at: 4
++ catch (Throwable) in Vector(13, 14, 15, 18, 20, 21, 23, 25) starting at: 4
consisting of blocks: List(9, 8, 6, 5, 4)
with finalizer: null
-- catch (<none>) in ArrayBuffer(4, 5, 6, 9, 13, 14, 15, 18, 20, 21, 23) starting at: 3
-+ catch (<none>) in ArrayBuffer(4, 5, 6, 9, 13, 14, 15, 18, 20, 21, 23, 25, 26) starting at: 3
+- catch (<none>) in Vector(4, 5, 6, 9, 13, 14, 15, 18, 20, 21, 23) starting at: 3
++ catch (<none>) in Vector(4, 5, 6, 9, 13, 14, 15, 18, 20, 21, 23, 25, 26) starting at: 3
consisting of blocks: List(3)
@@ -879,5 +945,5 @@
def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
@@ -317,8 +317,8 @@
127 CALL_METHOD scala.Predef.println (dynamic)
@@ -964,3 +1034,3 @@
with finalizer: null
-- catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16) starting at: 3
-+ catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16, 17) starting at: 3
+- catch (IllegalArgumentException) in Vector(6, 7, 8, 11, 13, 14, 16) starting at: 3
++ catch (IllegalArgumentException) in Vector(6, 7, 8, 11, 13, 14, 16, 17) starting at: 3
consisting of blocks: List(3)
@@ -988,5 +1058,5 @@
def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
diff --git a/test/files/run/lub-visibility.check b/test/files/run/lub-visibility.check
index 70734966f0..135cb3cb76 100644
--- a/test/files/run/lub-visibility.check
+++ b/test/files/run/lub-visibility.check
@@ -6,6 +6,6 @@ scala> // should infer List[scala.collection.immutable.Seq[Nothing]]
scala> // but reverted that for SI-5534.
scala> val x = List(List(), Vector())
-x: List[scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing] with java.io.Serializable] = List(List(), Vector())
+x: List[scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing] with Serializable] = List(List(), Vector())
scala> :quit
diff --git a/test/files/run/t2251b.check b/test/files/run/t2251b.check
index 4231fc6ea6..b60698d605 100644
--- a/test/files/run/t2251b.check
+++ b/test/files/run/t2251b.check
@@ -1,4 +1,4 @@
-TypeTag[List[scala.collection.immutable.LinearSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with java.io.Serializable]]
+TypeTag[List[scala.collection.immutable.LinearSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with Serializable]]
TypeTag[List[scala.collection.immutable.Iterable[B[_ >: F with E with D with C <: B[_ >: F with E with D with C <: A]]] with F with Int => Any]]
TypeTag[List[scala.collection.immutable.Seq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with Serializable]]
TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G with F <: A]]]]]
@@ -6,6 +6,6 @@ TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G w
TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G with F <: A]]]]]
TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]]
TypeTag[List[scala.collection.Map[_ >: F with C <: B[_ >: F with C <: B[_ >: F with C <: A]], B[_ >: G with D <: B[_ >: G with D <: A]]]]]
-TypeTag[List[scala.collection.AbstractSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with scala.collection.LinearSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with java.io.Serializable]]
+TypeTag[List[scala.collection.AbstractSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with scala.collection.LinearSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with Serializable]]
TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]]
TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]]
diff --git a/test/files/run/t4332.scala b/test/files/run/t4332.scala
index 5a67922911..1c7e7d73de 100644
--- a/test/files/run/t4332.scala
+++ b/test/files/run/t4332.scala
@@ -12,7 +12,7 @@ object Test extends DirectTest {
}
def isExempt(sym: Symbol) = {
- val exempt = Set("view", "repr", "sliceWithKnownDelta", "sliceWithKnownBound", "transform")
+ val exempt = Set("view", "repr", "sliceWithKnownDelta", "sliceWithKnownBound", "transform", "filterImpl")
(exempt contains sym.name.decoded)
}
diff --git a/test/files/run/t6827.check b/test/files/run/t6827.check
index 3a3a71c67d..4889e05be8 100644
--- a/test/files/run/t6827.check
+++ b/test/files/run/t6827.check
@@ -1,6 +1,6 @@
-start at -5: java.lang.IllegalArgumentException: requirement failed: start -5 out of range 10
-start at -1: java.lang.IllegalArgumentException: requirement failed: start -1 out of range 10
-start at limit: java.lang.IllegalArgumentException: requirement failed: start 10 out of range 10
+start at -5: java.lang.ArrayIndexOutOfBoundsException: -5
+start at -1: java.lang.ArrayIndexOutOfBoundsException: -1
+start at limit: ok
start at limit-1: ok
first 10: ok
read all: ok
@@ -8,8 +8,8 @@ test huge len: ok
5 from 5: ok
20 from 5: ok
test len overflow: ok
-start beyond limit: java.lang.IllegalArgumentException: requirement failed: start 30 out of range 10
+start beyond limit: ok
read 0: ok
read -1: ok
-invalid read 0: java.lang.IllegalArgumentException: requirement failed: start 30 out of range 10
-invalid read -1: java.lang.IllegalArgumentException: requirement failed: start 30 out of range 10
+invalid read 0: ok
+invalid read -1: ok
diff --git a/test/files/run/t6827.scala b/test/files/run/t6827.scala
index 8e17af09e2..eb020711bb 100644
--- a/test/files/run/t6827.scala
+++ b/test/files/run/t6827.scala
@@ -31,4 +31,24 @@ object Test extends App {
// okay, see SI-7128
"...".toIterator.copyToArray(new Array[Char](0), 0, 0)
+
+
+ // Bonus test from @som-snytt to check for overflow in
+ // index calculations.
+ def testOverflow(start: Int, len: Int, expected: List[Char]) {
+ def copyFromIterator = {
+ val arr = Array.fill[Char](3)('-')
+ "abc".toIterator.copyToArray(arr, start, len)
+ arr.toList
+ }
+ def copyFromArray = {
+ val arr = Array.fill[Char](3)('-')
+ "abc".toArray.copyToArray(arr, start, len)
+ arr.toList
+ }
+ assert(copyFromIterator == expected)
+ assert(copyFromArray == expected)
+ }
+ testOverflow(1, Int.MaxValue - 1, "-ab".toList)
+ testOverflow(1, Int.MaxValue, "-ab".toList)
}
diff --git a/test/files/run/t7521/Test.scala b/test/files/run/t7521/Test.scala
new file mode 100644
index 0000000000..e9816ad6cb
--- /dev/null
+++ b/test/files/run/t7521/Test.scala
@@ -0,0 +1,5 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ new Wrapper(new Array[Int](1))
+ }
+}
diff --git a/test/files/run/t7521/Wrapper.scala b/test/files/run/t7521/Wrapper.scala
new file mode 100644
index 0000000000..0b923f8924
--- /dev/null
+++ b/test/files/run/t7521/Wrapper.scala
@@ -0,0 +1 @@
+class Wrapper[Repr](val xs: Repr) extends AnyVal
diff --git a/test/files/run/t7521b.check b/test/files/run/t7521b.check
new file mode 100644
index 0000000000..4d96df106d
--- /dev/null
+++ b/test/files/run/t7521b.check
@@ -0,0 +1,7 @@
+= Java Erased Signatures =
+public int C.a(Wrapper)
+public int C.b(Wrapper)
+
+= Java Generic Signatures =
+public int C.a(Wrapper<int[]>)
+public int C.b(Wrapper<java.lang.Object>)
diff --git a/test/files/run/t7521b.scala b/test/files/run/t7521b.scala
new file mode 100644
index 0000000000..c9e27f28b4
--- /dev/null
+++ b/test/files/run/t7521b.scala
@@ -0,0 +1,20 @@
+class Wrapper[X](x: X)
+
+class C {
+ def a(w: Wrapper[Array[Int]]) = 0
+ def b(w: Wrapper[Int]) = 0
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val c = new C
+ c.a(new Wrapper(Array(1, 2)))
+ c.b(new Wrapper(1))
+
+ val methods = classOf[C].getDeclaredMethods.sortBy(_.getName)
+ println("= Java Erased Signatures =")
+ println(methods.mkString("\n"))
+ println("\n= Java Generic Signatures =")
+ println(methods.map(_.toGenericString).mkString("\n"))
+ }
+}
diff --git a/test/files/run/t8575.scala b/test/files/run/t8575.scala
new file mode 100644
index 0000000000..6e3e57f2be
--- /dev/null
+++ b/test/files/run/t8575.scala
@@ -0,0 +1,33 @@
+class E[F]
+class A
+class B
+class C
+
+trait TypeMember {
+ type X
+
+ // This call throws an AbstractMethodError, because it invokes the erasure of
+ // consume(X): Unit, which is consume(Object): Unit, and the corresponding
+ // bridge method is not generated.
+ consume(value)
+
+ def value: X
+ def consume(x: X): Unit
+}
+
+object Test extends TypeMember {
+ type F = A with B
+
+ // works if replaced by type X = E[A with B with C]
+ type X = E[F with C]
+
+ val value = new E[F with C]
+
+ // This call passes, since it invokes consume(E): Unit
+ consume(value)
+ def consume(x: X) {}
+
+ def main(args: Array[String]) {
+
+ }
+}
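Whether the expected bridge was emitted can be checked directly with Java reflection; an illustrative snippet (not part of the test), assuming it runs after the classes above are loaded:

  // Method.isBridge marks compiler-generated bridge methods; a correct translation
  // should include a consume(Object) bridge forwarding to the concrete consume.
  Test.getClass.getDeclaredMethods.filter(_.isBridge).foreach(println)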
diff --git a/test/files/run/t8575b.scala b/test/files/run/t8575b.scala
new file mode 100644
index 0000000000..0d731ccf9f
--- /dev/null
+++ b/test/files/run/t8575b.scala
@@ -0,0 +1,17 @@
+class A
+class B
+class C
+
+object Test {
+ type F = A with B
+
+ def main(args: Array[String]) {
+ import reflect.runtime.universe._
+ val t1 = typeOf[F with C]
+ val t2 = typeOf[(A with B) with C]
+ val t3 = typeOf[A with B with C]
+ assert(t1 =:= t2)
+ assert(t2 =:= t3)
+ assert(t3 =:= t1)
+ }
+}
diff --git a/test/files/run/t8575c.scala b/test/files/run/t8575c.scala
new file mode 100644
index 0000000000..8219952299
--- /dev/null
+++ b/test/files/run/t8575c.scala
@@ -0,0 +1,23 @@
+class C
+
+trait TypeMember {
+ type X
+ type Y
+ type Z
+}
+
+object Test extends TypeMember {
+ type A = X with Y
+ type B = Z with A
+ type F = A with B
+
+ def main(args: Array[String]) {
+ import reflect.runtime.universe._
+ val t1 = typeOf[F with C]
+ val t2 = typeOf[(A with B) with C]
+ val t3 = typeOf[A with B with C]
+ assert(t1 =:= t2)
+ assert(t2 =:= t3)
+ assert(t3 =:= t1)
+ }
+}
diff --git a/test/files/run/t8710.scala b/test/files/run/t8710.scala
new file mode 100644
index 0000000000..15aab5b8a4
--- /dev/null
+++ b/test/files/run/t8710.scala
@@ -0,0 +1,17 @@
+class Bar(val x: Int) extends AnyVal {
+ def f: String = f(0)
+ private def f(x: Int): String = ""
+}
+
+class Baz(val x: Int) extends AnyVal {
+ def f: String = "123"
+ private def f(x: Int): String = ""
+}
+object Baz {
+ def x(b: Baz) = b.f(0)
+}
+
+object Test extends App {
+ new Bar(23).f
+ new Baz(23).f
+}
diff --git a/test/files/run/t8764.check b/test/files/run/t8764.check
deleted file mode 100644
index 6260069602..0000000000
--- a/test/files/run/t8764.check
+++ /dev/null
@@ -1,5 +0,0 @@
-IntOnly: should return an unboxed int
-Int: int
-IntAndDouble: should just box and return Anyval
-Double: class java.lang.Double
-Int: class java.lang.Integer
diff --git a/test/files/run/t8764.flags b/test/files/run/t8764.flags
deleted file mode 100644
index 48fd867160..0000000000
--- a/test/files/run/t8764.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
diff --git a/test/files/run/t8764.scala b/test/files/run/t8764.scala
deleted file mode 100644
index decc658f6e..0000000000
--- a/test/files/run/t8764.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-object Test extends App {
-case class IntOnly(i: Int, j: Int)
-
-println("IntOnly: should return an unboxed int")
-val a = IntOnly(1, 2)
-val i: Int = a.productElement(0)
-println(s"Int: ${a.productElement(0).getClass}")
-
-case class IntAndDouble(i: Int, d: Double)
-
-println("IntAndDouble: should just box and return Anyval")
-val b = IntAndDouble(1, 2.0)
-val j: AnyVal = b.productElement(0)
-println(s"Double: ${b.productElement(1).getClass}")
-println(s"Int: ${b.productElement(0).getClass}")
-}
diff --git a/test/files/run/t8944/A_1.scala b/test/files/run/t8944/A_1.scala
new file mode 100644
index 0000000000..7ff80327b0
--- /dev/null
+++ b/test/files/run/t8944/A_1.scala
@@ -0,0 +1 @@
+case class A(private val x: String)
diff --git a/test/files/run/t8944/A_2.scala b/test/files/run/t8944/A_2.scala
new file mode 100644
index 0000000000..3dcdea1583
--- /dev/null
+++ b/test/files/run/t8944/A_2.scala
@@ -0,0 +1,6 @@
+case class Other(private val x: String) // consume a fresh name suffix
+
+// the param accessor will now be called "x$2",
+// whereas the previously compiled client expects it to be called
+// x$1
+case class A(private val x: String)
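The name drift described in the comment can be observed by listing the accessors generated for A after each compilation round; an illustrative snippet (the printed names are whatever the fresh-name counter produced, e.g. x$1 or x$2):

  // Prints the synthetic accessor methods for the private case-class parameter.
  classOf[A].getDeclaredMethods.map(_.getName).sorted.foreach(println)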
diff --git a/test/files/run/t8944/Test_1.scala b/test/files/run/t8944/Test_1.scala
new file mode 100644
index 0000000000..fe466693cf
--- /dev/null
+++ b/test/files/run/t8944/Test_1.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ val A("") = new A("")
+}
diff --git a/test/files/run/t8944b.scala b/test/files/run/t8944b.scala
new file mode 100644
index 0000000000..f469122ce6
--- /dev/null
+++ b/test/files/run/t8944b.scala
@@ -0,0 +1,9 @@
+case class A(private var foo: Any) {
+ def m = { def foo = 42 /*will be lambda lifted to `A#foo$1`*/ }
+}
+object Test {
+ def main(args: Array[String]): Unit = {
+ val A("") = new A("")
+ new A("").m
+ }
+}
diff --git a/test/files/run/t8944c.check b/test/files/run/t8944c.check
new file mode 100644
index 0000000000..7738f76980
--- /dev/null
+++ b/test/files/run/t8944c.check
@@ -0,0 +1,5 @@
+private java.lang.Object Foo.ant()
+public java.lang.Object Foo.ant$access$0()
+private scala.collection.Seq Foo.cat()
+public scala.collection.Seq Foo.cat$access$2()
+public java.lang.Object Foo.elk()
diff --git a/test/files/run/t8944c.scala b/test/files/run/t8944c.scala
new file mode 100644
index 0000000000..95c2143851
--- /dev/null
+++ b/test/files/run/t8944c.scala
@@ -0,0 +1,8 @@
+case class Foo[A](private val ant: Any, elk: Any, private val cat: A*)
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ def pred(name: String) = Set("ant", "elk", "cat").exists(name contains _)
+ println(classOf[Foo[_]].getDeclaredMethods.filter(m => pred(m.getName)).sortBy(_.getName).mkString("\n"))
+ }
+}
diff --git a/test/files/run/t8955.scala b/test/files/run/t8955.scala
new file mode 100644
index 0000000000..afa31aa5d7
--- /dev/null
+++ b/test/files/run/t8955.scala
@@ -0,0 +1,12 @@
+import scala.collection.parallel.immutable.ParSet
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ for (i <- 1 to 2000) test()
+ }
+
+ def test() {
+ ParSet[Int]((1 to 10000): _*) foreach (x => ()) // hangs non-deterministically
+ }
+}
+
diff --git a/test/files/run/t9030.scala b/test/files/run/t9030.scala
new file mode 100644
index 0000000000..48d24e5b54
--- /dev/null
+++ b/test/files/run/t9030.scala
@@ -0,0 +1,19 @@
+object Test extends App {
+
+ // For these methods, the compiler emits calls to BoxesRunTime.equalsNumNum/equalsNumChar/equalsNumObject directly
+
+ def numNum(a: java.lang.Number, b: java.lang.Number) = assert(a == b)
+ def numChar(a: java.lang.Number, b: java.lang.Character) = assert(a == b)
+ def numObject(a: java.lang.Number, b: java.lang.Object) = assert(a == b)
+
+ // The compiler doesn't use equalsCharObject directly, but an example is included here for completeness
+
+ def charObject(a: java.lang.Character, b: java.lang.Object) = assert(a == b)
+
+ numNum(new Integer(1), new Integer(1))
+ numChar(new Integer(97), new Character('a'))
+ numObject(new Integer(1), new Integer(1))
+ numObject(new Integer(97), new Character('a'))
+
+ charObject(new Character('a'), new Integer(97))
+}
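The runtime entry points mentioned in the comment can also be exercised directly; a minimal sketch, assuming scala.runtime.BoxesRunTime and its public static equals* helpers:

  import scala.runtime.BoxesRunTime

  // These mirror what `==` compiles to when the static types are Number/Character.
  assert(BoxesRunTime.equalsNumNum(Integer.valueOf(1), java.lang.Long.valueOf(1L)))
  assert(BoxesRunTime.equalsNumChar(Integer.valueOf(97), Character.valueOf('a')))
  assert(BoxesRunTime.equalsNumObject(Integer.valueOf(1), Integer.valueOf(1)))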
diff --git a/test/junit/scala/collection/SeqViewTest.scala b/test/junit/scala/collection/SeqViewTest.scala
new file mode 100644
index 0000000000..24474fc4b9
--- /dev/null
+++ b/test/junit/scala/collection/SeqViewTest.scala
@@ -0,0 +1,16 @@
+package scala.collection
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Assert._
+import org.junit.Test
+
+@RunWith(classOf[JUnit4])
+class SeqViewTest {
+
+ @Test
+ def test_SI8691() {
+ // Really just testing to make sure ++: doesn't throw an exception
+ assert( Seq(1,2) ++: Seq(3,4).view == Seq(1,2,3,4) )
+ }
+}
diff --git a/test/junit/scala/collection/SetMapConsistencyTest.scala b/test/junit/scala/collection/SetMapConsistencyTest.scala
index 261c11a98b..0749e61c09 100644
--- a/test/junit/scala/collection/SetMapConsistencyTest.scala
+++ b/test/junit/scala/collection/SetMapConsistencyTest.scala
@@ -529,4 +529,15 @@ class SetMapConsistencyTest {
assert(nit == 4)
assert(nfe == 4)
}
+
+ @Test
+ def test_SI8727() {
+ import scala.tools.testing.AssertUtil._
+ type NSEE = NoSuchElementException
+ val map = Map(0 -> "zero", 1 -> "one")
+ val m = map.filterKeys(i => if (map contains i) true else throw new NSEE)
+ assert{ (m contains 0) && (m get 0).nonEmpty }
+ assertThrows[NSEE]{ m contains 2 }
+ assertThrows[NSEE]{ m get 2 }
+ }
}
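The assertions rely on filterKeys in 2.11 returning a lazy view over the original map, so the predicate runs again on every lookup. If that laziness is unwanted, a strict filter evaluates the predicate once per entry up front (a sketch, not part of the test):

  val map    = Map(0 -> "zero", 1 -> "one")
  // filter builds a new strict Map, so later lookups never re-run the predicate.
  val strict = map.filter { case (k, _) => map contains k }
  assert(!(strict contains 2))   // no NoSuchElementException, unlike the lazy view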
diff --git a/test/junit/scala/collection/immutable/StreamTest.scala b/test/junit/scala/collection/immutable/StreamTest.scala
new file mode 100644
index 0000000000..6dc1c79a48
--- /dev/null
+++ b/test/junit/scala/collection/immutable/StreamTest.scala
@@ -0,0 +1,18 @@
+package scala.collection.immutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import org.junit.Assert._
+
+@RunWith(classOf[JUnit4])
+class StreamTest {
+
+ @Test
+ def t6727_and_t6440(): Unit = {
+ assertTrue(Stream.continually(()).filter(_ => true).take(2) == Seq((), ()))
+ assertTrue(Stream.continually(()).filterNot(_ => false).take(2) == Seq((), ()))
+ assertTrue(Stream(1,2,3,4,5).filter(_ < 4) == Seq(1,2,3))
+ assertTrue(Stream(1,2,3,4,5).filterNot(_ > 4) == Seq(1,2,3,4))
+ }
+}
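t6727/t6440 are about Stream#filter and #filterNot staying lazy, forcing only as many elements as the caller demands; a quick illustration under that assumption:

  // Terminates only because filter does not eagerly force the infinite tail.
  assert(Stream.from(1).filter(_ % 2 == 0).take(3).toList == List(2, 4, 6))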
diff --git a/test/junit/scala/sys/process/t7350.scala b/test/junit/scala/sys/process/t7350.scala
new file mode 100644
index 0000000000..7f3e8897f2
--- /dev/null
+++ b/test/junit/scala/sys/process/t7350.scala
@@ -0,0 +1,298 @@
+
+package scala.sys.process
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import java.io.{InputStream, OutputStream, PipedInputStream, PipedOutputStream, ByteArrayInputStream,
+ ByteArrayOutputStream, IOException, Closeable}
+import java.lang.reflect.InvocationTargetException
+import scala.concurrent.{Await, Future}
+import scala.concurrent.duration.{Duration, SECONDS}
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.util.control.Exception.ignoring
+
+// Each test normally completes almost immediately; in failure cases it waits for up to one second.
+
+@RunWith(classOf[JUnit4])
+class PipedProcessTest {
+ class ProcessMock(error: Boolean) extends Process {
+ var destroyCount = 0
+ def exitValue(): Int = {
+ if (error) {
+ throw new InterruptedException()
+ }
+ 0
+ }
+ def destroy(): Unit = { destroyCount += 1 }
+ }
+
+ class ProcessBuilderMock(process: Process, error: Boolean) extends ProcessBuilder.AbstractBuilder {
+ override def run(io: ProcessIO): Process = {
+ if (error) {
+ throw new IOException()
+ }
+ process
+ }
+ }
+
+ class PipeSinkMock extends Process.PipeSink("PipeSinkMock") {
+ var releaseCount = 0
+ override val pipe = null
+ override val sink = null
+ override def run(): Unit = {}
+ override def connectOut(out: OutputStream): Unit = {}
+ override def connectIn(pipeOut: PipedOutputStream): Unit = {}
+ override def release(): Unit = { releaseCount += 1 }
+ }
+
+ class PipeSourceMock extends Process.PipeSource("PipeSourceMock") {
+ var releaseCount = 0
+ override val pipe = null
+ override val source = null
+ override def run(): Unit = {}
+ override def connectIn(in: InputStream): Unit = {}
+ override def connectOut(sink: Process.PipeSink): Unit = {}
+ override def release(): Unit = { releaseCount += 1 }
+ }
+
+ class PipedProcesses(a: ProcessBuilder, b: ProcessBuilder, defaultIO: ProcessIO, toError: Boolean)
+ extends Process.PipedProcesses(a, b, defaultIO, toError) {
+ def callRunAndExitValue(source: Process.PipeSource, sink: Process.PipeSink) = {
+ val m = classOf[Process.PipedProcesses].getDeclaredMethod("runAndExitValue", classOf[Process.PipeSource], classOf[Process.PipeSink])
+ m.setAccessible(true)
+ try m.invoke(this, source, sink).asInstanceOf[Option[Int]]
+ catch {
+ case err: InvocationTargetException => throw err.getTargetException
+ }
+ }
+ }
+
+ // PipedProcesses need not release resources when it ends normally
+ @Test
+ def normallyEnd() {
+ val io = BasicIO(false, ProcessLogger(_ => ()))
+ val source = new PipeSourceMock
+ val sink = new PipeSinkMock
+ val a = new ProcessMock(error = false)
+ val b = new ProcessMock(error = false)
+ val p = new PipedProcesses(new ProcessBuilderMock(a, error = false), new ProcessBuilderMock(b, error = false), io, false)
+ val f = Future {
+ p.callRunAndExitValue(source, sink)
+ }
+ Await.result(f, Duration(1, SECONDS))
+ assert(source.releaseCount == 0)
+ assert(sink.releaseCount == 0)
+ assert(a.destroyCount == 0)
+ assert(b.destroyCount == 0)
+ }
+
+ // PipedProcesses must release resources when b.run() fails
+ @Test
+ def bFailed() {
+ val io = BasicIO(false, ProcessLogger(_ => ()))
+ val source = new PipeSourceMock
+ val sink = new PipeSinkMock
+ val a = new ProcessMock(error = false)
+ val b = new ProcessMock(error = false)
+ val p = new PipedProcesses(new ProcessBuilderMock(a, error = false), new ProcessBuilderMock(b, error = true), io, false)
+ val f = Future {
+ ignoring(classOf[IOException]) {
+ p.callRunAndExitValue(source, sink)
+ }
+ }
+ Await.result(f, Duration(1, SECONDS))
+ assert(source.releaseCount == 1)
+ assert(sink.releaseCount == 1)
+ assert(a.destroyCount == 0)
+ assert(b.destroyCount == 0)
+ }
+
+ // PipedProcesses must release resources when a.run() fails
+ @Test
+ def aFailed() {
+ val io = BasicIO(false, ProcessLogger(_ => ()))
+ val source = new PipeSourceMock
+ val sink = new PipeSinkMock
+ val a = new ProcessMock(error = false)
+ val b = new ProcessMock(error = false)
+ val p = new PipedProcesses(new ProcessBuilderMock(a, error = true), new ProcessBuilderMock(b, error = false), io, false)
+ val f = Future {
+ ignoring(classOf[IOException]) {
+ p.callRunAndExitValue(source, sink)
+ }
+ }
+ Await.result(f, Duration(1, SECONDS))
+ assert(source.releaseCount == 1)
+ assert(sink.releaseCount == 1)
+ assert(a.destroyCount == 0)
+ assert(b.destroyCount == 1)
+ }
+
+ // PipedProcesses must release resources when interrupted while waiting for first.exitValue()
+ @Test
+ def firstInterrupted() {
+ val io = BasicIO(false, ProcessLogger(_ => ()))
+ val source = new PipeSourceMock
+ val sink = new PipeSinkMock
+ val a = new ProcessMock(error = true)
+ val b = new ProcessMock(error = false)
+ val p = new PipedProcesses(new ProcessBuilderMock(a, error = false), new ProcessBuilderMock(b, error = false), io, false)
+ val f = Future {
+ p.callRunAndExitValue(source, sink)
+ }
+ Await.result(f, Duration(1, SECONDS))
+ assert(source.releaseCount == 1)
+ assert(sink.releaseCount == 1)
+ assert(a.destroyCount == 1)
+ assert(b.destroyCount == 1)
+ }
+
+ // PipedProcesses must release resources when interrupted while waiting for second.exitValue()
+ @Test
+ def secondInterrupted() {
+ val io = BasicIO(false, ProcessLogger(_ => ()))
+ val source = new PipeSourceMock
+ val sink = new PipeSinkMock
+ val a = new ProcessMock(error = false)
+ val b = new ProcessMock(error = true)
+ val p = new PipedProcesses(new ProcessBuilderMock(a, error = false), new ProcessBuilderMock(b, error = false), io, false)
+ val f = Future {
+ p.callRunAndExitValue(source, sink)
+ }
+ Await.result(f, Duration(1, SECONDS))
+ assert(source.releaseCount == 1)
+ assert(sink.releaseCount == 1)
+ assert(a.destroyCount == 1)
+ assert(b.destroyCount == 1)
+ }
+}
+
+@RunWith(classOf[JUnit4])
+class PipeSourceSinkTest {
+ def throwsIOException(f: => Unit) = {
+ try { f; false }
+ catch { case _: IOException => true }
+ }
+
+ class PipeSink extends Process.PipeSink("TestPipeSink") {
+ def ensureRunloopStarted() = {
+ while (sink.size() > 0) {
+ Thread.sleep(1)
+ }
+ }
+ def isReleased = {
+ val field = classOf[Process.PipeSink].getDeclaredField("pipe")
+ field.setAccessible(true)
+ val pipe = field.get(this).asInstanceOf[PipedInputStream]
+ !this.isAlive && throwsIOException { pipe.read() }
+ }
+ }
+
+ class PipeSource extends Process.PipeSource("TestPipeSource") {
+ def ensureRunloopStarted() = {
+ while (source.size() > 0) {
+ Thread.sleep(1)
+ }
+ }
+ def isReleased = {
+ val field = classOf[Process.PipeSource].getDeclaredField("pipe")
+ field.setAccessible(true)
+ val pipe = field.get(this).asInstanceOf[PipedOutputStream]
+ !this.isAlive && throwsIOException { pipe.write(1) }
+ }
+ }
+
+ trait CloseChecking extends Closeable {
+ var closed = false
+ override def close() = closed = true
+ }
+ class DebugOutputStream extends ByteArrayOutputStream with CloseChecking
+ class DebugInputStream(s: String) extends ByteArrayInputStream(s.getBytes()) with CloseChecking
+ class DebugInfinityInputStream extends InputStream with CloseChecking {
+ def read() = 1
+ }
+
+ def sourceSink() = {
+ val source = new PipeSource
+ val sink = new PipeSink
+ source connectOut sink
+ source.start()
+ sink.start()
+ (source, sink)
+ }
+
+ // PipeSource and PipeSink must release resources when they end normally
+ @Test
+ def normallyEnd() {
+ val in = new DebugInputStream("aaa")
+ val (source, sink) = sourceSink()
+ val out = new DebugOutputStream
+ source connectIn in
+ sink connectOut out
+ val f = Future {
+ source.join()
+ sink.join()
+ }
+ Await.result(f, Duration(1, SECONDS))
+ assert(in.closed == true)
+ assert(out.closed == true)
+ assert(source.isReleased == true)
+ assert(sink.isReleased == true)
+ }
+
+ // PipeSource and PipeSink must release resources when interrupted while waiting for source.take()
+ @Test
+ def sourceInterrupted() {
+ val (source, sink) = sourceSink()
+ val out = new DebugOutputStream
+ sink connectOut out
+ val f = Future {
+ sink.ensureRunloopStarted()
+ source.release()
+ sink.release()
+ }
+ Await.result(f, Duration(1, SECONDS))
+ assert(out.closed == true)
+ assert(source.isReleased == true)
+ assert(sink.isReleased == true)
+ }
+
+ // PipeSource and PipeSink must release resources when interrupted while waiting for sink.take()
+ @Test
+ def sinkInterrupted() {
+ val in = new DebugInputStream("aaa")
+ val (source, sink) = sourceSink()
+ source connectIn in
+ val f = Future {
+ source.ensureRunloopStarted()
+ source.release()
+ sink.release()
+ }
+ Await.result(f, Duration(1, SECONDS))
+ assert(in.closed == true)
+ assert(source.isReleased == true)
+ assert(sink.isReleased == true)
+ }
+
+ // PipeSource and PipeSink must release resources when interrupted while copying streams
+ @Test
+ def runloopInterrupted() {
+ val in = new DebugInfinityInputStream
+ val (source, sink) = sourceSink()
+ val out = new DebugOutputStream
+ source connectIn in
+ sink connectOut out
+ val f = Future {
+ source.ensureRunloopStarted()
+ sink.ensureRunloopStarted()
+ source.release()
+ sink.release()
+ }
+ Await.result(f, Duration(1, SECONDS))
+ assert(in.closed == true)
+ assert(out.closed == true)
+ assert(source.isReleased == true)
+ assert(sink.isReleased == true)
+ }
+}
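Process.PipedProcesses underlies the public #| pipe operator, so the release guarantees tested above are what ordinary pipelines depend on. A minimal usage sketch (spawns real echo/cat processes, hence kept out of the JUnit suite):

  import scala.sys.process._

  // #| wires the two processes via PipedProcesses; !! waits and captures stdout.
  val out = ("echo hello" #| "cat").!!
  assert(out.trim == "hello")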
diff --git a/test/scaladoc/run/t7905.check b/test/scaladoc/run/t7905.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/t7905.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/t7905.scala b/test/scaladoc/run/t7905.scala
new file mode 100644
index 0000000000..8570724470
--- /dev/null
+++ b/test/scaladoc/run/t7905.scala
@@ -0,0 +1,36 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ override def code = """
+ object A {
+ val foo = new B {
+ val bar = new C {
+ val baz: A.this.type = A.this
+ }
+ }
+ }
+
+ trait B {
+ type E = bar.D
+
+ val bar: C
+ }
+
+ trait C {
+ trait D
+ }
+
+ trait G {
+ type F = A.foo.E
+
+ def m(f: F) = f match {
+ case _: A.foo.bar.D => // error here
+ }
+ }
+ """
+
+ def scaladocSettings = ""
+
+ def testModel(root: Package) = ()
+}
diff --git a/test/scaladoc/scalacheck/HtmlFactoryTest.scala b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
index da0f253a37..fc190b188c 100644
--- a/test/scaladoc/scalacheck/HtmlFactoryTest.scala
+++ b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
@@ -779,6 +779,11 @@ object Test extends Properties("HtmlFactory") {
linkElement \@ "href" == expectedUrl && linkElement \@ "target" == "_top"
}
+ def assertValuesLink(memberName: String, expectedUrl: String): Boolean = {
+ val linkElement: NodeSeq = node \\ "div" \@ ("class", "values members") \\ "li" \@ ("name", memberName) \\ "span" \@ ("class", "permalink") \ "a"
+ linkElement \@ "href" == expectedUrl && linkElement \@ "target" == "_top"
+ }
+
}
val files = createTemplates("SI-8144.scala")
@@ -791,12 +796,12 @@ object Test extends Properties("HtmlFactory") {
property("SI-8144: Members' permalink - package") = check("some/package.html") { node =>
("type link" |: node.assertTypeLink("../index.html#some.package")) &&
- ("member: some.pack" |: node.assertMemberLink("values")("some.pack", "../index.html#some.package@pack"))
+ ("member: some.pack" |: node.assertValuesLink("some.pack", "../index.html#some.package@pack"))
}
property("SI-8144: Members' permalink - inner package") = check("some/pack/package.html") { node =>
("type link" |: node.assertTypeLink("../../index.html#some.pack.package")) &&
- ("member: SomeType (object)" |: node.assertMemberLink("values")("some.pack.SomeType", "../../index.html#some.pack.package@SomeType")) &&
+ ("member: SomeType (object)" |: node.assertValuesLink("some.pack.SomeType", "../../index.html#some.pack.package@SomeType")) &&
("member: SomeType (class)" |: node.assertMemberLink("types")("some.pack.SomeType", "../../index.html#some.pack.package@SomeTypeextendsAnyRef"))
}
@@ -809,8 +814,8 @@ object Test extends Properties("HtmlFactory") {
("type link" |: node.assertTypeLink("../../index.html#some.pack.SomeType")) &&
("constructor " |: node.assertMemberLink("constructors")("some.pack.SomeType#<init>", "../../index.html#some.pack.SomeType@<init>(arg:String):some.pack.SomeType")) &&
( "member: type TypeAlias" |: node.assertMemberLink("types")("some.pack.SomeType.TypeAlias", "../../index.html#some.pack.SomeType@TypeAlias=String")) &&
- ( "member: def >#<():Int " |: node.assertMemberLink("values")("some.pack.SomeType#>#<", "../../index.html#some.pack.SomeType@>#<():Int")) &&
- ( "member: def >@<():TypeAlias " |: node.assertMemberLink("values")("some.pack.SomeType#>@<", "../../index.html#some.pack.SomeType@>@<():SomeType.this.TypeAlias"))
+ ( "member: def >#<():Int " |: node.assertValuesLink("some.pack.SomeType#>#<", "../../index.html#some.pack.SomeType@>#<():Int")) &&
+ ( "member: def >@<():TypeAlias " |: node.assertValuesLink("some.pack.SomeType#>@<", "../../index.html#some.pack.SomeType@>@<():SomeType.this.TypeAlias"))
}
}
diff --git a/versions.properties b/versions.properties
index 454df6b757..c334629d20 100644
--- a/versions.properties
+++ b/versions.properties
@@ -1,10 +1,10 @@
-#Wed, 23 Jul 2014 08:37:26 +0200
+#Tue, 20 May 2014 10:01:37 +0200
# NOTE: this file determines the content of the scala-distribution
# via scala-dist-pom.xml and scala-library-all-pom.xml
# when adding new properties that influence a release,
# also add them to the update.versions mechanism in build.xml,
# which is used by scala-release-2.11.x in scala/jenkins-scripts
-starr.version=2.11.2
+starr.version=2.11.1
starr.use.released=1
# These are the versions of the modules that go with this release.
@@ -14,21 +14,21 @@ starr.use.released=1
scala.binary.version=2.11
# e.g. 2.11.0-RC1, 2.11.0, 2.11.1-RC1, 2.11.1
# this defines the dependency on scala-continuations-plugin in scala-dist's pom
-scala.full.version=2.11.2
+scala.full.version=2.11.1
# external modules shipped with distribution, as specified by scala-library-all's pom
scala-xml.version.number=1.0.2
-scala-parser-combinators.version.number=1.0.2
+scala-parser-combinators.version.number=1.0.1
scala-continuations-plugin.version.number=1.0.2
scala-continuations-library.version.number=1.0.2
scala-swing.version.number=1.0.1
-akka-actor.version.number=2.3.4
+akka-actor.version.number=2.3.3
actors-migration.version.number=1.1.0
jline.version=2.12
# external modules, used internally (not shipped)
-partest.version.number=1.0.1
-scalacheck.version.number=1.11.4
+partest.version.number=1.0.0
+scalacheck.version.number=1.11.3
# TODO: modularize the compiler
#scala-compiler-doc.version.number=1.0.0-RC1