-rw-r--r--  src/compiler/scala/reflect/internal/NameManglers.scala  4
-rw-r--r--  src/compiler/scala/reflect/internal/SymbolTable.scala  2
-rw-r--r--  src/compiler/scala/reflect/internal/Symbols.scala  2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala  10
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala  10
-rw-r--r--  src/library/scala/collection/parallel/Tasks.scala  2
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParHashMap.scala  14
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParHashSet.scala  13
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParRange.scala  13
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParVector.scala  12
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParArray.scala  4
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParCtrie.scala  12
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashMap.scala  2
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashSet.scala  2
-rw-r--r--  test/benchmarking/ParCtrie-map.scala  2
-rw-r--r--  test/files/neg/overloaded-unapply.check (renamed from test/files/neg/t960.check)  6
-rw-r--r--  test/files/neg/overloaded-unapply.scala (renamed from test/files/neg/t960.scala)  0
17 files changed, 88 insertions(+), 22 deletions(-)
diff --git a/src/compiler/scala/reflect/internal/NameManglers.scala b/src/compiler/scala/reflect/internal/NameManglers.scala
index 12f56976c9..48f21721da 100644
--- a/src/compiler/scala/reflect/internal/NameManglers.scala
+++ b/src/compiler/scala/reflect/internal/NameManglers.scala
@@ -80,9 +80,9 @@ trait NameManglers {
val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
val SETTER_SUFFIX: TermName = encode("_=")
- @deprecated("2.10.0", "Use SPECIALIZED_SUFFIX")
+ @deprecated("Use SPECIALIZED_SUFFIX", "2.10.0")
def SPECIALIZED_SUFFIX_STRING = SPECIALIZED_SUFFIX.toString
- @deprecated("2.10.0", "Use SPECIALIZED_SUFFIX")
+ @deprecated("Use SPECIALIZED_SUFFIX", "2.10.0")
def SPECIALIZED_SUFFIX_NAME: TermName = SPECIALIZED_SUFFIX.toTermName
def isConstructorName(name: Name) = name == CONSTRUCTOR || name == MIXIN_CONSTRUCTOR
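For illustration (not part of the commit): the fix above swaps the two annotation arguments because scala.deprecated is declared as `class deprecated(message: String = "", since: String = "")`, so the message comes first and the version second. A minimal sketch, using a hypothetical Legacy object:

object Legacy {
  // Message first, then the version in which the method was deprecated.
  @deprecated("Use newApi instead", "2.10.0")
  def oldApi(): Int = newApi()

  def newApi(): Int = 42
}

Compiled with -deprecation, a call to Legacy.oldApi() warns with the intended message; with the arguments reversed, the warning text would have been the bare version string.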
diff --git a/src/compiler/scala/reflect/internal/SymbolTable.scala b/src/compiler/scala/reflect/internal/SymbolTable.scala
index 5ae8f5dbf4..2a5f5c5394 100644
--- a/src/compiler/scala/reflect/internal/SymbolTable.scala
+++ b/src/compiler/scala/reflect/internal/SymbolTable.scala
@@ -37,7 +37,7 @@ abstract class SymbolTable extends api.Universe
def log(msg: => AnyRef): Unit
def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg))
- @deprecated("2.10.0", "Give us a reason")
+ @deprecated("Give us a reason", "2.10.0")
def abort(): Nothing = abort("unknown error")
/** Override with final implementation for inlining. */
diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala
index 853046e81a..c171ecc702 100644
--- a/src/compiler/scala/reflect/internal/Symbols.scala
+++ b/src/compiler/scala/reflect/internal/Symbols.scala
@@ -1600,7 +1600,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
else owner.logicallyEnclosingMember
/** Kept for source compatibility with 2.9. Scala IDE for Eclipse relies on this. */
- @deprecated("Use enclosingTopLevelClass")
+ @deprecated("Use enclosingTopLevelClass", "2.10.0")
def toplevelClass: Symbol = enclosingTopLevelClass
/** The top-level class containing this symbol. */
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index b7a22c6ac1..ed9fee986f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -442,7 +442,6 @@ trait ContextErrors {
def UnexpectedTreeAnnotationError(tree: Tree, unexpected: Tree) =
NormalTypeError(tree, "unexpected tree after typing annotation: "+ unexpected)
- // TODO no test case
//typedExistentialTypeTree
def AbstractionFromVolatileTypeError(vd: ValDef) =
issueNormalTypeError(vd, "illegal abstraction from value with volatile type "+vd.symbol.tpe)
@@ -465,8 +464,7 @@ trait ContextErrors {
def TooManyArgsNamesDefaultsError(tree: Tree, fun: Tree) =
NormalTypeError(tree, "too many arguments for "+treeSymTypeMsg(fun))
- // can it still happen? see test case neg/t960.scala
- // TODO no test case
+ // can it still happen? see test case neg/overloaded-unapply.scala
def OverloadedUnapplyError(tree: Tree) =
issueNormalTypeError(tree, "cannot resolve overloaded unapply")
@@ -499,7 +497,6 @@ trait ContextErrors {
}
//doTypedApply - patternMode
- // TODO: missing test case
def TooManyArgsPatternError(fun: Tree) =
NormalTypeError(fun, "too many arguments for unapply pattern, maximum = "+definitions.MaxTupleArity)
@@ -541,7 +538,6 @@ trait ContextErrors {
"illegal inheritance;\n self-type "+selfType+" does not conform to "+
parent +"'s selftype "+parent.tpe.typeOfThis)
- // TODO: missing test case
def ParentInheritedTwiceError(parent: Tree, parentSym: Symbol) =
NormalTypeError(parent, parentSym+" is inherited twice")
@@ -572,7 +568,6 @@ trait ContextErrors {
setError(tree)
}
- //TODO Needs test case
def ConstructorPrefixError(tree: Tree, restpe: Type) = {
issueNormalTypeError(tree, restpe.prefix+" is not a legal prefix for a constructor")
setError(tree)
@@ -597,7 +592,6 @@ trait ContextErrors {
setError(tree)
}
- // TODO needs test case
// cases where we do not necessarily return trees
def DependentMethodTpeConversionToFunctionError(tree: Tree, tp: Type) =
issueNormalTypeError(tree, "method with dependent type "+tp+" cannot be converted to function value")
@@ -606,11 +600,9 @@ trait ContextErrors {
def StarPatternWithVarargParametersError(tree: Tree) =
issueNormalTypeError(tree, "star patterns must correspond with varargs parameters")
- // TODO missing test case
def FinitaryError(tparam: Symbol) =
issueSymbolTypeError(tparam, "class graph is not finitary because type parameter "+tparam.name+" is expansively recursive")
- // TODO missing test case for a second case
def QualifyingClassError(tree: Tree, qual: Name) = {
issueNormalTypeError(tree,
if (qual.isEmpty) tree + " can be used only in a class, object, or template"
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 5e6bf8c1a3..7e0fa366ab 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -58,7 +58,7 @@ import annotation.unchecked.uncheckedStable
* }}}
*
* which returns an instance of `IterableSplitter[T]`, which is a subtype of `Splitter[T]`.
- * Parallel iterators have a method `remaining` to check the remaining number of elements,
+ * Splitters have a method `remaining` to check the remaining number of elements,
* and a method `split`, which divides the elements the splitter iterates over
* into disjoint subsets:
*
@@ -96,7 +96,7 @@ import annotation.unchecked.uncheckedStable
* The combination of methods `toMap`, `toSeq` or `toSet` along with `par` and `seq` is a flexible
* way to change between different collection types.
*
- * Since this trait extends the `Iterable` trait, methods like `size` must also
+ * Since this trait extends the `GenIterable` trait, methods like `size` must also
* be implemented in concrete collections, while `iterator` forwards to `splitter` by
* default.
*
@@ -116,7 +116,7 @@ import annotation.unchecked.uncheckedStable
* which do not know the number of elements remaining. To do this, the new collection implementation must override
* `isStrictSplitterCollection` to `false`. This will make some operations unavailable.
*
- * To create a new parallel collection, extend the `ParIterable` trait, and implement `size`, `parallelIterator`,
+ * To create a new parallel collection, extend the `ParIterable` trait, and implement `size`, `splitter`,
* `newCombiner` and `seq`. Having an implicit combiner factory requires extending this trait in addition, as
* well as providing a companion object, as with regular collections.
*
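
For illustration (not part of the commit): the splitter contract described in the documentation above, sketched against the plain `Splitter[T]` trait (the library's `IterableSplitter` additionally carries signalling plumbing, omitted here). `ArraySplitter` is a hypothetical name:

import scala.collection.parallel.Splitter

class ArraySplitter[T](a: Array[T], from: Int, until: Int) extends Splitter[T] {
  private var i = from
  def hasNext = i < until
  def next() = { val r = a(i); i += 1; r }
  def remaining = until - i                 // exact element count: a strict splitter
  def split: Seq[Splitter[T]] =             // disjoint halves of what is left
    if (remaining < 2) Seq(this)
    else {
      val mid = i + remaining / 2
      Seq(new ArraySplitter(a, i, mid), new ArraySplitter(a, mid, until))
    }
}

Calling `split` invalidates the original splitter; each returned splitter traverses a disjoint subset of the remaining elements, which is what allows work to be distributed across processors.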
@@ -159,6 +159,10 @@ self: ParIterableLike[T, Repr, Sequential] =>
@volatile
private var _tasksupport = defaultTaskSupport
+ protected def initTaskSupport() {
+ _tasksupport = defaultTaskSupport
+ }
+
def tasksupport = {
val ts = _tasksupport
if (ts eq null) {
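For illustration (not part of the commit): `initTaskSupport()` exists because a task support wraps a thread pool and does not survive Java serialization, so the `readObject` hooks added in the files below restore the default after `defaultReadObject`. The same hook pattern on a hypothetical `CachedBox` class:

import java.io._

class CachedBox(val value: Int) extends Serializable {
  // Stands in for a task support: useful at runtime, pointless to serialize.
  @transient private var pool: String = freshPool()
  private def freshPool() = "pool-for-" + value
  def poolName = pool

  private def writeObject(out: ObjectOutputStream) {
    out.defaultWriteObject()
  }
  private def readObject(in: ObjectInputStream) {
    in.defaultReadObject()
    pool = freshPool()    // defaultReadObject left the transient field null
  }
}

object SerializationDemo extends App {
  val buf = new ByteArrayOutputStream
  new ObjectOutputStream(buf).writeObject(new CachedBox(1))
  val in  = new ObjectInputStream(new ByteArrayInputStream(buf.toByteArray))
  println(in.readObject().asInstanceOf[CachedBox].poolName)   // pool-for-1
}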
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index 60a8bb1ed6..4a581f219e 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -359,7 +359,7 @@ object ThreadPoolTasks {
/** An implementation of tasks objects based on the Java thread pooling API and synchronization using futures. */
-@deprecated("This implementation is not used.")
+@deprecated("This implementation is not used.", "2.10.0")
trait FutureThreadPoolTasks extends Tasks {
import java.util.concurrent._
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index 266b179401..52d6531f9e 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -116,7 +116,21 @@ self =>
def remaining = sz - i
override def toString = "HashTrieIterator(" + sz + ")"
}
+
+ /* serialization */
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ out.defaultWriteObject
+ }
+ private def readObject(in: java.io.ObjectInputStream) {
+ in.defaultReadObject
+
+ initTaskSupport()
+ }
+
+ /* debug */
+
private[parallel] def printDebugInfo() {
println("Parallel hash trie")
println("Top level inner trie type: " + trie.getClass)
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index 0d7f04976e..0a19afc426 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -111,6 +111,19 @@ self =>
}
def remaining = sz - i
}
+
+ /* serialization */
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ out.defaultWriteObject
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ in.defaultReadObject
+
+ initTaskSupport()
+ }
+
}
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index 64e07ce4ff..364175fe41 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -105,6 +105,19 @@ self =>
cb
}
}
+
+ /* serialization */
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ out.defaultWriteObject
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ in.defaultReadObject
+
+ initTaskSupport()
+ }
+
}
object ParRange {
diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala
index 5d9c431bc1..310b09a016 100644
--- a/src/library/scala/collection/parallel/immutable/ParVector.scala
+++ b/src/library/scala/collection/parallel/immutable/ParVector.scala
@@ -78,6 +78,18 @@ extends ParSeq[T]
splitted.map(v => new ParVector(v).splitter.asInstanceOf[ParVectorIterator])
}
}
+
+ /* serialization */
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ out.defaultWriteObject
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ in.defaultReadObject
+
+ initTaskSupport()
+ }
}
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index 5c3da66be0..683b7eaa9a 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -672,9 +672,11 @@ self =>
private def readObject(in: java.io.ObjectInputStream) {
in.defaultReadObject
-
+
// get raw array from arrayseq
array = arrayseq.array.asInstanceOf[Array[Any]]
+
+ initTaskSupport()
}
}
diff --git a/src/library/scala/collection/parallel/mutable/ParCtrie.scala b/src/library/scala/collection/parallel/mutable/ParCtrie.scala
index 470972adad..b4dc8beb2a 100644
--- a/src/library/scala/collection/parallel/mutable/ParCtrie.scala
+++ b/src/library/scala/collection/parallel/mutable/ParCtrie.scala
@@ -115,6 +115,18 @@ extends ParMap[K, V]
override def merge(that: Size) = result = result + that.result
}
+ /* serialization */
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ out.defaultWriteObject
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ in.defaultReadObject
+
+ initTaskSupport()
+ }
+
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 6ce6c45460..72526aadb1 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -106,6 +106,8 @@ self =>
private def readObject(in: java.io.ObjectInputStream) {
init[V](in, new Entry(_, _))
+
+ initTaskSupport()
}
private[parallel] override def brokenInvariants = {
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index e0a2ab03df..84b7c4e42c 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -84,6 +84,8 @@ extends ParSet[T]
private def readObject(in: java.io.ObjectInputStream) {
init(in, x => x)
+
+ initTaskSupport()
}
import collection.DebugUtils._
diff --git a/test/benchmarking/ParCtrie-map.scala b/test/benchmarking/ParCtrie-map.scala
index c8de99f33e..f1d2f560b7 100644
--- a/test/benchmarking/ParCtrie-map.scala
+++ b/test/benchmarking/ParCtrie-map.scala
@@ -10,7 +10,7 @@ object Map extends testing.Benchmark {
val par = sys.props("par").toInt
val parctrie = ParCtrie((0 until length) zip (0 until length): _*)
- collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par)
+ parctrie.tasksupport = new collection.parallel.ForkJoinTaskSupport(new scala.concurrent.forkjoin.ForkJoinPool(par))
def run = {
parctrie map {
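For illustration (not part of the commit): the benchmark now sizes its own pool through the per-collection `tasksupport` field rather than mutating the global fork/join pool shared by every parallel collection. The same 2.10-era idiom on an ordinary parallel array:

import scala.collection.parallel.ForkJoinTaskSupport
import scala.concurrent.forkjoin.ForkJoinPool

object TaskSupportDemo extends App {
  val xs = (1 to 1000000).toArray.par             // a ParArray[Int]
  // Only this collection runs on the 4-thread pool; others keep the default.
  xs.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(4))
  println(xs.map(_ * 2).sum)
}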
diff --git a/test/files/neg/t960.check b/test/files/neg/overloaded-unapply.check
index 603b1cb032..1da93f6939 100644
--- a/test/files/neg/t960.check
+++ b/test/files/neg/overloaded-unapply.check
@@ -1,13 +1,13 @@
-t960.scala:18: error: ambiguous reference to overloaded definition,
+overloaded-unapply.scala:18: error: ambiguous reference to overloaded definition,
both method unapply in object List of type [a](xs: List[a])Option[Null]
and method unapply in object List of type [a](xs: List[a])Option[(a, List[a])]
match argument types (List[a])
case List(x, xs) => 7
^
-t960.scala:22: error: cannot resolve overloaded unapply
+overloaded-unapply.scala:22: error: cannot resolve overloaded unapply
case List(x, xs) => 7
^
-t960.scala:12: error: method unapply is defined twice in t960.scala
+overloaded-unapply.scala:12: error: method unapply is defined twice in overloaded-unapply.scala
def unapply[a](xs: List[a]): Option[Null] = xs match {
^
three errors found
diff --git a/test/files/neg/t960.scala b/test/files/neg/overloaded-unapply.scala
index 36909626c1..36909626c1 100644
--- a/test/files/neg/t960.scala
+++ b/test/files/neg/overloaded-unapply.scala
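For illustration (not part of the commit): the shape of code the renamed neg test exercises, reconstructed from the .check output above. It is intentionally ill-formed; the two `unapply` overloads erase to the same JVM signature and cannot be disambiguated at the pattern, yielding the three errors listed (`MyList` is a hypothetical stand-in for the test's own `List`):

class MyList[+a]

object MyList {
  // Both erase to (MyList)Option => "defined twice"; any MyList(...) pattern
  // then hits an ambiguous, ultimately unresolvable overloaded unapply.
  def unapply[a](xs: MyList[a]): Option[Null] = None
  def unapply[a](xs: MyList[a]): Option[(a, MyList[a])] = None
}

object Use {
  def f(xs: MyList[Int]) = xs match {
    case MyList(x, rest) => 7
    case _               => 0
  }
}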