author     Aleksandar Pokopec <aleksandar.prokopec@epfl.ch>   2011-07-26 17:06:49 +0000
committer  Aleksandar Pokopec <aleksandar.prokopec@epfl.ch>   2011-07-26 17:06:49 +0000
commit     68031b3af11a2f79b186607f44d5c327051d19bd (patch)
tree       b0381baa1311819156faa5e7dce45f946b6fd38d /src
parent     1ad15b1f50364dc42e06117bf551159adea25312 (diff)
Adding the missing ParMap and GenMap methods.
No review.
Diffstat (limited to 'src')
-rw-r--r--  src/library/scala/collection/GenMap.scala                      2
-rw-r--r--  src/library/scala/collection/GenMapLike.scala                 68
-rw-r--r--  src/library/scala/collection/parallel/ParMap.scala            13
-rw-r--r--  src/library/scala/collection/parallel/ParMapLike.scala        99
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParMap.scala  35
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParMap.scala    39
6 files changed, 253 insertions(+), 3 deletions(-)
diff --git a/src/library/scala/collection/GenMap.scala b/src/library/scala/collection/GenMap.scala
index 0556069371..bee02d4658 100644
--- a/src/library/scala/collection/GenMap.scala
+++ b/src/library/scala/collection/GenMap.scala
@@ -23,6 +23,8 @@ extends GenMapLike[A, B, GenMap[A, B]]
with GenIterable[(A, B)]
{
def seq: Map[A, B]
+
+ def updated [B1 >: B](key: A, value: B1): GenMap[A, B1]
}
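
A usage sketch, not part of the patch (assuming a 2.9.x-era standard library): with `updated` declared on GenMap, generic code can add a binding without knowing whether the map behind the interface is sequential or parallel. The helper name is illustrative only.

    // hypothetical helper: relies only on the GenMap interface
    def withVersion(m: scala.collection.GenMap[String, Int]): scala.collection.GenMap[String, Int] =
      m.updated("version", 1)

    withVersion(scala.collection.immutable.Map("a" -> 1))        // sequential Map
    withVersion(scala.collection.immutable.Map("a" -> 1).par)    // parallel ParMap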
diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala
index 2bbcc8f4f5..6060087d54 100644
--- a/src/library/scala/collection/GenMapLike.scala
+++ b/src/library/scala/collection/GenMapLike.scala
@@ -29,10 +29,78 @@ trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals
def +[B1 >: B](kv: (A, B1)): GenMap[A, B1]
def - (key: A): Repr
+
// This hash code must be symmetric in the contents but ought not
// collide trivially.
override def hashCode() = util.MurmurHash.symmetricHash(seq, Map.hashSeed)
+ /** Returns the value associated with a key, or a default value if the key is not contained in the map.
+ * @param key the key.
+ * @param default a computation that yields a default value in case no binding for `key` is
+ * found in the map.
+ * @tparam B1 the result type of the default computation.
+ * @return the value associated with `key` if it exists,
+ * otherwise the result of the `default` computation.
+ * @usecase def getOrElse(key: A, default: => B): B
+ */
+ def getOrElse[B1 >: B](key: A, default: => B1): B1
+
+ /** Tests whether this map contains a binding for a key.
+ *
+ * @param key the key
+ * @return `true` if there is a binding for `key` in this map, `false` otherwise.
+ */
+ def contains(key: A): Boolean
+
+ /** Tests whether this map contains a binding for a key. This method,
+ * which implements an abstract method of trait `PartialFunction`,
+ * is equivalent to `contains`.
+ *
+ * @param key the key
+ * @return `true` if there is a binding for `key` in this map, `false` otherwise.
+ */
+ def isDefinedAt(key: A): Boolean
+
+ def keySet: GenSet[A]
+
+ /** Collects all keys of this map in an iterable collection.
+ *
+ * @return the keys of this map as an iterable.
+ */
+ def keys: GenIterable[A]
+
+ /** Collects all values of this map in an iterable collection.
+ *
+ * @return the values of this map as an iterable.
+ */
+ def values: GenIterable[B]
+
+ /** Creates an iterator for all keys.
+ *
+ * @return an iterator over all keys.
+ */
+ def keysIterator: Iterator[A]
+
+ /** Creates an iterator for all values in this map.
+ *
+ * @return an iterator over all values that are associated with some key in this map.
+ */
+ def valuesIterator: Iterator[B]
+
+ /** Filters this map by retaining only keys satisfying a predicate.
+ * @param p the predicate used to test keys
+ * @return an immutable map consisting only of those key value pairs of this map where the key satisfies
+ * the predicate `p`. The resulting map wraps the original map without copying any elements.
+ */
+ def filterKeys(p: A => Boolean): GenMap[A, B]
+
+ /** Transforms this map by applying a function to every retrieved value.
+ * @param f the function used to transform values of this map.
+ * @return a map view which maps every key of this map
+ * to `f(this(key))`. The resulting map wraps the original map without copying any elements.
+ */
+ def mapValues[C](f: B => C): GenMap[A, C]
+
/** Compares two maps structurally; i.e. checks if all mappings
* contained in this map are also contained in the other map,
* and vice versa.
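
A usage sketch, not part of the patch (assuming a 2.9.x-era standard library): once these methods live on GenMapLike, the same lookup and view code works against sequential and parallel maps alike. The `describe` helper is hypothetical.

    import scala.collection.GenMap

    def describe(m: GenMap[String, Int]): String = {
      val hits    = m.getOrElse("hits", 0)          // added by this patch
      val small   = m.filterKeys(_.length <= 4)     // non-copying wrapper
      val doubled = m.mapValues(_ * 2)              // non-copying wrapper
      "hits=" + hits + ", small=" + small.size + ", doubled sum=" + doubled.values.sum
    }

    describe(Map("hits" -> 3, "misses" -> 1))       // sequential Map
    describe(Map("hits" -> 3, "misses" -> 1).par)   // parallel ParMap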
diff --git a/src/library/scala/collection/parallel/ParMap.scala b/src/library/scala/collection/parallel/ParMap.scala
index c696099007..58197ab2c6 100644
--- a/src/library/scala/collection/parallel/ParMap.scala
+++ b/src/library/scala/collection/parallel/ParMap.scala
@@ -50,6 +50,10 @@ self =>
def empty: ParMap[K, V] = new mutable.ParHashMap[K, V]
override def stringPrefix = "ParMap"
+
+ override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value))
+
+ def + [U >: V](kv: (K, U)): ParMap[K, U]
}
@@ -61,6 +65,15 @@ object ParMap extends ParMapFactory[ParMap] {
implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V]
+ /** An abstract shell used by { mutable, immutable }.Map but not by collection.Map
+ * because of variance issues.
+ */
+ abstract class WithDefault[A, +B](underlying: ParMap[A, B], d: A => B) extends ParMap[A, B] {
+ override def size = underlying.size
+ def get(key: A) = underlying.get(key)
+ def splitter = underlying.splitter
+ override def default(key: A): B = d(key)
+ }
}
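
A usage sketch, not part of the patch (assuming a 2.9.x-era standard library): `updated` is defined in terms of the abstract `+`, so both produce a new ParMap containing the extra binding.

    import scala.collection.parallel.ParMap

    val m  = ParMap("a" -> 1, "b" -> 2)     // generic parallel map from the companion factory
    val m2 = m.updated("c", 3)              // equivalent to m + (("c", 3))
    assert(m2.get("c") == Some(3))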
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index 5b855468c4..beb50a41e1 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -16,9 +16,9 @@ import scala.collection.MapLike
import scala.collection.GenMapLike
import scala.collection.Map
import scala.collection.mutable.Builder
-
-
-
+import annotation.unchecked.uncheckedVariance
+import scala.collection.generic.IdleSignalling
+import scala.collection.generic.Signalling
@@ -53,6 +53,99 @@ self =>
case None => default(key)
}
+ def getOrElse[U >: V](key: K, default: => U): U = get(key) match {
+ case Some(v) => v
+ case None => default
+ }
+
+ def contains(key: K): Boolean = get(key).isDefined
+
+ def isDefinedAt(key: K): Boolean = contains(key)
+
+ private[this] def keysIterator(s: IterableSplitter[(K, V)] @uncheckedVariance): IterableSplitter[K] =
+ new IterableSplitter[K] {
+ i =>
+ val iter = s
+ var signalDelegate: Signalling = IdleSignalling
+ def hasNext = iter.hasNext
+ def next() = iter.next._1
+ def split = {
+ val ss = iter.split.map(keysIterator(_))
+ ss.foreach { _.signalDelegate = i.signalDelegate }
+ ss
+ }
+ def remaining = iter.remaining
+ def dup = keysIterator(iter.dup)
+ }
+
+ def keysIterator: IterableSplitter[K] = keysIterator(splitter)
+
+ private[this] def valuesIterator(s: IterableSplitter[(K, V)] @uncheckedVariance): IterableSplitter[V] =
+ new IterableSplitter[V] {
+ i =>
+ val iter = s
+ var signalDelegate: Signalling = IdleSignalling
+ def hasNext = iter.hasNext
+ def next() = iter.next._2
+ def split = {
+ val ss = iter.split.map(valuesIterator(_))
+ ss.foreach { _.signalDelegate = i.signalDelegate }
+ ss
+ }
+ def remaining = iter.remaining
+ def dup = valuesIterator(iter.dup)
+ }
+
+ def valuesIterator: IterableSplitter[V] = valuesIterator(splitter)
+
+ protected class DefaultKeySet extends ParSet[K] {
+ def contains(key : K) = self.contains(key)
+ def splitter = keysIterator(self.splitter)
+ def + (elem: K): ParSet[K] =
+ (ParSet[K]() ++ this + elem).asInstanceOf[ParSet[K]] // !!! concrete overrides abstract problem
+ def - (elem: K): ParSet[K] =
+ (ParSet[K]() ++ this - elem).asInstanceOf[ParSet[K]] // !!! concrete overrides abstract problem
+ override def size = self.size
+ override def foreach[S](f: K => S) = for ((k, v) <- self) f(k)
+ override def seq = self.seq.keySet
+ }
+
+ protected class DefaultValuesIterable extends ParIterable[V] {
+ def splitter = valuesIterator(self.splitter)
+ override def size = self.size
+ override def foreach[S](f: V => S) = for ((k, v) <- self) f(v)
+ def seq = self.seq.values
+ }
+
+ def keySet: ParSet[K] = new DefaultKeySet
+
+ def keys: ParIterable[K] = keySet
+
+ def values: ParIterable[V] = new DefaultValuesIterable
+
+ def filterKeys(p: K => Boolean): ParMap[K, V] = new ParMap[K, V] {
+ lazy val filtered = self.filter(kv => p(kv._1))
+ override def foreach[S](f: ((K, V)) => S): Unit = for (kv <- self) if (p(kv._1)) f(kv)
+ def splitter = filtered.splitter
+ override def contains(key: K) = self.contains(key) && p(key)
+ def get(key: K) = if (!p(key)) None else self.get(key)
+ def seq = self.seq.filterKeys(p)
+ def size = filtered.size
+ def + [U >: V](kv: (K, U)): ParMap[K, U] = ParMap[K, U]() ++ this + kv
+ def - (key: K): ParMap[K, V] = ParMap[K, V]() ++ this - key
+ }
+
+ def mapValues[S](f: V => S): ParMap[K, S] = new ParMap[K, S] {
+ override def foreach[Q](g: ((K, S)) => Q): Unit = for ((k, v) <- self) g((k, f(v)))
+ def splitter = self.splitter.map(kv => (kv._1, f(kv._2)))
+ override def size = self.size
+ override def contains(key: K) = self.contains(key)
+ def get(key: K) = self.get(key).map(f)
+ def seq = self.seq.mapValues(f)
+ def + [U >: S](kv: (K, U)): ParMap[K, U] = ParMap[K, U]() ++ this + kv
+ def - (key: K): ParMap[K, S] = ParMap[K, S]() ++ this - key
+ }
+
// note - should not override toMap (could be mutable)
}
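
A usage sketch, not part of the patch (assuming a 2.9.x-era standard library): `filterKeys` and `mapValues` wrap the receiver rather than copying it, so lookups go through the original map and apply the predicate or transformation on the fly.

    import scala.collection.parallel.immutable.ParMap

    val scores     = ParMap("alice" -> 10, "bob" -> 7, "carol" -> 12)
    val shortNames = scores.filterKeys(_.length <= 3)   // wrapper, no copying
    val bonus      = scores.mapValues(_ + 5)            // wrapper, no copying

    shortNames.get("alice")   // None: key rejected by the predicate
    bonus.get("bob")          // Some(12): transformed on lookup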
diff --git a/src/library/scala/collection/parallel/immutable/ParMap.scala b/src/library/scala/collection/parallel/immutable/ParMap.scala
index 9f58ddb0d7..5060b36e7a 100644
--- a/src/library/scala/collection/parallel/immutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParMap.scala
@@ -43,6 +43,30 @@ self =>
override def stringPrefix = "ParMap"
override def toMap[P, Q](implicit ev: (K, V) <:< (P, Q)): ParMap[P, Q] = this.asInstanceOf[ParMap[P, Q]]
+
+ override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value))
+
+ def + [U >: V](kv: (K, U)): ParMap[K, U]
+
+ /** The same map with a given default function.
+ * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`.
+ *
+ * Invoking transformer methods (e.g. `map`) will not preserve the default value.
+ *
+ * @param d the function mapping keys to values, used for non-present keys
+ * @return a wrapper of the map with a default value
+ */
+ def withDefault[U >: V](d: K => U): collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, d)
+
+ /** The same map with a given default value.
+ *
+ * Invoking transformer methods (e.g. `map`) will not preserve the default value.
+ *
+ * @param d the function mapping keys to values, used for non-present keys
+ * @return a wrapper of the map with a default value
+ */
+ def withDefaultValue[U >: V](d: U): collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, x => d)
+
}
@@ -54,4 +78,15 @@ object ParMap extends ParMapFactory[ParMap] {
implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V]
+ class WithDefault[K, +V](underlying: ParMap[K, V], d: K => V)
+ extends collection.parallel.ParMap.WithDefault[K, V](underlying, d) with ParMap[K, V] {
+ override def empty = new WithDefault(underlying.empty, d)
+ override def updated[U >: V](key: K, value: U): WithDefault[K, U] = new WithDefault[K, U](underlying.updated[U](key, value), d)
+ override def + [U >: V](kv: (K, U)): WithDefault[K, U] = updated(kv._1, kv._2)
+ override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d)
+ override def withDefault[U >: V](d: K => U): ParMap[K, U] = new WithDefault[K, U](underlying, d)
+ override def withDefaultValue[U >: V](d: U): ParMap[K, U] = new WithDefault[K, U](underlying, x => d)
+ override def seq = underlying.seq.withDefault(d)
+ }
+
}
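
A usage sketch, not part of the patch (assuming the API added here): `withDefault` and `withDefaultValue` only change what `apply` returns for missing keys; `get`, `contains` and the iterators still see the underlying map.

    import scala.collection.parallel.immutable.ParMap

    val counts = ParMap("a" -> 2).withDefaultValue(0)

    counts("a")              // 2
    counts("zzz")            // 0, supplied by the default
    counts.get("zzz")        // None: get is unaffected
    counts.contains("zzz")   // false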
diff --git a/src/library/scala/collection/parallel/mutable/ParMap.scala b/src/library/scala/collection/parallel/mutable/ParMap.scala
index 91c2b3d2b2..4b3eae4ad1 100644
--- a/src/library/scala/collection/parallel/mutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMap.scala
@@ -43,6 +43,27 @@ extends collection/*.mutable*/.GenMap[K, V]
def seq: collection.mutable.Map[K, V]
+ override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value))
+
+ /** The same map with a given default function.
+ * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`.
+ *
+ * Invoking transformer methods (e.g. `map`) will not preserve the default value.
+ *
+ * @param d the function mapping keys to values, used for non-present keys
+ * @return a wrapper of the map with a default value
+ */
+ def withDefault(d: K => V): collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, d)
+
+ /** The same map with a given default value.
+ *
+ * Invoking transformer methods (e.g. `map`) will not preserve the default value.
+ *
+ * @param d the function mapping keys to values, used for non-present keys
+ * @return a wrapper of the map with a default value
+ */
+ def withDefaultValue(d: V): collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, x => d)
+
}
@@ -54,6 +75,24 @@ object ParMap extends ParMapFactory[ParMap] {
implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V]
+ class WithDefault[K, V](underlying: ParMap[K, V], d: K => V)
+ extends collection.parallel.ParMap.WithDefault(underlying, d) with ParMap[K, V] {
+ override def += (kv: (K, V)) = {underlying += kv; this}
+ def -= (key: K) = {underlying -= key; this}
+ override def empty = new WithDefault(underlying.empty, d)
+ override def updated[U >: V](key: K, value: U): WithDefault[K, U] = new WithDefault[K, U](underlying.updated[U](key, value), d)
+ override def + [U >: V](kv: (K, U)): WithDefault[K, U] = updated(kv._1, kv._2)
+ override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d)
+ override def seq = underlying.seq.withDefault(d)
+ def clear() = underlying.clear()
+ def put(key: K, value: V): Option[V] = underlying.put(key, value)
+
+ /** If these methods aren't overridden to thread through the underlying map,
+ * successive calls to withDefault* have no effect.
+ */
+ override def withDefault(d: K => V): ParMap[K, V] = new WithDefault[K, V](underlying, d)
+ override def withDefaultValue(d: V): ParMap[K, V] = new WithDefault[K, V](underlying, x => d)
+ }
}
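
A usage sketch, not part of the patch (assuming the API added here): the mutable WithDefault wrapper forwards `+=`, `-=`, `put` and `clear` to the underlying map, so updates made through the wrapper are visible on later lookups, and the default only applies to keys that are still missing.

    import scala.collection.parallel.mutable.ParMap

    val cache = ParMap[String, Int]().withDefaultValue(-1)

    cache += (("hits", 3))
    cache("hits")      // 3
    cache("misses")    // -1, supplied by the default
    cache -= "hits"
    cache("hits")      // -1 again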