author     Jason Zaugg <jzaugg@gmail.com>    2016-11-22 16:11:57 +1000
committer  GitHub <noreply@github.com>       2016-11-22 16:11:57 +1000
commit     9c5d3f81a667917b6a3aed5098182623c417c137 (patch)
tree       65210d12a286479a37756f191059c3cb59436ca5
parent     8cdd46fb86f5e516c2550a665fe592f8e6b816fd (diff)
parent     b67ca7dc6bb84758f9c9f64d68b0b11c20995aa0 (diff)
Merge pull request #5528 from paplorinc/getOrElseUpdate
Changed HashMap.getOrElseUpdate to only calculate the index once
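For context: the getOrElseUpdate inherited from mutable.MapLike resolves the key twice on a miss, once for the lookup and again for the insertion. A rough paraphrase (not the verbatim library source):

    // Paraphrased inherited behaviour: on a miss the key is hashed and
    // located twice, once in get and once more in the update.
    def getOrElseUpdate(key: A, op: => B): B = get(key) match {
      case Some(v) => v
      case None    => val d = op; this(key) = d; d
    }

The override added below computes index(elemHashCode(key)) once and reuses that index for both the lookup and the insertion.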
-rw-r--r--  src/library/scala/collection/mutable/HashMap.scala                                 | 31
-rw-r--r--  test/benchmarks/README.md                                                          |  7
-rw-r--r--  test/benchmarks/build.sbt                                                          |  4
-rw-r--r--  test/benchmarks/project/plugins.sbt                                                |  2
-rw-r--r--  test/benchmarks/src/main/scala/scala/collection/immutable/VectorMapBenchmark.scala | 32
-rw-r--r--  test/benchmarks/src/main/scala/scala/collection/mutable/HashMapBenchmark.scala     | 70
6 files changed, 138 insertions(+), 8 deletions(-)
diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala
index eab4202353..11ff1f0893 100644
--- a/src/library/scala/collection/mutable/HashMap.scala
+++ b/src/library/scala/collection/mutable/HashMap.scala
@@ -72,6 +72,37 @@ extends AbstractMap[A, B]
else Some(e.value)
}
+ override def getOrElseUpdate(key: A, defaultValue: => B): B = {
+ val i = index(elemHashCode(key))
+ val entry = findEntry(key, i)
+ if (entry != null) entry.value
+ else addEntry(createNewEntry(key, defaultValue), i)
+ }
+
+ /* inlined HashTable.findEntry0 to preserve its visibility */
+ private[this] def findEntry(key: A, h: Int): Entry = {
+ var e = table(h).asInstanceOf[Entry]
+ while (notFound(key, e))
+ e = e.next
+ e
+ }
+ private[this] def notFound(key: A, e: Entry): Boolean = (e != null) && !elemEquals(e.key, key)
+
+ /* inlined HashTable.addEntry0 to preserve its visibility */
+ private[this] def addEntry(e: Entry, h: Int): B = {
+ if (tableSize >= threshold) addEntry(e)
+ else addEntry0(e, h)
+ e.value
+ }
+
+ /* extracted to make addEntry inlinable */
+ private[this] def addEntry0(e: Entry, h: Int) {
+ e.next = table(h).asInstanceOf[Entry]
+ table(h) = e
+ tableSize += 1
+ nnSizeMapAdd(h)
+ }
+
override def put(key: A, value: B): Option[B] = {
val e = findOrAddEntry(key, value)
if (e eq null) None
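As a minimal usage sketch (hypothetical names), the override keeps the existing contract: the by-name default is evaluated, and an entry inserted, only when the key is absent.

    import scala.collection.mutable

    val cache = mutable.HashMap.empty[String, Int]
    def expensive(): Int = { println("computed"); 42 }

    cache.getOrElseUpdate("answer", expensive())  // miss: prints "computed", stores and returns 42
    cache.getOrElseUpdate("answer", expensive())  // hit: returns 42 without re-evaluating expensive()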
diff --git a/test/benchmarks/README.md b/test/benchmarks/README.md
index 370d610bc4..6c77b83605 100644
--- a/test/benchmarks/README.md
+++ b/test/benchmarks/README.md
@@ -5,9 +5,7 @@ that makes use of the [SBT plugin](https://github.com/ktoso/sbt-jmh) for [JMH](h
## Running a benchmark
-The benchmarks require first building Scala into `../../build/pack` with `ant`.
-If you want to build with `sbt dist/mkPack` instead,
-you'll need to change `scalaHome` in this project.
+The benchmarks require first building Scala into `../../build/pack`.
You'll then need to know the fully-qualified name of the benchmark runner class.
The benchmarking classes are organized under `src/main/scala`,
@@ -18,8 +16,7 @@ Using this example, one would simply run
jmh:runMain scala.collection.mutable.OpenHashMapRunner
-in SBT.
-SBT should be run _from this directory_.
+in SBT, run _from this directory_ (`test/benchmarks`).
The JMH results can be found under `target/jmh-results/`.
`target` gets deleted on an SBT `clean`,
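The JMH-annotated benchmarks added in this pull request have no dedicated runner class, so the sbt-jmh run task with a class-name filter is presumably how one would invoke them from test/benchmarks, for example:

    jmh:run scala.collection.mutable.HashMapBenchmark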
diff --git a/test/benchmarks/build.sbt b/test/benchmarks/build.sbt
index fb05fb2c99..ef603e18b3 100644
--- a/test/benchmarks/build.sbt
+++ b/test/benchmarks/build.sbt
@@ -1,5 +1,5 @@
scalaHome := Some(file("../../build/pack"))
-scalaVersion := "2.12.0-dev"
+scalaVersion := "2.12.1-dev"
scalacOptions ++= Seq("-feature", "-opt:l:classpath")
lazy val root = (project in file(".")).
@@ -7,5 +7,5 @@ lazy val root = (project in file(".")).
settings(
name := "test-benchmarks",
version := "0.0.1",
- libraryDependencies += "org.openjdk.jol" % "jol-core" % "0.4"
+ libraryDependencies += "org.openjdk.jol" % "jol-core" % "0.6"
)
diff --git a/test/benchmarks/project/plugins.sbt b/test/benchmarks/project/plugins.sbt
index aa49ad9872..1b79ce888c 100644
--- a/test/benchmarks/project/plugins.sbt
+++ b/test/benchmarks/project/plugins.sbt
@@ -1,2 +1,2 @@
addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "4.0.0")
-addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.16")
+addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.17")
diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/VectorMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/VectorMapBenchmark.scala
new file mode 100644
index 0000000000..61e621dcdf
--- /dev/null
+++ b/test/benchmarks/src/main/scala/scala/collection/immutable/VectorMapBenchmark.scala
@@ -0,0 +1,32 @@
+package scala.collection.immutable
+
+import org.openjdk.jmh.annotations._
+import org.openjdk.jmh.infra._
+import org.openjdk.jmh.runner.IterationType
+import benchmark._
+import java.util.concurrent.TimeUnit
+
+@BenchmarkMode(Array(Mode.AverageTime))
+@Fork(2)
+@Threads(1)
+@Warmup(iterations = 10)
+@Measurement(iterations = 10)
+@OutputTimeUnit(TimeUnit.NANOSECONDS)
+@State(Scope.Benchmark)
+class VectorMapBenchmark {
+ @Param(Array("10", "100", "1000"))
+ var size: Int = _
+
+ var values: Vector[Any] = _
+
+ @Setup(Level.Trial) def initKeys(): Unit = {
+ values = (0 to size).map(i => (i % 4) match {
+ case 0 => i.toString
+ case 1 => i.toChar
+ case 2 => i.toDouble
+ case 3 => i.toInt
+ }).toVector
+ }
+
+ @Benchmark def groupBy = values.groupBy(_.getClass)
+}
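The measured operation partitions the boxed values by runtime class. A rough sketch of the result shape for size = 10 (buckets shown for illustration only):

    // values.groupBy(_.getClass): Map[Class[_], Vector[Any]], roughly:
    //   classOf[String]              -> Vector("0", "4", "8")
    //   classOf[java.lang.Character] -> Vector(1.toChar, 5.toChar, 9.toChar)
    //   classOf[java.lang.Double]    -> Vector(2.0, 6.0, 10.0)
    //   classOf[java.lang.Integer]   -> Vector(3, 7)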
diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/HashMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/HashMapBenchmark.scala
new file mode 100644
index 0000000000..3f01d154e9
--- /dev/null
+++ b/test/benchmarks/src/main/scala/scala/collection/mutable/HashMapBenchmark.scala
@@ -0,0 +1,70 @@
+package scala.collection.mutable
+
+import org.openjdk.jmh.annotations._
+import org.openjdk.jmh.infra._
+import org.openjdk.jmh.runner.IterationType
+import benchmark._
+import java.util.concurrent.TimeUnit
+
+import scala.collection.mutable
+
+@BenchmarkMode(Array(Mode.AverageTime))
+@Fork(2)
+@Threads(1)
+@Warmup(iterations = 10)
+@Measurement(iterations = 10)
+@OutputTimeUnit(TimeUnit.NANOSECONDS)
+@State(Scope.Benchmark)
+class HashMapBenchmark {
+ @Param(Array("10", "100", "1000"))
+ var size: Int = _
+
+ var existingKeys: Array[Any] = _
+ var missingKeys: Array[Any] = _
+
+ @Setup(Level.Trial) def initKeys(): Unit = {
+ existingKeys = (0 to size).map(i => (i % 4) match {
+ case 0 => i.toString
+ case 1 => i.toChar
+ case 2 => i.toDouble
+ case 3 => i.toInt
+ }).toArray
+ missingKeys = (size to 2 * size).toArray
+ }
+
+ var map = new mutable.HashMap[Any, Any]
+
+ @Setup(Level.Invocation) def initializeMutable = existingKeys.foreach(v => map.put(v, v))
+
+ @TearDown(Level.Invocation) def tearDown = map.clear()
+
+ @Benchmark def getOrElseUpdate(bh: Blackhole): Unit = {
+ var i = 0;
+ while (i < size) {
+ bh.consume(map.getOrElseUpdate(existingKeys(i), -1))
+ bh.consume(map.getOrElseUpdate(missingKeys(i), -1))
+ i += 1
+ }
+ }
+
+ @Benchmark def get(bh: Blackhole): Unit = {
+ var i = 0;
+ while (i < size) {
+ bh.consume(map.get(existingKeys(i)))
+ bh.consume(map.get(missingKeys(i)))
+ i += 1
+ }
+ }
+
+ @Benchmark def put(bh: Blackhole): Any = {
+ var map = new mutable.HashMap[Any, Any]
+
+ var i = 0;
+ while (i < size) {
+ map.put(existingKeys(i), i)
+ i += 1
+ }
+
+ map
+ }
+}
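Because getOrElseUpdate inserts every missing key, the map grows during a single invocation; the Level.Invocation setup and teardown above rebuild it each time so every measurement starts from the same state. A stand-alone sketch of that pattern (hypothetical names, outside JMH):

    // Re-populate before and clear after the timed work, mirroring the
    // @Setup(Level.Invocation) / @TearDown(Level.Invocation) pair above.
    def timedRun(existing: Seq[Any], missing: Seq[Any]): Unit = {
      val map = scala.collection.mutable.HashMap.empty[Any, Any]
      existing.foreach(k => map.put(k, k))                            // setup: known keys present
      (existing ++ missing).foreach(k => map.getOrElseUpdate(k, -1))  // measured work: hits and misses
      map.clear()                                                     // teardown: reset for the next run
    }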