aboutsummaryrefslogtreecommitdiff
path: root/src/dotty/tools/dotc/core/Uniques.scala
diff options
context:
space:
mode:
authorMartin Odersky <odersky@gmail.com>2014-01-26 18:44:34 +0100
committerMartin Odersky <odersky@gmail.com>2014-01-26 18:52:33 +0100
commitaafa0c16dbf95fb880573dea6f9ee6db02470740 (patch)
treeaa738e536b5032261315cd2453f0393b7657b602 /src/dotty/tools/dotc/core/Uniques.scala
parent5b69fbbe68705ece7d892cbda555191853f1c5be (diff)
downloaddotty-aafa0c16dbf95fb880573dea6f9ee6db02470740.tar.gz
dotty-aafa0c16dbf95fb880573dea6f9ee6db02470740.tar.bz2
dotty-aafa0c16dbf95fb880573dea6f9ee6db02470740.zip
New treatment of uniques
To avoid always creating a type before checking its uniqueness, we specialize on the three most common categories: RefinedTypes, TypeBounds and NamedTypes. Each category gets its own uniques map. Hashing is disentangled from Types. The new treatment seems to give some improvement (2-5%?) but not much.
Diffstat (limited to 'src/dotty/tools/dotc/core/Uniques.scala')
-rw-r--r--src/dotty/tools/dotc/core/Uniques.scala124
1 files changed, 124 insertions, 0 deletions
diff --git a/src/dotty/tools/dotc/core/Uniques.scala b/src/dotty/tools/dotc/core/Uniques.scala
new file mode 100644
index 000000000..765b5d73f
--- /dev/null
+++ b/src/dotty/tools/dotc/core/Uniques.scala
@@ -0,0 +1,124 @@
+package dotty.tools.dotc
+package core
+
+import Types._, Contexts._, util.Stats._, Hashable._, Names._
+import util.HashSet
+
+/** Defines operation `unique` for hash-consing types.
+ * Also defines specialized hash sets for hash consing uniques of a specific type.
+ * All sets offer an `enterIfNew` method which checks whether a type
+ * with the given parts exists already and creates a new one if not.
+ */
+object Uniques {
+
+ // Statistics hooks (active only under `monitored`): classify each lookup as
+ // cached/uncached, both globally and per concrete Type class.
+ private def recordCaching(tp: Type): Unit = recordCaching(tp.hash, tp.getClass)
+ private def recordCaching(h: Int, clazz: Class[_]): Unit =
+ if (h == NotCached) {
+ record("uncached-types")
+ record(s"uncached: $clazz")
+ } else {
+ record("cached-types")
+ record(s"cached: $clazz")
+ }
+
+ // Generic hash-consing entry point: types whose hash is NotCached are returned
+ // as-is (they are never interned); all others are looked up in (or added to)
+ // the context's shared uniques set, so structurally equal types become `eq`.
+ def unique[T <: Type](tp: T)(implicit ctx: Context): T = {
+ if (monitored) recordCaching(tp)
+ if (tp.hash == NotCached) tp
+ else ctx.uniques.findEntryOrUpdate(tp).asInstanceOf[T]
+ } /* !!! DEBUG
+ ensuring (
+ result => tp.toString == result.toString || {
+ println(s"cache mismatch; tp = $tp, cached = $result")
+ false
+ }
+ )
+ */
+
+ // Specialized uniques set for NamedTypes: hashes (prefix, name) up front so a
+ // candidate type is only allocated when no interned equivalent exists yet.
+ final class NamedTypeUniques extends HashSet[NamedType]("uniqueNamedTypes", initialUniquesCapacity) with Hashable {
+ override def hash(x: NamedType): Int = x.hash
+
+ // Linear probe over the bucket for hash `h`; returns the interned entry with
+ // the same prefix and (reference-identical) name, or null if none exists.
+ private def findPrevious(h: Int, prefix: Type, name: Name): NamedType = {
+ var e = findEntryByHash(h)
+ while (e != null) {
+ if ((e.prefix == prefix) && (e.name eq name)) return e
+ e = nextEntryByHash(h)
+ }
+ e
+ }
+
+ // Returns the interned NamedType for (prefix, name), creating and entering a
+ // new CachedTypeRef/CachedTermRef (chosen by name kind) only on a miss.
+ def enterIfNew(prefix: Type, name: Name): NamedType = {
+ val h = doHash(name, prefix)
+ // NOTE(review): stats always record CachedTermRef here, even when `name` is a
+ // type name that yields a CachedTypeRef — presumably acceptable for coarse
+ // statistics; confirm this is intended.
+ if (monitored) recordCaching(h, classOf[CachedTermRef])
+ def newType =
+ if (name.isTypeName) new CachedTypeRef(prefix, name.asTypeName, h)
+ else new CachedTermRef(prefix, name.asTermName, h)
+ if (h == NotCached) newType
+ else {
+ val r = findPrevious(h, prefix, name)
+ if (r ne null) r else addEntryAfterScan(newType)
+ }
+ }
+ }
+
+ // Specialized uniques set for TypeBounds: hashes (variance, lo, hi) up front,
+ // so bounds objects are only allocated on a cache miss.
+ final class TypeBoundsUniques extends HashSet[TypeBounds]("uniqueTypeBounds", initialUniquesCapacity) with Hashable {
+ override def hash(x: TypeBounds): Int = x.hash
+
+ // Probe the bucket for an interned TypeBounds with identical lo/hi/variance.
+ private def findPrevious(h: Int, lo: Type, hi: Type, variance: Int): TypeBounds = {
+ var e = findEntryByHash(h)
+ while (e != null) {
+ if ((e.lo == lo) && (e.hi == hi) && (e.variance == variance)) return e
+ e = nextEntryByHash(h)
+ }
+ e
+ }
+
+ // Returns the interned TypeBounds for (lo, hi, variance); on a miss, allocates
+ // the variance-specific subclass (Cached/Co/Contra) and enters it.
+ def enterIfNew(lo: Type, hi: Type, variance: Int): TypeBounds = {
+ val h = doHash(variance, lo, hi)
+ if (monitored) recordCaching(h, classOf[TypeBounds])
+ def newBounds =
+ if (variance == 0) new CachedTypeBounds(lo, hi, h)
+ else if (variance == 1) new CoTypeBounds(lo, hi, h)
+ else new ContraTypeBounds(lo, hi, h)
+ if (h == NotCached) newBounds
+ else {
+ val r = findPrevious(h, lo, hi, variance)
+ if (r ne null) r else addEntryAfterScan(newBounds)
+ }
+ }
+ }
+
+ // Specialized uniques set for RefinedTypes. Offers both a parts-based
+ // enterIfNew (avoids allocation on hit) and one taking an existing RefinedType.
+ final class RefinedUniques extends HashSet[RefinedType]("uniqueRefinedTypes", initialUniquesCapacity) with Hashable {
+ // Seed must match CachedRefinedType's so hashes computed here agree with
+ // hashes of types that started life as CachedRefinedTypes.
+ override val hashSeed = classOf[CachedRefinedType].hashCode // some types start life as CachedRefinedTypes, need to have same hash seed
+ override def hash(x: RefinedType): Int = x.hash
+
+ // Probe the bucket for an interned RefinedType with the same parent,
+ // (reference-identical) refinedName, and refinedInfo.
+ private def findPrevious(h: Int, parent: Type, refinedName: Name, refinedInfo: Type): RefinedType = {
+ var e = findEntryByHash(h)
+ while (e != null) {
+ if ((e.parent == parent) && (e.refinedName eq refinedName) && (e.refinedInfo == refinedInfo))
+ return e
+ e = nextEntryByHash(h)
+ }
+ e
+ }
+
+ // Returns the interned RefinedType for the given parts, allocating a
+ // PreHashedRefinedType only on a miss.
+ def enterIfNew(parent: Type, refinedName: Name, refinedInfo: Type): RefinedType = {
+ val h = doHash(refinedName, refinedInfo, parent)
+ def newType = new PreHashedRefinedType(parent, refinedName, refinedInfo, h)
+ if (monitored) recordCaching(h, classOf[PreHashedRefinedType])
+ if (h == NotCached) newType
+ else {
+ val r = findPrevious(h, parent, refinedName, refinedInfo)
+ if (r ne null) r else addEntryAfterScan(newType)
+ }
+ }
+
+ // Variant taking an already-constructed RefinedType: returns the interned
+ // equivalent if present, otherwise enters `rt` itself.
+ def enterIfNew(rt: RefinedType) = {
+ if (monitored) recordCaching(rt)
+ if (rt.hash == NotCached) rt
+ else {
+ val r = findPrevious(rt.hash, rt.parent, rt.refinedName, rt.refinedInfo)
+ if (r ne null) r else addEntryAfterScan(rt)
+ }
+ }
+ }
+}