aboutsummaryrefslogtreecommitdiff
path: root/compiler/src/dotty/tools/dotc/util
diff options
context:
space:
mode:
authorFelix Mulder <felix.mulder@gmail.com>2016-11-02 11:08:28 +0100
committerGuillaume Martres <smarter@ubuntu.com>2016-11-22 01:35:07 +0100
commit8a61ff432543a29234193cd1f7c14abd3f3d31a0 (patch)
treea8147561d307af862c295cfc8100d271063bb0dd /compiler/src/dotty/tools/dotc/util
parent6a455fe6da5ff9c741d91279a2dc6fe2fb1b472f (diff)
downloaddotty-8a61ff432543a29234193cd1f7c14abd3f3d31a0.tar.gz
dotty-8a61ff432543a29234193cd1f7c14abd3f3d31a0.tar.bz2
dotty-8a61ff432543a29234193cd1f7c14abd3f3d31a0.zip
Move compiler and compiler tests to compiler dir
Diffstat (limited to 'compiler/src/dotty/tools/dotc/util')
-rw-r--r--compiler/src/dotty/tools/dotc/util/Attachment.scala96
-rw-r--r--compiler/src/dotty/tools/dotc/util/Chars.scala96
-rw-r--r--compiler/src/dotty/tools/dotc/util/CommentParsing.scala239
-rw-r--r--compiler/src/dotty/tools/dotc/util/DiffUtil.scala174
-rw-r--r--compiler/src/dotty/tools/dotc/util/DotClass.scala12
-rw-r--r--compiler/src/dotty/tools/dotc/util/FreshNameCreator.scala33
-rw-r--r--compiler/src/dotty/tools/dotc/util/HashSet.scala146
-rw-r--r--compiler/src/dotty/tools/dotc/util/LRUCache.scala100
-rw-r--r--compiler/src/dotty/tools/dotc/util/NameTransformer.scala163
-rw-r--r--compiler/src/dotty/tools/dotc/util/Positions.scala173
-rw-r--r--compiler/src/dotty/tools/dotc/util/Property.scala10
-rw-r--r--compiler/src/dotty/tools/dotc/util/Set.scala27
-rw-r--r--compiler/src/dotty/tools/dotc/util/ShowPickled.scala287
-rw-r--r--compiler/src/dotty/tools/dotc/util/SimpleMap.scala223
-rw-r--r--compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala28
-rw-r--r--compiler/src/dotty/tools/dotc/util/SourceFile.scala145
-rw-r--r--compiler/src/dotty/tools/dotc/util/SourcePosition.scala57
-rw-r--r--compiler/src/dotty/tools/dotc/util/Stats.scala78
-rw-r--r--compiler/src/dotty/tools/dotc/util/Util.scala32
-rw-r--r--compiler/src/dotty/tools/dotc/util/common.scala14
-rw-r--r--compiler/src/dotty/tools/dotc/util/kwords.sc18
-rw-r--r--compiler/src/dotty/tools/dotc/util/lrutest.sc40
22 files changed, 2191 insertions, 0 deletions
diff --git a/compiler/src/dotty/tools/dotc/util/Attachment.scala b/compiler/src/dotty/tools/dotc/util/Attachment.scala
new file mode 100644
index 000000000..20facfd97
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/Attachment.scala
@@ -0,0 +1,96 @@
+package dotty.tools.dotc.util
+
/** A class inheriting from Attachment.Container supports
 * adding, removing and lookup of attachments. Attachments are typed key/value pairs.
 *
 * Note: keys are compared by reference identity (`eq`), so two distinct
 * `Key` instances never collide even if they are `equal`.
 */
object Attachment {
  import Property.Key

  /** An implementation trait for attachments.
   *  Clients should inherit from Container instead.
   *
   *  Attachments form a singly-linked list threaded through `next`.
   *  Each `Link` node is itself a `LinkSource`, which is what lets the
   *  methods below recurse down the list.
   */
  trait LinkSource {
    // Head of the attachment list; `null` means "no attachments".
    private[Attachment] var next: Link[_]

    /** Optionally get attachment corresponding to `key` */
    final def getAttachment[V](key: Key[V]): Option[V] = {
      val nx = next
      if (nx == null) None
      else if (nx.key eq key) Some(nx.value.asInstanceOf[V])
      else nx.getAttachment[V](key)
    }

    /** The attachment corresponding to `key`.
     *  @throws NoSuchElementException if no attachment with key exists
     */
    final def attachment[V](key: Key[V]): V = {
      val nx = next
      if (nx == null) throw new NoSuchElementException
      else if (nx.key eq key) nx.value.asInstanceOf[V]
      else nx.attachment(key)
    }

    /** The attachment corresponding to `key`, or `default`
     *  if no attachment with `key` exists.
     */
    final def attachmentOrElse[V](key: Key[V], default: V): V = {
      val nx = next
      if (nx == null) default
      else if (nx.key eq key) nx.value.asInstanceOf[V]
      else nx.attachmentOrElse(key, default)
    }

    /** Add attachment with given `key` and `value`.
     *  @return Optionally, the old attachment with given `key` if one existed before.
     *          The new attachment is added at the position of the old one, or at the end
     *          if no attachment with same `key` existed.
     */
    final def putAttachment[V](key: Key[V], value: V): Option[V] = {
      val nx = next
      if (nx == null) {
        // End of list reached without a match: append a new link.
        next = new Link(key, value, null)
        None
      }
      else if (nx.key eq key) {
        // Replace the matching link in place, preserving its position.
        next = new Link(key, value, nx.next)
        Some(nx.value.asInstanceOf[V])
      }
      else nx.putAttachment(key, value)
    }

    /** Remove attachment with given `key`, if it exists.
     *  @return Optionally, the removed attachment with given `key` if one existed before.
     */
    final def removeAttachment[V](key: Key[V]): Option[V] = {
      val nx = next
      if (nx == null)
        None
      else if (nx.key eq key) {
        // Unlink the matching node by splicing it out of the list.
        next = nx.next
        Some(nx.value.asInstanceOf[V])
      }
      else nx.removeAttachment(key)
    }

    /** The list of all keys and values attached to this container. */
    final def allAttachments: List[(Key[_], Any)] = {
      val nx = next
      if (nx == null) Nil else (nx.key, nx.value) :: nx.allAttachments
    }
  }

  /** A private, concrete implementation class linking attachments.
   *  Doubles as list node and as `LinkSource` for the tail of the list.
   */
  private[Attachment] class Link[+V](val key: Key[V], val value: V, var next: Link[_])
    extends LinkSource

  /** A trait for objects that can contain attachments */
  trait Container extends LinkSource {
    private[Attachment] var next: Link[_] = null

    /** Add attachment `key -> value`, asserting that no attachment with
     *  the same key is already present. The new link becomes the head.
     */
    final def pushAttachment[V](key: Key[V], value: V): Unit = {
      assert(!getAttachment(key).isDefined, s"duplicate attachment for key $key")
      next = new Link(key, value, next)
    }
  }
}
diff --git a/compiler/src/dotty/tools/dotc/util/Chars.scala b/compiler/src/dotty/tools/dotc/util/Chars.scala
new file mode 100644
index 000000000..bae3b4732
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/Chars.scala
@@ -0,0 +1,96 @@
+/* NSC -- new Scala compiler
+ * Copyright 2006-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+package dotty.tools.dotc
+package util
+
+import scala.annotation.switch
+import java.lang.{ Character => JCharacter }
+import java.lang.{Character => JCharacter}
+import java.lang.Character.LETTER_NUMBER
+import java.lang.Character.LOWERCASE_LETTER
+import java.lang.Character.OTHER_LETTER
+import java.lang.Character.TITLECASE_LETTER
+import java.lang.Character.UPPERCASE_LETTER
+
/** Contains constants and classifier methods for characters */
object Chars {

  final val LF = '\u000A'
  final val FF = '\u000C'
  final val CR = '\u000D'
  final val SU = '\u001A'

  /** Convert a character digit to an Int according to given base,
   *  -1 if no success
   */
  def digit2int(ch: Char, base: Int): Int = {
    val num = (
      if (ch <= '9') ch - '0'
      else if ('a' <= ch && ch <= 'z') ch - 'a' + 10
      else if ('A' <= ch && ch <= 'Z') ch - 'A' + 10
      else -1
    )
    // Range check rejects digits too large for `base` and also characters
    // below '0' (whose `ch - '0'` is negative).
    if (0 <= num && num < base) num else -1
  }

  /** Convert a character to a backslash-u escape, e.g. 'A' -> "\\u0041".
   *
   *  A fresh buffer is allocated per call. The previous implementation
   *  reused a shared module-level array, which made concurrent calls
   *  corrupt each other's output; output is unchanged.
   */
  def char2uescape(c: Char): String = {
    @inline def hexChar(ch: Int): Char =
      (( if (ch < 10) '0' else 'A' - 10 ) + ch).toChar

    val chars = Array[Char](
      '\\', 'u',
      hexChar(c >> 12),        // top nibble; a Char is 16 bits, so already < 16
      hexChar((c >> 8) % 16),
      hexChar((c >> 4) % 16),
      hexChar(c % 16))
    new String(chars)
  }

  /** Is character a line break? */
  def isLineBreakChar(c: Char) = (c: @switch) match {
    case LF|FF|CR|SU => true
    case _ => false
  }

  /** Is character a whitespace character (but not a new line)? */
  def isWhitespace(c: Char) =
    c == ' ' || c == '\t' || c == CR

  /** Can character form part of a doc comment variable $xxx? */
  def isVarPart(c: Char) =
    '0' <= c && c <= '9' || 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z'

  /** Can character start an alphanumeric Scala identifier? */
  def isIdentifierStart(c: Char): Boolean =
    (c == '_') || (c == '$') || Character.isUnicodeIdentifierStart(c)

  /** Can character form part of an alphanumeric Scala identifier? */
  def isIdentifierPart(c: Char) =
    (c == '$') || Character.isUnicodeIdentifierPart(c)

  /** Is character a math or other symbol in Unicode? */
  def isSpecial(c: Char) = {
    val chtp = Character.getType(c)
    chtp == Character.MATH_SYMBOL.toInt || chtp == Character.OTHER_SYMBOL.toInt
  }

  private final val otherLetters = Set[Char]('\u0024', '\u005F') // '$' and '_'
  private final val letterGroups = {
    import JCharacter._
    Set[Byte](LOWERCASE_LETTER, UPPERCASE_LETTER, OTHER_LETTER, TITLECASE_LETTER, LETTER_NUMBER)
  }

  /** Is `ch` a letter in the sense of the Scala language specification
   *  (Unicode letter categories, plus '$' and '_')?
   */
  def isScalaLetter(ch: Char) = letterGroups(JCharacter.getType(ch).toByte) || otherLetters(ch)

  /** Can character form part of a Scala operator name? */
  def isOperatorPart(c : Char) : Boolean = (c: @switch) match {
    case '~' | '!' | '@' | '#' | '%' |
         '^' | '*' | '+' | '-' | '<' |
         '>' | '?' | ':' | '=' | '&' |
         '|' | '/' | '\\' => true
    case c => isSpecial(c)
  }
}
diff --git a/compiler/src/dotty/tools/dotc/util/CommentParsing.scala b/compiler/src/dotty/tools/dotc/util/CommentParsing.scala
new file mode 100644
index 000000000..cc790d683
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/CommentParsing.scala
@@ -0,0 +1,239 @@
+/*
+ * Port of DocStrings.scala from nsc
+ * @author Martin Odersky
+ * @author Felix Mulder
+ */
+package dotty.tools.dotc.util
+
/** The comment parsing in `dotc` is used by both the comment cooking and the
 *  dottydoc tool.
 *
 *  The comment cooking is used to expand comments with `@inheritdoc` and
 *  `@define` annotations. The rest of the comment is untouched and later
 *  handled by dottydoc.
 */
object CommentParsing {
  // Use dotc's own character classifiers (defined in this package) instead of
  // depending on scala-reflect internals; `isWhitespace`, `isIdentifierPart`
  // and `isVarPart` exist with the same semantics in dotty.tools.dotc.util.Chars.
  import Chars._

  /** Returns index of string `str` following `start` skipping longest
   *  sequence of whitespace characters (but no newlines)
   */
  def skipWhitespace(str: String, start: Int): Int =
    if (start < str.length && isWhitespace(str charAt start)) skipWhitespace(str, start + 1)
    else start

  /** Returns index of string `str` following `start` skipping
   *  sequence of identifier characters.
   */
  def skipIdent(str: String, start: Int): Int =
    if (start < str.length && isIdentifierPart(str charAt start)) skipIdent(str, start + 1)
    else start

  /** Returns index of string `str` following `start` skipping
   *  the `@` marker and the identifier after it.
   */
  def skipTag(str: String, start: Int): Int =
    if (start < str.length && (str charAt start) == '@') skipIdent(str, start + 1)
    else start


  /** Returns index of string `str` after `start` skipping longest
   *  sequence of space and tab characters, possibly also containing
   *  a single `*` character or the `/``**` sequence.
   *  @pre start == str.length || str(start) == `\n`
   */
  def skipLineLead(str: String, start: Int): Int =
    if (start == str.length) start
    else {
      val idx = skipWhitespace(str, start + 1)
      if (idx < str.length && (str charAt idx) == '*') skipWhitespace(str, idx + 1)
      else if (idx + 2 < str.length && (str charAt idx) == '/' && (str charAt (idx + 1)) == '*' && (str charAt (idx + 2)) == '*')
        skipWhitespace(str, idx + 3)
      else idx
    }

  /** Skips to next occurrence of `\n` or to the position after the `/``**` sequence following index `start`.
   */
  def skipToEol(str: String, start: Int): Int =
    if (start + 2 < str.length && (str charAt start) == '/' && (str charAt (start + 1)) == '*' && (str charAt (start + 2)) == '*') start + 3
    else if (start < str.length && (str charAt start) != '\n') skipToEol(str, start + 1)
    else start

  /** Returns first index following `start` and starting a line (i.e. after skipLineLead) or starting the comment
   *  which satisfies predicate `p`.
   */
  def findNext(str: String, start: Int)(p: Int => Boolean): Int = {
    val idx = skipLineLead(str, skipToEol(str, start))
    if (idx < str.length && !p(idx)) findNext(str, idx)(p)
    else idx
  }

  /** Return all indices following `start` and starting a line (i.e. after skipLineLead)
   *  which satisfy predicate `p`.
   */
  def findAll(str: String, start: Int)(p: Int => Boolean): List[Int] = {
    val idx = findNext(str, start)(p)
    if (idx == str.length) List()
    else idx :: findAll(str, idx)(p)
  }

  /** Produces a string index, which is a list of `sections`, i.e
   *  pairs of start/end positions of all tagged sections in the string.
   *  Every section starts with an at sign and extends to the next at sign,
   *  or to the end of the comment string, but excluding the final two
   *  characters which terminate the comment.
   *
   *  Also take usecases into account - they need to expand until the next
   *  usecase or the end of the string, as they might include other sections
   *  of their own
   */
  def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] = {
    var indices = findAll(str, 0) (idx => str(idx) == '@' && p(idx))
    indices = mergeUsecaseSections(str, indices)
    indices = mergeInheritdocSections(str, indices)

    indices match {
      case List() => List()
      case idxs => idxs zip (idxs.tail ::: List(str.length - 2))
    }
  }

  /**
   * Merge sections following an usecase into the usecase comment, so they
   * can override the parent symbol's sections
   */
  def mergeUsecaseSections(str: String, idxs: List[Int]): List[Int] = {
    idxs.indexWhere(str.startsWith("@usecase", _)) match {
      case firstUCIndex if firstUCIndex != -1 =>
        val commentSections = idxs.take(firstUCIndex)
        val usecaseSections = idxs.drop(firstUCIndex).filter(str.startsWith("@usecase", _))
        commentSections ::: usecaseSections
      case _ =>
        idxs
    }
  }

  /**
   * Merge the inheritdoc sections, as they never make sense on their own
   */
  def mergeInheritdocSections(str: String, idxs: List[Int]): List[Int] =
    idxs.filterNot(str.startsWith("@inheritdoc", _))

  /** Does interval `iv` start with given `tag`?
   */
  def startsWithTag(str: String, section: (Int, Int), tag: String): Boolean =
    startsWithTag(str, section._1, tag)

  /** Does the substring of `str` at `start` begin with `tag` as a whole word?
   *  Guard the look-ahead so a tag that ends exactly at the end of `str`
   *  no longer throws StringIndexOutOfBoundsException.
   */
  def startsWithTag(str: String, start: Int, tag: String): Boolean =
    str.startsWith(tag, start) && {
      val follow = start + tag.length
      follow == str.length || !isIdentifierPart(str charAt follow)
    }

  /** The first start tag of a list of tag intervals,
   *  or the end of the whole comment string - 2 if list is empty
   */
  def startTag(str: String, sections: List[(Int, Int)]) = sections match {
    case Nil => str.length - 2
    case (start, _) :: _ => start
  }

  /** A map from parameter names to start/end indices describing all parameter
   *  sections in `str` tagged with `tag`, where `sections` is the index of `str`.
   */
  def paramDocs(str: String, tag: String, sections: List[(Int, Int)]): Map[String, (Int, Int)] =
    Map() ++ {
      for (section <- sections if startsWithTag(str, section, tag)) yield {
        val start = skipWhitespace(str, section._1 + tag.length)
        str.substring(start, skipIdent(str, start)) -> section
      }
    }

  /** Optionally start and end index of the group section in `str`, or `None`
   *  if `str` does not have a @group. */
  def groupDoc(str: String, sections: List[(Int, Int)]): Option[(Int, Int)] =
    sections find (startsWithTag(str, _, "@group"))


  /** Optionally start and end index of return section in `str`, or `None`
   *  if `str` does not have a @return.
   */
  def returnDoc(str: String, sections: List[(Int, Int)]): Option[(Int, Int)] =
    sections find (startsWithTag(str, _, "@return"))

  /** Extracts variable name from a string, stripping any pair of surrounding braces */
  def variableName(str: String): String =
    if (str.length >= 2 && (str charAt 0) == '{' && (str charAt (str.length - 1)) == '}')
      str.substring(1, str.length - 1)
    else
      str

  /** Returns index following variable, or start index if no variable was recognized
   */
  def skipVariable(str: String, start: Int): Int = {
    var idx = start
    if (idx < str.length && (str charAt idx) == '{') {
      // Braced form `{name}`: scan to the closing brace; an unterminated
      // brace means "no variable recognized", so return `start`.
      do idx += 1
      while (idx < str.length && (str charAt idx) != '}')
      if (idx < str.length) idx + 1 else start
    } else {
      while (idx < str.length && isVarPart(str charAt idx))
        idx += 1
      idx
    }
  }

  /** A map from the section tag to section parameters */
  def sectionTagMap(str: String, sections: List[(Int, Int)]): Map[String, (Int, Int)] =
    Map() ++ {
      for (section <- sections) yield
        extractSectionTag(str, section) -> section
    }

  /** Extract the section tag, treating the section tag as an identifier */
  def extractSectionTag(str: String, section: (Int, Int)): String =
    str.substring(section._1, skipTag(str, section._1))

  /** Extract the section parameter */
  def extractSectionParam(str: String, section: (Int, Int)): String = {
    val (beg, _) = section
    assert(str.startsWith("@param", beg) ||
           str.startsWith("@tparam", beg) ||
           str.startsWith("@throws", beg))

    val start = skipWhitespace(str, skipTag(str, beg))
    val finish = skipIdent(str, start)

    str.substring(start, finish)
  }

  /** Extract the section text, except for the tag and comment newlines */
  def extractSectionText(str: String, section: (Int, Int)): (Int, Int) = {
    val (beg, end) = section
    if (str.startsWith("@param", beg) ||
        str.startsWith("@tparam", beg) ||
        str.startsWith("@throws", beg))
      // Skip tag, whitespace, the parameter identifier and trailing whitespace.
      (skipWhitespace(str, skipIdent(str, skipWhitespace(str, skipTag(str, beg)))), end)
    else
      (skipWhitespace(str, skipTag(str, beg)), end)
  }

  /** Cleanup section text: collapse comment line leads and strip trailing newlines */
  def cleanupSectionText(str: String) = {
    var result = str.trim.replaceAll("\n\\s+\\*\\s+", " \n")
    while (result.endsWith("\n"))
      // Bug fix: previously truncated by `str.length - 1` (the ORIGINAL
      // string's length), which mis-trims whenever `trim`/`replaceAll`
      // changed the length; use the current result's length.
      result = result.substring(0, result.length - 1)
    result
  }

  /** Remove the sections tagged with any of `xs` from `raw`, keeping
   *  everything before the first removed section and re-terminating the
   *  comment with `*&#47;` when anything was cut.
   */
  def removeSections(raw: String, xs: String*): String = {
    val sections = tagIndex(raw)

    val toBeRemoved = for {
      section <- xs
      lines = sections filter { startsWithTag(raw, _, section) }
    } yield lines

    val end = startTag(raw, toBeRemoved.flatten.sortBy(_._1).toList)

    if (end == raw.length - 2) raw else raw.substring(0, end) + "*/"
  }
}
diff --git a/compiler/src/dotty/tools/dotc/util/DiffUtil.scala b/compiler/src/dotty/tools/dotc/util/DiffUtil.scala
new file mode 100644
index 000000000..b55aee719
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/DiffUtil.scala
@@ -0,0 +1,174 @@
+package dotty.tools.dotc.util
+
+import scala.annotation.tailrec
+import scala.collection.mutable
+
/** Token-level diffing between two strings, rendered with ANSI colors.
 *
 *  Strings are split into tokens (alphanumeric runs, single bracket/whitespace
 *  characters, runs of other characters) and aligned with Hirschberg's
 *  linear-space variant of Needleman-Wunsch sequence alignment.
 */
object DiffUtil {

  private final val ANSI_DEFAULT = "\u001B[0m"
  private final val ANSI_RED = "\u001B[31m"
  private final val ANSI_GREEN = "\u001B[32m"

  private final val DELETION_COLOR = ANSI_RED
  private final val ADDITION_COLOR = ANSI_GREEN

  /** Split `str` into a token list: maximal alphanumeric runs, single
   *  mirrored (bracket-like) or whitespace characters, and maximal runs
   *  of all remaining characters. Concatenating the tokens yields `str`.
   */
  @tailrec private def splitTokens(str: String, acc: List[String] = Nil): List[String] = {
    if (str == "") {
      acc.reverse
    } else {
      val head = str.charAt(0)
      val (token, rest) = if (Character.isAlphabetic(head) || Character.isDigit(head)) {
        str.span(c => Character.isAlphabetic(c) || Character.isDigit(c))
      } else if (Character.isMirrored(head) || Character.isWhitespace(head)) {
        // Brackets and whitespace are kept as single-character tokens.
        str.splitAt(1)
      } else {
        str.span { c =>
          !Character.isAlphabetic(c) && !Character.isDigit(c) &&
          !Character.isMirrored(c) && !Character.isWhitespace(c)
        }
      }
      splitTokens(rest, token :: acc)
    }
  }


  /** @return a tuple of the (found, expected, changedPercentage) diffs as strings
   *
   *  Each side is diffed against the other, so insertions relative to the
   *  other string are highlighted in each. NOTE(review): the `collect`s below
   *  deliberately drop `Modified`/`Deleted` patches — only `Unmodified` text
   *  and `Inserted` tokens are rendered on each side.
   */
  def mkColoredTypeDiff(found: String, expected: String): (String, String, Double) = {
    var totalChange = 0
    val foundTokens = splitTokens(found, Nil).toArray
    val expectedTokens = splitTokens(expected, Nil).toArray

    val diffExp = hirschberg(foundTokens, expectedTokens)
    val diffAct = hirschberg(expectedTokens, foundTokens)

    val exp = diffExp.collect {
      case Unmodified(str) => str
      case Inserted(str) =>
        totalChange += str.length
        ADDITION_COLOR + str + ANSI_DEFAULT
    }.mkString

    val fnd = diffAct.collect {
      case Unmodified(str) => str
      case Inserted(str) =>
        totalChange += str.length
        DELETION_COLOR + str + ANSI_DEFAULT
    }.mkString

    // Change ratio: inserted characters from both directions over total size.
    (fnd, exp, totalChange.toDouble / (expected.length + found.length))
  }

  /** Render `code` colored relative to `lastCode`: additions and replacements
   *  highlighted; deletions shown only when `printDiffDel` is set.
   */
  def mkColoredCodeDiff(code: String, lastCode: String, printDiffDel: Boolean): String = {

    val tokens = splitTokens(code, Nil).toArray
    val lastTokens = splitTokens(lastCode, Nil).toArray

    val diff = hirschberg(lastTokens, tokens)

    diff.collect {
      case Unmodified(str) => str
      case Inserted(str) => ADDITION_COLOR + str + ANSI_DEFAULT
      case Modified(old, str) if printDiffDel => DELETION_COLOR + old + ADDITION_COLOR + str + ANSI_DEFAULT
      case Modified(_, str) => ADDITION_COLOR + str + ANSI_DEFAULT
      case Deleted(str) if printDiffDel => DELETION_COLOR + str + ANSI_DEFAULT
      // Deleted tokens are silently dropped when printDiffDel is false.
    }.mkString
  }

  // Alignment result ADT: one Patch per aligned token (or token run).
  private sealed trait Patch
  private final case class Unmodified(str: String) extends Patch
  private final case class Modified(original: String, str: String) extends Patch
  private final case class Deleted(str: String) extends Patch
  private final case class Inserted(str: String) extends Patch

  /** Hirschberg's divide-and-conquer alignment: split `x` in half, find the
   *  optimal split point of `y` via forward/backward Needleman-Wunsch score
   *  rows, and recurse; base cases fall back to full Needleman-Wunsch.
   */
  private def hirschberg(a: Array[String], b: Array[String]): Array[Patch] = {
    def build(x: Array[String], y: Array[String], builder: mutable.ArrayBuilder[Patch]): Unit = {
      if (x.isEmpty) {
        builder += Inserted(y.mkString)
      } else if (y.isEmpty) {
        builder += Deleted(x.mkString)
      } else if (x.length == 1 || y.length == 1) {
        needlemanWunsch(x, y, builder)
      } else {
        val xlen = x.length
        val xmid = xlen / 2
        val ylen = y.length

        val (x1, x2) = x.splitAt(xmid)
        val leftScore = nwScore(x1, y)
        val rightScore = nwScore(x2.reverse, y.reverse)
        // Best split of y: maximize combined forward + backward score.
        val scoreSum = (leftScore zip rightScore.reverse).map {
          case (left, right) => left + right
        }
        val max = scoreSum.max
        val ymid = scoreSum.indexOf(max)

        val (y1, y2) = y.splitAt(ymid)
        build(x1, y1, builder)
        build(x2, y2, builder)
      }
    }
    val builder = Array.newBuilder[Patch]
    build(a, b, builder)
    builder.result()
  }

  /** Last row of the Needleman-Wunsch score matrix for `x` vs `y`
   *  (linear space). Scoring: match +2, mismatch -1, gap -2.
   */
  private def nwScore(x: Array[String], y: Array[String]): Array[Int] = {
    def ins(s: String) = -2
    def del(s: String) = -2
    def sub(s1: String, s2: String) = if (s1 == s2) 2 else -1

    val score = Array.fill(x.length + 1, y.length + 1)(0)
    for (j <- 1 to y.length)
      score(0)(j) = score(0)(j - 1) + ins(y(j - 1))
    for (i <- 1 to x.length) {
      score(i)(0) = score(i - 1)(0) + del(x(i - 1))
      for (j <- 1 to y.length) {
        val scoreSub = score(i - 1)(j - 1) + sub(x(i - 1), y(j - 1))
        val scoreDel = score(i - 1)(j) + del(x(i - 1))
        val scoreIns = score(i)(j - 1) + ins(y(j - 1))
        score(i)(j) = scoreSub max scoreDel max scoreIns
      }
    }
    Array.tabulate(y.length + 1)(j => score(x.length)(j))
  }

  /** Full Needleman-Wunsch with traceback for the small base cases.
   *  NOTE(review): the gap score `d = 1` here is positive, unlike the
   *  negative gap penalty in `nwScore` — looks intentional for base cases
   *  but preserved as-is; confirm against upstream before changing.
   */
  private def needlemanWunsch(x: Array[String], y: Array[String], builder: mutable.ArrayBuilder[Patch]): Unit = {
    def similarity(a: String, b: String) = if (a == b) 2 else -1
    val d = 1
    val score = Array.tabulate(x.length + 1, y.length + 1) { (i, j) =>
      if (i == 0) d * j
      else if (j == 0) d * i
      else 0
    }
    for (i <- 1 to x.length) {
      for (j <- 1 to y.length) {
        val mtch = score(i - 1)(j - 1) + similarity(x(i - 1), y(j - 1))
        val delete = score(i - 1)(j) + d
        val insert = score(i)(j - 1) + d
        score(i)(j) = mtch max insert max delete
      }
    }

    // Traceback from the bottom-right corner, prepending patches so the
    // final alignment list is in left-to-right order.
    var alignment = List.empty[Patch]
    var i = x.length
    var j = y.length
    while (i > 0 || j > 0) {
      if (i > 0 && j > 0 && score(i)(j) == score(i - 1)(j - 1) + similarity(x(i - 1), y(j - 1))) {
        val newHead =
          if (x(i - 1) == y(j - 1)) Unmodified(x(i - 1))
          else Modified(x(i - 1), y(j - 1))
        alignment = newHead :: alignment
        i = i - 1
        j = j - 1
      } else if (i > 0 && score(i)(j) == score(i - 1)(j) + d) {
        alignment = Deleted(x(i - 1)) :: alignment
        i = i - 1
      } else {
        alignment = Inserted(y(j - 1)) :: alignment
        j = j - 1
      }
    }
    builder ++= alignment
  }

}
diff --git a/compiler/src/dotty/tools/dotc/util/DotClass.scala b/compiler/src/dotty/tools/dotc/util/DotClass.scala
new file mode 100644
index 000000000..cdb697a45
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/DotClass.scala
@@ -0,0 +1,12 @@
+package dotty.tools.dotc.util
+
/** Adds standard functionality to a class.
 *  For now: Just the `unsupported` method.
 */
class DotClass {

  /** Signal that `methodName` is not supported on this object.
   *  @throws UnsupportedOperationException always, naming the class and method.
   */
  def unsupported(methodName: String): Nothing = {
    val message = getClass.toString + "." + methodName
    throw new UnsupportedOperationException(message)
  }
}
diff --git a/compiler/src/dotty/tools/dotc/util/FreshNameCreator.scala b/compiler/src/dotty/tools/dotc/util/FreshNameCreator.scala
new file mode 100644
index 000000000..521947895
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/FreshNameCreator.scala
@@ -0,0 +1,33 @@
+package dotty.tools
+package dotc
+package util
+
+import scala.collection.mutable
+
trait FreshNameCreator {
  /** Create a fresh name prefixed by `prefix`; never returns the same name twice. */
  def newName(prefix: String = ""): String

  // Legacy overloads kept for source compatibility with nsc callers;
  // the position argument is ignored.
  @deprecated("use newName(prefix)", "2.9.0")
  def newName(pos: scala.reflect.internal.util.Position, prefix: String): String = newName(prefix)
  @deprecated("use newName()", "2.9.0")
  def newName(pos: scala.reflect.internal.util.Position): String = newName()
}
+
object FreshNameCreator {
  /** Default implementation: one counter per sanitized prefix. */
  class Default extends FreshNameCreator {
    protected var counter = 0
    protected val counters = mutable.AnyRefMap[String, Int]() withDefaultValue 0

    /**
     * Create a fresh name with the given prefix. It is guaranteed
     * that the returned name has never been returned by a previous
     * call to this function (provided the prefix does not end in a digit).
     */
    def newName(prefix: String): String = {
      // '<' and '>' (as in `<init>`) are not valid in JVM names; map them to '$'.
      val safePrefix = prefix.replaceAll("""[<>]""", """\$""")
      val n = counters(safePrefix) + 1
      counters(safePrefix) = n
      // An empty prefix yields names of the shape `$1$`, `$2$`, ...
      if (prefix.isEmpty) "$" + n + "$" else safePrefix + n
    }
  }
}
diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala
new file mode 100644
index 000000000..ff470ef5d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala
@@ -0,0 +1,146 @@
+package dotty.tools.dotc.util
+
/** A hash set that allows some privileged protected access to its internals
 *
 *  Implementation: open addressing with linear probing over an `Array[AnyRef]`.
 *  A `null` slot marks a free entry, which is why `T` is bounded by `>: Null`.
 *
 *  @param initialCapacity table size after construction and after `clear()`
 *  @param loadFactor      fraction of the table that may fill before growing
 */
class HashSet[T >: Null <: AnyRef](initialCapacity: Int, loadFactor: Float = 0.25f) extends Set[T] {
  private var used: Int = _   // number of entries in the table
  private var limit: Int = _  // grow once `used` exceeds this
  private var table: Array[AnyRef] = _

  clear()

  /** The number of elements in the set */
  def size: Int = used

  private def allocate(size: Int) = {
    table = new Array[AnyRef](size)
    limit = (size * loadFactor).toInt
  }

  /** Remove all elements from this set and set back to initial configuration */
  def clear(): Unit = {
    used = 0
    allocate(initialCapacity)
  }

  /** Turn hashcode `x` into a table index */
  private def index(x: Int): Int = math.abs(x % table.length)

  /** Hashcode, can be overridden */
  def hash(x: T): Int = x.hashCode

  /** Find entry such that `x equals entry`. If it exists, return it.
   *  If not, enter `x` in set and return `x`.
   */
  def findEntryOrUpdate(x: T): T = {
    var h = index(hash(x))
    var entry = table(h)
    while (entry ne null) {
      if (x equals entry) return entry.asInstanceOf[T]
      h = index(h + 1)  // linear probing
      entry = table(h)
    }
    addEntryAt(h, x)
  }

  /** Add entry `x` at index `idx`, growing the table past the load limit. */
  private def addEntryAt(idx: Int, x: T) = {
    table(idx) = x
    used += 1
    if (used > limit) growTable()
    x
  }

  /** The entry in the set such that `x equals entry`, or else `null`. */
  def findEntry(x: T): T = {
    var h = index(hash(x))
    var entry = table(h)
    while ((entry ne null) && !(x equals entry)) {
      h = index(h + 1)
      entry = table(h)
    }
    entry.asInstanceOf[T]
  }

  // Cursor shared by the privileged `*ByHash` operations below.
  private var rover: Int = -1

  /** Add entry `x` to set */
  def addEntry(x: T): Unit = {
    var h = index(hash(x))
    var entry = table(h)
    while (entry ne null) {
      if (x equals entry) return
      h = index(h + 1)
      entry = table(h)
    }
    table(h) = x
    used += 1
    // Fix: previously `used > (table.length >> 2)`, which silently ignored
    // the `loadFactor` parameter (equivalent only for the default 0.25f).
    // Now consistent with `addEntryAt`.
    if (used > limit) growTable()
  }

  /** Add all entries in `xs` to set */
  def addEntries(xs: TraversableOnce[T]): Unit = {
    xs foreach addEntry
  }

  /** The iterator of all elements in the set.
   *  Note: `next()` on an exhausted iterator returns `null` rather than throwing.
   */
  def iterator = new Iterator[T] {
    private var i = 0
    def hasNext: Boolean = {
      // Skip over free slots to the next occupied one.
      while (i < table.length && (table(i) eq null)) i += 1
      i < table.length
    }
    def next(): T =
      if (hasNext) { i += 1; table(i - 1).asInstanceOf[T] }
      else null
  }

  /** Privileged access: Find first entry with given hashcode */
  protected def findEntryByHash(hashCode: Int): T = {
    rover = index(hashCode)
    nextEntryByHash(hashCode)
  }

  /** Privileged access: Find next entry with given hashcode. Needs to immediately
   *  follow a `findEntryByHash` or `nextEntryByHash` operation.
   */
  protected def nextEntryByHash(hashCode: Int): T = {
    var entry = table(rover)
    while (entry ne null) {
      rover = index(rover + 1)
      if (hash(entry.asInstanceOf[T]) == hashCode) return entry.asInstanceOf[T]
      entry = table(rover)
    }
    null
  }

  /** Privileged access: Add entry `x` at the last position where an unsuccessful
   *  `findEntryByHash` or `nextEntryByHash` operation returned. Needs to immediately
   *  follow a `findEntryByHash` or `nextEntryByHash` operation that was unsuccessful,
   *  i.e. that returned `null`.
   */
  protected def addEntryAfterScan(x: T): T = addEntryAt(rover, x)

  // Re-insert an entry during table growth: no duplicate check and no growth
  // trigger are needed, since entries are distinct and the table was just resized.
  private def addOldEntry(x: T): Unit = {
    var h = index(hash(x))
    var entry = table(h)
    while (entry ne null) {
      h = index(h + 1)
      entry = table(h)
    }
    table(h) = x
  }

  /** Double the table size and rehash all existing entries. */
  private def growTable(): Unit = {
    val oldtable = table
    allocate(table.length * 2)
    var i = 0
    while (i < oldtable.length) {
      val entry = oldtable(i)
      if (entry ne null) addOldEntry(entry.asInstanceOf[T])
      i += 1
    }
  }

  override def toString() = "HashSet(%d / %d)".format(used, table.length)
}
diff --git a/compiler/src/dotty/tools/dotc/util/LRUCache.scala b/compiler/src/dotty/tools/dotc/util/LRUCache.scala
new file mode 100644
index 000000000..5f53e81c4
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/LRUCache.scala
@@ -0,0 +1,100 @@
+package dotty.tools.dotc.util
+
+import reflect.ClassTag
+import annotation.tailrec
+
/** A least-recently-used cache for Key -> Value computations
 *  It currently keeps the last 16 associations (`LRUCache.Retained`); this
 *  can be changed to anywhere between 2 and 16 by changing `LRUCache.Retained`.
 *
 *  Implementation: We keep a ring of `Retained` places, linked
 *  with the `next` data structure. The ring models a priority queue.
 *  `last` points to the last element of the queue, and
 *  `next(last)` to the first one. Lookups compare keys
 *  sequentially from first to last. Elements with successful lookup
 *  get promoted to be first in the queue. Elements are evicted
 *  at the `last` position.
 *
 *  Note: keys are compared by reference identity (`eq`), not `equals`.
 */
class LRUCache[Key >: Null <: AnyRef : ClassTag, Value >: Null: ClassTag] {
  import LRUCache._
  val keys = new Array[Key](Retained)
  val values = new Array[Value](Retained)
  // Ring of successor indices, one 4-bit nibble per slot.
  var next = new SixteenNibbles(initialRing.bits)
  var last = Retained - 1 // value is arbitrary
  var lastButOne: Int = last - 1

  def first = next(last)

  /** Lookup key, returning value or `null` for not found.
   *  As a side effect, sets `lastButOne` to the element before `last`
   *  if key was not found.
   */
  def lookup(key: Key): Value = {
    @tailrec
    def lookupNext(prev: Int, current: Int, nx: SixteenNibbles): Value = {
      val follow = nx(current)
      if (keys(current) eq key) {
        // arrange so that found element is at position `first`.
        if (current == last) last = prev
        else if (prev != last) {
          // Splice `current` out of its place and relink it right after `last`.
          next = next.updated(prev, follow)
          next = next.updated(current, first)
          next = next.updated(last, current)
        }
        values(current)
      } else if (current == last) {
        // Reached the end without a match; remember the eviction predecessor.
        lastButOne = prev
        null
      } else
        lookupNext(current, follow, nx)
    }
    lookupNext(last, first, next)
  }

  /** Enter key/value in cache at position `last`.
   *  As a side effect, sets `last` to `lastButOne`.
   *  If `lastButOne` was set by a preceding unsuccessful `lookup`
   *  for the same key, this means that the new element is now the
   *  first in the queue. If there was no preceding lookup, the element
   *  is inserted at a random position in the queue.
   */
  def enter(key: Key, value: Value): Unit = {
    keys(last) = key
    values(last) = value
    last = lastButOne
  }

  /** Invalidate key. The invalidated element becomes
   *  the last in the queue.
   */
  def invalidate(key: Key): Unit =
    if (lookup(key) != null) {
      // lookup promoted the entry to `first`; null its key and evict next.
      keys(first) = null
      last = first
    }

  /** Infinite iterator of ring positions, starting at `first`. */
  def indices: Iterator[Int] = Iterator.iterate(first)(next.apply)

  /** All currently cached keys, in queue order. */
  def keysIterator: Iterator[Key] =
    indices take Retained map keys filter (_ != null)

  override def toString = {
    val assocs = keysIterator
      .toList // double reverse so that lookups do not perturb order
      .reverse
      .map(key => s"$key -> ${lookup(key)}")
      .reverse
    s"LRUCache(${assocs.mkString(", ")})"
  }
}
+
object LRUCache {

  /** The number of retained elements in the cache; must be at most 16,
   *  since each ring link is stored in a single nibble of a `SixteenNibbles`.
   */
  val Retained = 16

  /** The initial ring: 0 -> 1 -> ... -> (Retained - 1) -> 0 */
  val initialRing =
    // `foldLeft` instead of the symbolic `/:` (deprecated in newer Scala).
    (0 until Retained).foldLeft(new SixteenNibbles(0L))((nibbles, idx) =>
      nibbles.updated(idx, (idx + 1) % Retained))
}
diff --git a/compiler/src/dotty/tools/dotc/util/NameTransformer.scala b/compiler/src/dotty/tools/dotc/util/NameTransformer.scala
new file mode 100644
index 000000000..330d513fe
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/NameTransformer.scala
@@ -0,0 +1,163 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package dotty.tools.dotc
+package util
+
+import core.Names._
+import core.Decorators._
+
/** Provides functions to encode and decode Scala symbolic names.
 *  Also provides some constants.
 */
object NameTransformer {
  // XXX Short term: providing a way to alter these without having to recompile
  // the compiler before recompiling the compiler.
  val MODULE_SUFFIX_STRING = sys.props.getOrElse("SCALA_MODULE_SUFFIX_STRING", "$")
  val NAME_JOIN_STRING = sys.props.getOrElse("SCALA_NAME_JOIN_STRING", "$")
  val MODULE_INSTANCE_NAME = "MODULE$"

  /** Size of the `op2code` table; covers all 7-bit ASCII characters. */
  private val nops = 128
  /** Number of buckets in `code2op`, keyed by the two letters after '$'. */
  private val ncodes = 26 * 26

  /** A bucket entry linking an operator character to its `$`-encoding.
   *  `next` chains entries whose codes share the same first two letters.
   */
  private class OpCodes(val op: Char, val code: String, val next: OpCodes)

  /** Maps an operator character to its encoding, e.g. '+' -> "$plus". */
  private val op2code = new Array[String](nops)
  /** Maps the two letters following '$' to the candidate decodings. */
  private val code2op = new Array[OpCodes](ncodes)

  /** Register the encoding `code` for operator `op` in both tables. */
  private def enterOp(op: Char, code: String) = {
    op2code(op) = code
    // The bucket index is derived from the two letters after the '$'.
    val c = (code.charAt(1) - 'a') * 26 + code.charAt(2) - 'a'
    code2op(c) = new OpCodes(op, code, code2op(c))
  }

  /* Note: decoding assumes opcodes are only ever lowercase. */
  enterOp('~', "$tilde")
  enterOp('=', "$eq")
  enterOp('<', "$less")
  enterOp('>', "$greater")
  enterOp('!', "$bang")
  enterOp('#', "$hash")
  enterOp('%', "$percent")
  enterOp('^', "$up")
  enterOp('&', "$amp")
  enterOp('|', "$bar")
  enterOp('*', "$times")
  enterOp('/', "$div")
  enterOp('+', "$plus")
  enterOp('-', "$minus")
  enterOp(':', "$colon")
  enterOp('\\', "$bslash")
  enterOp('?', "$qmark")
  enterOp('@', "$at")

  /** Replace operator symbols by corresponding `\$opname`.
   *
   *  @param name the string to encode
   *  @return the string with all recognized opchars replaced with their encoding
   */
  def encode[N <: Name](name: N): N = {
    var buf: StringBuilder = null   // created lazily; stays null if nothing needs encoding
    val len = name.length
    var i = 0
    while (i < len) {
      val c = name(i)
      if (c < nops && (op2code(c) ne null)) {
        if (buf eq null) {
          buf = new StringBuilder()
          buf.append(name.slice(0, i))   // copy the untouched prefix on first change
        }
        buf.append(op2code(c))
        /* Handle glyphs that are not valid Java/JVM identifiers */
      }
      else if (!Character.isJavaIdentifierPart(c)) {
        if (buf eq null) {
          buf = new StringBuilder()
          buf.append(name.slice(0, i))
        }
        buf.append("$u%04X".format(c.toInt))   // encode as "$u" + 4 hex digits
      }
      else if (buf ne null) {
        buf.append(c)
      }
      i += 1
    }
    // If no character needed encoding, return the original name unchanged.
    if (buf eq null) name
    else if (name.isTermName) buf.toString.toTermName.asInstanceOf[N]
    else buf.toString.toTypeName.asInstanceOf[N]
  }

  /** Replace `\$opname` by corresponding operator symbol.
   *
   *  @param name0 the string to decode
   *  @return the string with all recognized operator symbol encodings replaced with their name
   */
  def decode(name0: String): String = {
    //System.out.println("decode: " + name);//DEBUG
    // A trailing "<init>" is rewritten to "this" before decoding.
    val name = if (name0.endsWith("<init>")) name0.substring(0, name0.length() - ("<init>").length()) + "this"
               else name0
    var buf: StringBuilder = null   // created lazily; stays null if nothing needs decoding
    val len = name.length()
    var i = 0
    while (i < len) {
      var ops: OpCodes = null
      var unicode = false
      val c = name charAt i
      if (c == '$' && i + 2 < len) {
        val ch1 = name.charAt(i + 1)
        if ('a' <= ch1 && ch1 <= 'z') {
          val ch2 = name.charAt(i + 2)
          if ('a' <= ch2 && ch2 <= 'z') {
            // Scan the bucket for a code matching the input at this position.
            ops = code2op((ch1 - 'a') * 26 + ch2 - 'a')
            while ((ops ne null) && !name.startsWith(ops.code, i)) ops = ops.next
            if (ops ne null) {
              if (buf eq null) {
                buf = new StringBuilder()
                buf.append(name.substring(0, i))
              }
              buf.append(ops.op)
              i += ops.code.length()
            }
            /* Handle the decoding of Unicode glyphs that are
             * not valid Java/JVM identifiers */
          } else if ((len - i) >= 6 && // Check that there are enough characters left
                     ch1 == 'u' &&
                     ((Character.isDigit(ch2)) ||
                      ('A' <= ch2 && ch2 <= 'F'))) {
            /* Skip past "$u", next four should be hexadecimal */
            val hex = name.substring(i + 2, i + 6)
            try {
              val str = Integer.parseInt(hex, 16).toChar
              if (buf eq null) {
                buf = new StringBuilder()
                buf.append(name.substring(0, i))
              }
              buf.append(str)
              /* 2 for "$u", 4 for hexadecimal number */
              i += 6
              unicode = true
            } catch {
              case _: NumberFormatException =>
                /* `hex` did not decode to a hexadecimal number, so
                 * do nothing. */
            }
          }
        }
      }
      /* If we didn't see an opcode or encoded Unicode glyph, and the
         buffer is non-empty, write the current character and advance
         one */
      if ((ops eq null) && !unicode) {
        if (buf ne null)
          buf.append(c)
        i += 1
      }
    }
    //System.out.println("= " + (if (buf == null) name else buf.toString()));//DEBUG
    if (buf eq null) name else buf.toString()
  }
}
diff --git a/compiler/src/dotty/tools/dotc/util/Positions.scala b/compiler/src/dotty/tools/dotc/util/Positions.scala
new file mode 100644
index 000000000..c3890cc9a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/Positions.scala
@@ -0,0 +1,173 @@
+package dotty.tools.dotc
+package util
+import language.implicitConversions
+
/** Position format in little endian:
 *  Start: unsigned 26 Bits (works for source files up to 64M)
 *  End: unsigned 26 Bits
 *  Point: unsigned 12 Bits relative to start
 *  NoPosition is encoded as start = 1, end = 0 — a normally invalid
 *  position, because end would lie before start.
 */
object Positions {

  private val StartEndBits = 26
  val StartEndMask: Long = (1L << StartEndBits) - 1
  // All 12 point-delta bits set: reserved to mark a position as synthetic.
  private val SyntheticPointDelta = (1 << (64 - StartEndBits * 2)) - 1

  /** The maximal representable offset in a position */
  val MaxOffset = StartEndMask

  /** Convert offset `x` to an integer by sign extending the original
   *  field of `StartEndBits` width.
   */
  def offsetToInt(x: Int) =
    x << (32 - StartEndBits) >> (32 - StartEndBits)

  /** A position indicates a range between a start offset and an end offset.
   *  Positions can be synthetic or source-derived. A source-derived position
   *  has in addition a point that lies somewhere between start and end. The point
   *  is roughly where the ^ would go if an error was diagnosed at that position.
   *  All quantities are encoded opaquely in a Long.
   */
  class Position(val coords: Long) extends AnyVal {

    /** Is this position different from NoPosition? */
    def exists = this != NoPosition

    /** The start of this position. */
    def start: Int = {
      assert(exists)
      (coords & StartEndMask).toInt
    }

    /** The end of this position */
    def end: Int = {
      assert(exists)
      ((coords >>> StartEndBits) & StartEndMask).toInt
    }

    /** The point of this position, returns start for synthetic positions */
    def point: Int = {
      assert(exists)
      val poff = pointDelta
      if (poff == SyntheticPointDelta) start else start + poff
    }

    /** The difference between point and start in this position */
    def pointDelta =
      (coords >>> (StartEndBits * 2)).toInt

    /** This position if it exists, otherwise `that`. */
    def orElse(that: Position) =
      if (this.exists) this else that

    /** The union of two positions. This is the least range that encloses
     *  both positions. It is always a synthetic position.
     */
    def union(that: Position) =
      if (!this.exists) that
      else if (!that.exists) this
      else Position(this.start min that.start, this.end max that.end, this.point)

    /** Does the range of this position contain the one of that position? */
    def contains(that: Position): Boolean =
      !that.exists || exists && (start <= that.start && end >= that.end)

    /** Is this position synthetic? */
    def isSynthetic = pointDelta == SyntheticPointDelta

    /** Is this position source-derived? */
    def isSourceDerived = !isSynthetic

    /** A position where all components are shifted by a given `offset`
     *  relative to this position.
     */
    def shift(offset: Int) =
      if (exists) fromOffsets(start + offset, end + offset, pointDelta)
      else this

    /** The zero-extent position with start and end at the point of this position */
    def focus = if (exists) Position(point) else NoPosition

    /** The zero-extent position with start and end at the start of this position */
    def startPos = if (exists) Position(start) else NoPosition

    /** The zero-extent position with start and end at the end of this position */
    def endPos = if (exists) Position(end) else NoPosition

    /** A copy of this position with a different start */
    def withStart(start: Int) =
      fromOffsets(start, this.end, if (isSynthetic) SyntheticPointDelta else this.point - start)

    /** A copy of this position with a different end */
    def withEnd(end: Int) = fromOffsets(this.start, end, pointDelta)

    /** A copy of this position with a different point */
    def withPoint(point: Int) = fromOffsets(this.start, this.end, point - this.start)

    /** A synthetic copy of this position */
    def toSynthetic = if (isSynthetic) this else Position(start, end)

    override def toString = {
      // Synthetic positions print with <...>, source-derived ones with [...].
      val (left, right) = if (isSynthetic) ("<", ">") else ("[", "]")
      if (exists)
        s"$left$start..${if (point == start) "" else s"$point.."}$end$right"
      else
        s"${left}no position${right}"
    }
  }

  /** Pack the three fields into a `Position`, without validation. */
  private def fromOffsets(start: Int, end: Int, pointDelta: Int) = {
    //assert(start <= end || start == 1 && end == 0, s"$start..$end")
    new Position(
      (start & StartEndMask).toLong |
      ((end & StartEndMask).toLong << StartEndBits) |
      (pointDelta.toLong << (StartEndBits * 2)))
  }

  /** A synthetic position with given start and end */
  def Position(start: Int, end: Int): Position = {
    val pos = fromOffsets(start, end, SyntheticPointDelta)
    assert(pos.isSynthetic)
    pos
  }

  /** A source-derived position with given start, end, and point delta */
  def Position(start: Int, end: Int, point: Int): Position = {
    // Clamp the delta to a non-negative value; deltas that would collide
    // with the synthetic marker fall back to 0 (i.e. point == start).
    val pointDelta = (point - start) max 0
    val pos = fromOffsets(start, end, if (pointDelta >= SyntheticPointDelta) 0 else pointDelta)
    assert(pos.isSourceDerived)
    pos
  }

  /** A synthetic zero-extent position that starts and ends at given `start`. */
  def Position(start: Int): Position = Position(start, start)

  /** A sentinel for a non-existing position */
  val NoPosition = Position(1, 0)

  /** The coordinate of a symbol. This is either an index or
   *  a zero-range position.
   */
  class Coord(val encoding: Int) extends AnyVal {
    // Positive encodings are indices shifted by 1; zero and negative
    // encodings are positions (0 itself being the missing coordinate).
    def isIndex = encoding > 0
    def isPosition = encoding <= 0
    def toIndex: Int = {
      assert(isIndex)
      encoding - 1
    }
    def toPosition = {
      assert(isPosition)
      if (this == NoCoord) NoPosition else Position(-1 - encoding)
    }
  }

  /** An index coordinate */
  implicit def indexCoord(n: Int): Coord = new Coord(n + 1)
  /** A position coordinate; only the position's point is stored. */
  implicit def positionCoord(pos: Position): Coord =
    if (pos.exists) new Coord(-(pos.point + 1))
    else NoCoord

  /** A sentinel for a missing coordinate */
  val NoCoord = new Coord(0)
}
diff --git a/compiler/src/dotty/tools/dotc/util/Property.scala b/compiler/src/dotty/tools/dotc/util/Property.scala
new file mode 100644
index 000000000..608fc88e6
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/Property.scala
@@ -0,0 +1,10 @@
+package dotty.tools.dotc.util
+
/** Defines a key type with which to tag properties, such as attachments
 *  or context properties.
 */
object Property {

  /** The class of keys for properties of type V.
   *  A `Key` has no members; keys are distinguished by reference identity,
   *  so every `new Key` creates a fresh, unique tag.
   */
  class Key[+V]
}
diff --git a/compiler/src/dotty/tools/dotc/util/Set.scala b/compiler/src/dotty/tools/dotc/util/Set.scala
new file mode 100644
index 000000000..3e906c6a8
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/Set.scala
@@ -0,0 +1,27 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+package dotty.tools.dotc.util
+
/** A common class for lightweight sets.
 *
 *  Concrete subclasses supply the storage through `findEntry`,
 *  `addEntry`, `iterator` and `clear`; all remaining operations
 *  are derived from those.
 */
abstract class Set[T >: Null] {

  /** The stored element equal to `x`, or `null` if there is none. */
  def findEntry(x: T): T

  /** Add `x` to this set. */
  def addEntry(x: T): Unit

  /** Iterate over all stored elements. */
  def iterator: Iterator[T]

  /** Remove all elements from this set. */
  def clear: Unit

  /** Apply `f` to every element, for its side effect only. */
  def foreach[U](f: T => U): Unit = iterator.foreach(f)

  /** Membership test; lets the set be used as a predicate. */
  def apply(x: T): Boolean = contains(x)

  /** Does this set hold an element equal to `x`? */
  def contains(x: T): Boolean = findEntry(x) != null

  /** All elements as a list, in iteration order. */
  def toList = iterator.toList
}
diff --git a/compiler/src/dotty/tools/dotc/util/ShowPickled.scala b/compiler/src/dotty/tools/dotc/util/ShowPickled.scala
new file mode 100644
index 000000000..477449074
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/ShowPickled.scala
@@ -0,0 +1,287 @@
+package dotty.tools.dotc
+package util
+
+import java.io.{File, FileInputStream, PrintStream}
+import java.lang.Long.toHexString
+import java.lang.Float.intBitsToFloat
+import java.lang.Double.longBitsToDouble
+import scala.reflect.internal.Flags
+import scala.reflect.internal.pickling.PickleFormat
+import core.unpickleScala2.PickleBuffer
+import core.Names._
+
object ShowPickled {
  import PickleFormat._

  /** One entry of a pickle: its ordinal, byte offset, tag, and raw payload bytes. */
  case class PickleBufferEntry(num: Int, startIndex: Int, tag: Int, bytes: Array[Byte]) {
    /** Is this entry itself a name? */
    def isName = tag == TERMname || tag == TYPEname
    /** Does this entry's payload begin with a reference to a name entry? */
    def hasName = tag match {
      case TYPEsym | ALIASsym | CLASSsym | MODULEsym | VALsym | EXTref | EXTMODCLASSref => true
      case _ => false
    }
    /** The name stored in this entry; errors if `isName` is false. */
    def readName =
      if (isName) new String(bytes, "UTF-8")
      else sys.error("%s is no name" format tagName)
    /** The index of the name entry this entry refers to; errors if `hasName` is false. */
    def nameIndex =
      if (hasName) readNat(bytes, 0)
      else sys.error("%s has no name" format tagName)

    def tagName = tag2string(tag)
    override def toString = "%d,%d: %s".format(num, startIndex, tagName)
  }

  case class PickleBufferEntryList(entries: IndexedSeq[PickleBufferEntry]) {
    /** The name denoted by entry `idx`, following one level of indirection if needed. */
    def nameAt(idx: Int) = {
      val entry = entries(idx)
      if (entry.isName) entry.readName
      else if (entry.hasName) entries(entry.nameIndex).readName
      else "?"
    }
  }

  /** Pair every (tag, bytes) entry of `buf` with its ordinal and byte offset. */
  def makeEntryList(buf: PickleBuffer, index: Array[Int]) = {
    val entries = buf.toIndexedSeq.zipWithIndex map {
      case ((tag, data), num) => PickleBufferEntry(num, index(num), tag, data)
    }

    PickleBufferEntryList(entries)
  }

  /** A human-readable name for a pickle tag. */
  def tag2string(tag: Int): String = tag match {
    case TERMname => "TERMname"
    case TYPEname => "TYPEname"
    case NONEsym => "NONEsym"
    case TYPEsym => "TYPEsym"
    case ALIASsym => "ALIASsym"
    case CLASSsym => "CLASSsym"
    case MODULEsym => "MODULEsym"
    case VALsym => "VALsym"
    case EXTref => "EXTref"
    case EXTMODCLASSref => "EXTMODCLASSref"
    case NOtpe => "NOtpe"
    case NOPREFIXtpe => "NOPREFIXtpe"
    case THIStpe => "THIStpe"
    case SINGLEtpe => "SINGLEtpe"
    case CONSTANTtpe => "CONSTANTtpe"
    case TYPEREFtpe => "TYPEREFtpe"
    case TYPEBOUNDStpe => "TYPEBOUNDStpe"
    case REFINEDtpe => "REFINEDtpe"
    case CLASSINFOtpe => "CLASSINFOtpe"
    case METHODtpe => "METHODtpe"
    case POLYtpe => "POLYtpe"
    case IMPLICITMETHODtpe => "METHODtpe" // IMPLICITMETHODtpe no longer used.
    case SUPERtpe => "SUPERtpe"
    case LITERALunit => "LITERALunit"
    case LITERALboolean => "LITERALboolean"
    case LITERALbyte => "LITERALbyte"
    case LITERALshort => "LITERALshort"
    case LITERALchar => "LITERALchar"
    case LITERALint => "LITERALint"
    case LITERALlong => "LITERALlong"
    case LITERALfloat => "LITERALfloat"
    case LITERALdouble => "LITERALdouble"
    case LITERALstring => "LITERALstring"
    case LITERALnull => "LITERALnull"
    case LITERALclass => "LITERALclass"
    case LITERALenum => "LITERALenum"
    case SYMANNOT => "SYMANNOT"
    case CHILDREN => "CHILDREN"
    case ANNOTATEDtpe => "ANNOTATEDtpe"
    case ANNOTINFO => "ANNOTINFO"
    case ANNOTARGARRAY => "ANNOTARGARRAY"
    // case DEBRUIJNINDEXtpe => "DEBRUIJNINDEXtpe"
    case EXISTENTIALtpe => "EXISTENTIALtpe"
    case TREE => "TREE"
    case MODIFIERS => "MODIFIERS"

    case _ => "***BAD TAG***(" + tag + ")"
  }

  /** Extremely regrettably, essentially copied from PickleBuffer.
   *  Reads a variable-length natural number starting at `index`:
   *  7 data bits per byte, high bit set on all but the last byte.
   */
  def readNat(data: Array[Byte], index: Int): Int = {
    var idx = index
    var result = 0L
    var b = 0L
    do {
      b = data(idx)
      idx += 1
      result = (result << 7) + (b & 0x7f)
    } while((b & 0x80) != 0L)

    result.toInt
  }

  /** Print a human-readable dump of the pickle in `buf` to `out`. */
  def printFile(buf: PickleBuffer, out: PrintStream = System.out): Unit = {
    out.println("Version " + buf.readNat() + "." + buf.readNat())
    val index = buf.createIndex
    val entryList = makeEntryList(buf, index)
    buf.readIndex = 0   // rewind; entries are revisited below via `index`

    def p(s: String) = out print s

    // Prints a name reference as " idx(name)".
    def printNameRef(): Unit = {
      val idx = buf.readNat()
      val name = entryList nameAt idx
      val toPrint = " %s(%s)".format(idx, name)

      out print toPrint
    }

    def printNat() = p(" " + buf.readNat())
    def printReadNat(x: Int) = p(" " + x)

    // All of these references are plain nats; the aliases document intent.
    def printSymbolRef() = printNat()
    def printTypeRef() = printNat()
    def printConstantRef() = printNat()
    def printAnnotInfoRef() = printNat()
    def printConstAnnotArgRef() = printNat()
    def printAnnotArgRef() = printNat()

    // Prints the common prefix of a symbol entry: name ref, owner ref,
    // flags, and either an info ref or privateWithin + info ref.
    def printSymInfo(end: Int, isType: Boolean): Unit = {
      printNameRef()
      printSymbolRef()
      val pflags = buf.readLongNat()
      def printFlags(privateWithin: Option[Int]) = {
        // NOTE(review): `accessBoundary` is computed but never printed —
        // confirm whether it was meant to be part of the output.
        val accessBoundary = (
          for (idx <- privateWithin) yield {
            val s = entryList nameAt idx
            idx + "(" + s + ")"
          }
        )
        val flagString = PickleBuffer.unpickleScalaFlags(pflags, isType).toString
        out.print(" %s[%s]".format(toHexString(pflags), flagString))
      }

      /** Might be info or privateWithin */
      val x = buf.readNat()
      if (buf.readIndex == end) {
        printFlags(None)
        printReadNat(x)
      }
      else {
        printFlags(Some(x))
        printTypeRef()
      }
    }

    /** Note: the entries which require some semantic analysis to be correctly
     *  interpreted are for the most part going to tell you the wrong thing.
     *  It's not so easy to duplicate the logic applied in the UnPickler.
     */
    def printEntry(i: Int): Unit = {
      buf.readIndex = index(i)
      p(i + "," + buf.readIndex + ": ")
      val tag = buf.readByte()
      out.print(tag2string(tag))
      val len = buf.readNat()
      val end = len + buf.readIndex
      p(" " + len + ":")
      tag match {
        case TERMname =>
          out.print(" ")
          out.print(termName(buf.bytes, buf.readIndex, len).toString)
          buf.readIndex = end
        case TYPEname =>
          out.print(" ")
          out.print(typeName(buf.bytes, buf.readIndex, len))
          buf.readIndex = end
        case TYPEsym | ALIASsym | CLASSsym | MODULEsym | VALsym =>
          printSymInfo(end, tag == TYPEsym || tag == ALIASsym || tag == CLASSsym)
          if (tag == CLASSsym && (buf.readIndex < end)) printTypeRef()
        case EXTref | EXTMODCLASSref =>
          printNameRef()
          if (buf.readIndex < end) { printSymbolRef() }
        case THIStpe =>
          printSymbolRef()
        case SINGLEtpe =>
          printTypeRef(); printSymbolRef()
        case CONSTANTtpe =>
          printTypeRef(); printConstantRef()
        case TYPEREFtpe =>
          printTypeRef(); printSymbolRef(); buf.until(end, printTypeRef)
        case TYPEBOUNDStpe =>
          printTypeRef(); printTypeRef()
        case REFINEDtpe =>
          printSymbolRef(); buf.until(end, printTypeRef)
        case CLASSINFOtpe =>
          printSymbolRef(); buf.until(end, printTypeRef)
        case METHODtpe | IMPLICITMETHODtpe =>
          printTypeRef(); buf.until(end, printTypeRef)
        case POLYtpe =>
          printTypeRef(); buf.until(end, printSymbolRef)
        case LITERALboolean =>
          out.print(if (buf.readLong(len) == 0L) " false" else " true")
        case LITERALbyte =>
          out.print(" " + buf.readLong(len).toByte)
        case LITERALshort =>
          out.print(" " + buf.readLong(len).toShort)
        case LITERALchar =>
          out.print(" " + buf.readLong(len).toChar)
        case LITERALint =>
          out.print(" " + buf.readLong(len).toInt)
        case LITERALlong =>
          out.print(" " + buf.readLong(len))
        case LITERALfloat =>
          out.print(" " + intBitsToFloat(buf.readLong(len).toInt))
        case LITERALdouble =>
          out.print(" " + longBitsToDouble(buf.readLong(len)))
        case LITERALstring =>
          printNameRef()
        case LITERALenum =>
          printSymbolRef()
        case LITERALnull =>
          out.print(" <null>")
        case LITERALclass =>
          printTypeRef()
        case CHILDREN =>
          printSymbolRef(); buf.until(end, printSymbolRef)
        case SYMANNOT =>
          printSymbolRef(); printTypeRef(); buf.until(end, printAnnotArgRef)
        case ANNOTATEDtpe =>
          printTypeRef(); buf.until(end, printAnnotInfoRef)
        case ANNOTINFO =>
          printTypeRef(); buf.until(end, printAnnotArgRef)
        case ANNOTARGARRAY =>
          buf.until(end, printConstAnnotArgRef)
        case EXISTENTIALtpe =>
          printTypeRef(); buf.until(end, printSymbolRef)

        case _ =>   // unknown payloads are skipped; the length check below reports them
      }
      out.println()
      if (buf.readIndex != end) {
        out.println("BAD ENTRY END: computed = %d, actual = %d, bytes = %s".format(
          end, buf.readIndex, buf.bytes.slice(index(i), (end max buf.readIndex)).mkString(", ")
        ))
      }
    }

    for (i <- 0 until index.length) printEntry(i)
  }

/*
 *
  def fromFile(path: String) = fromBytes(io.File(path).toByteArray)
  def fromName(name: String) = fromBytes(scalaSigBytesForPath(name) getOrElse Array())
  def fromBytes(data: => Array[Byte]): Option[PickleBuffer] =
    try Some(new PickleBuffer(data, 0, data.length))
    catch { case _: Exception => None }

  def show(what: String, pickle: PickleBuffer) = {
    Console.println(what)
    val saved = pickle.readIndex
    pickle.readIndex = 0
    printFile(pickle, Console.out)
    pickle.readIndex = saved
  }

  def main(args: Array[String]) {
    args foreach { arg =>
      (fromFile(arg) orElse fromName(arg)) match {
        case Some(pb) => show(arg + ":", pb)
        case _ => Console.println("Cannot read " + arg)
      }
    }
  }*/
}
diff --git a/compiler/src/dotty/tools/dotc/util/SimpleMap.scala b/compiler/src/dotty/tools/dotc/util/SimpleMap.scala
new file mode 100644
index 000000000..b8668d7e4
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/SimpleMap.scala
@@ -0,0 +1,223 @@
+package dotty.tools.dotc.util
+
+import collection.mutable.ListBuffer
+
/** A lightweight immutable map interface used inside the compiler.
 *  A missing binding is represented by `null`, so values must be non-null.
 */
abstract class SimpleMap[K <: AnyRef, +V >: Null <: AnyRef] extends (K => V) {

  /** The number of bindings in this map. */
  def size: Int

  /** The value bound to `k`, or `null` if `k` has no binding. */
  def apply(k: K): V

  /** This map without a binding for `k` (may return `this` unchanged). */
  def remove(k: K): SimpleMap[K, V]

  /** This map with `k` bound to `v`, replacing any previous binding. */
  def updated[V1 >: V <: AnyRef](k: K, v: V1): SimpleMap[K, V1]

  /** Transform every value with `f`. */
  def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1): SimpleMap[K, V1]

  /** Apply `f` to every (key, value) binding. */
  def foreachBinding(f: (K, V) => Unit): Unit

  /** Is `k` bound in this map? */
  def contains(k: K): Boolean = apply(k) != null

  /** Collect `f` applied to every binding, in traversal order. */
  def map2[T](f: (K, V) => T): List[T] = {
    var acc: List[T] = Nil
    foreachBinding((k, v) => acc = f(k, v) :: acc)
    acc.reverse
  }

  /** All keys, in traversal order. */
  def keys: List[K] = map2((k, _) => k)

  /** All bindings as pairs, in traversal order. */
  def toList: List[(K, V)] = map2((k, v) => (k, v))

  override def toString =
    map2((k, v) => s"$k -> $v").mkString("(", ", ", ")")
}
+
object SimpleMap {

  /** A `MapMore` that shrinks to this size collapses back to the small
   *  fixed-arity representation (see `MapMore.remove`).
   */
  private val CompactifyThreshold = 4

  /** The single shared empty map; typed loosely and cast in `Empty`. */
  private object myEmpty extends SimpleMap[AnyRef, Null] {
    def size = 0
    def apply(k: AnyRef) = null
    def remove(k: AnyRef) = this
    def updated[V1 >: Null <: AnyRef](k: AnyRef, v: V1) = new Map1(k, v)
    def mapValuesNow[V1 >: Null <: AnyRef](f: (AnyRef, V1) => V1) = this
    def foreachBinding(f: (AnyRef, Null) => Unit) = ()
  }

  /** The empty map for key type `K`. */
  def Empty[K <: AnyRef] = myEmpty.asInstanceOf[SimpleMap[K, Null]]

  /** A map of exactly one binding. */
  class Map1[K <: AnyRef, +V >: Null <: AnyRef] (k1: K, v1: V) extends SimpleMap[K, V] {
    def size = 1
    def apply(k: K) =
      if (k == k1) v1
      else null
    def remove(k: K) =
      if (k == k1) Empty.asInstanceOf[SimpleMap[K, V]]
      else this
    def updated[V1 >: V <: AnyRef](k: K, v: V1) =
      if (k == k1) new Map1(k, v)
      else new Map2(k1, v1, k, v)
    def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1) = {
      val w1 = f(k1, v1)
      // Reuse `this` when the value is reference-identical, preserving sharing.
      if (v1 eq w1) this else new Map1(k1, w1)
    }
    def foreachBinding(f: (K, V) => Unit) = f(k1, v1)
  }

  /** A map of exactly two bindings. */
  class Map2[K <: AnyRef, +V >: Null <: AnyRef] (k1: K, v1: V, k2: K, v2: V) extends SimpleMap[K, V] {
    def size = 2
    def apply(k: K) =
      if (k == k1) v1
      else if (k == k2) v2
      else null
    def remove(k: K) =
      if (k == k1) new Map1(k2, v2)
      else if (k == k2) new Map1(k1, v1)
      else this
    def updated[V1 >: V <: AnyRef](k: K, v: V1) =
      if (k == k1) new Map2(k, v, k2, v2)
      else if (k == k2) new Map2(k1, v1, k, v)
      else new Map3(k1, v1, k2, v2, k, v)
    def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1) = {
      val w1 = f(k1, v1); val w2 = f(k2, v2)
      if ((v1 eq w1) && (v2 eq w2)) this
      else new Map2(k1, w1, k2, w2)
    }
    def foreachBinding(f: (K, V) => Unit) = { f(k1, v1); f(k2, v2) }
  }

  /** A map of exactly three bindings. */
  class Map3[K <: AnyRef, +V >: Null <: AnyRef] (k1: K, v1: V, k2: K, v2: V, k3: K, v3: V) extends SimpleMap[K, V] {
    def size = 3
    def apply(k: K) =
      if (k == k1) v1
      else if (k == k2) v2
      else if (k == k3) v3
      else null
    def remove(k: K) =
      if (k == k1) new Map2(k2, v2, k3, v3)
      else if (k == k2) new Map2(k1, v1, k3, v3)
      else if (k == k3) new Map2(k1, v1, k2, v2)
      else this
    def updated[V1 >: V <: AnyRef](k: K, v: V1) =
      if (k == k1) new Map3(k, v, k2, v2, k3, v3)
      else if (k == k2) new Map3(k1, v1, k, v, k3, v3)
      else if (k == k3) new Map3(k1, v1, k2, v2, k, v)
      else new Map4(k1, v1, k2, v2, k3, v3, k, v)
    def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1) = {
      val w1 = f(k1, v1); val w2 = f(k2, v2); val w3 = f(k3, v3)
      if ((v1 eq w1) && (v2 eq w2) && (v3 eq w3)) this
      else new Map3(k1, w1, k2, w2, k3, w3)
    }
    def foreachBinding(f: (K, V) => Unit) = { f(k1, v1); f(k2, v2); f(k3, v3) }
  }

  /** A map of exactly four bindings; growing further switches to `MapMore`. */
  class Map4[K <: AnyRef, +V >: Null <: AnyRef] (k1: K, v1: V, k2: K, v2: V, k3: K, v3: V, k4: K, v4: V) extends SimpleMap[K, V] {
    def size = 4
    def apply(k: K) =
      if (k == k1) v1
      else if (k == k2) v2
      else if (k == k3) v3
      else if (k == k4) v4
      else null
    def remove(k: K) =
      if (k == k1) new Map3(k2, v2, k3, v3, k4, v4)
      else if (k == k2) new Map3(k1, v1, k3, v3, k4, v4)
      else if (k == k3) new Map3(k1, v1, k2, v2, k4, v4)
      else if (k == k4) new Map3(k1, v1, k2, v2, k3, v3)
      else this
    def updated[V1 >: V <: AnyRef](k: K, v: V1) =
      if (k == k1) new Map4(k, v, k2, v2, k3, v3, k4, v4)
      else if (k == k2) new Map4(k1, v1, k, v, k3, v3, k4, v4)
      else if (k == k3) new Map4(k1, v1, k2, v2, k, v, k4, v4)
      else if (k == k4) new Map4(k1, v1, k2, v2, k3, v3, k, v)
      else new MapMore(Array[AnyRef](k1, v1, k2, v2, k3, v3, k4, v4, k, v))
    def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1) = {
      val w1 = f(k1, v1); val w2 = f(k2, v2); val w3 = f(k3, v3); val w4 = f(k4, v4)
      if ((v1 eq w1) && (v2 eq w2) && (v3 eq w3) && (v4 eq w4)) this
      else new Map4(k1, w1, k2, w2, k3, w3, k4, w4)
    }
    def foreachBinding(f: (K, V) => Unit) = { f(k1, v1); f(k2, v2); f(k3, v3); f(k4, v4) }
  }

  /** A map of five or more bindings, stored as a flat key/value array:
   *  keys at even indices, the corresponding value at the following odd index.
   *
   *  NOTE(review): keys here are compared with `eq` (reference identity),
   *  while Map1..Map4 use `==`. This is only equivalent when keys are
   *  unique/interned references — confirm callers rely on that.
   */
  class MapMore[K <: AnyRef, +V >: Null <: AnyRef](bindings: Array[AnyRef]) extends SimpleMap[K, V] {
    private def key(i: Int): K = bindings(i).asInstanceOf[K]
    private def value(i: Int): V = bindings(i + 1).asInstanceOf[V]

    def size = bindings.length / 2

    def apply(k: K): V = {
      var i = 0
      while (i < bindings.length) {
        if (bindings(i) eq k) return value(i)
        i += 2
      }
      null
    }

    def remove(k: K): SimpleMap[K, V] = {
      var i = 0
      while (i < bindings.length) {
        if (bindings(i) eq k) return {
          if (size == CompactifyThreshold) {
            // Shrinking past the threshold: rebuild as small fixed-arity maps.
            var m: SimpleMap[K, V] = Empty[K]
            for (j <- 0 until bindings.length by 2)
              if (j != i) m = m.updated(key(j), value(j))
            m
          } else {
            // Copy the array, skipping the removed key/value pair.
            val bindings1 = new Array[AnyRef](bindings.length - 2)
            Array.copy(bindings, 0, bindings1, 0, i)
            Array.copy(bindings, i + 2, bindings1, i, bindings1.length - i)
            new MapMore(bindings1)
          }
        }
        i += 2
      }
      this
    }

    def updated[V1 >: V <: AnyRef](k: K, v: V1): SimpleMap[K, V] = {
      var i = 0
      while (i < bindings.length) {
        if (bindings(i) eq k)
          return {
            if (v eq bindings(i + 1)) this   // unchanged value: keep sharing
            else {
              val bindings1 = bindings.clone
              bindings1(i + 1) = v
              new MapMore(bindings1)
            }
          }
        i += 2
      }
      // New key: append the pair at the end.
      val bindings2 = new Array[AnyRef](bindings.length + 2)
      Array.copy(bindings, 0, bindings2, 0, bindings.length)
      bindings2(bindings.length) = k
      bindings2(bindings.length + 1) = v
      new MapMore(bindings2)
    }

    override def contains(k: K): Boolean = {
      var i = 0
      while (i < bindings.length) {
        if (bindings(i) eq k) return true
        i += 2
      }
      false
    }

    def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1) = {
      // The array is cloned lazily: only once a value actually changes.
      var bindings1: Array[AnyRef] = bindings
      var i = 0
      while (i < bindings.length) {
        val v = value(i)
        val v1 = f(key(i), v)
        if ((v1 ne v) && (bindings1 eq bindings))
          bindings1 = bindings.clone
        bindings1(i) = bindings(i)   // no-op self-assignment unless cloned above
        bindings1(i + 1) = v1
        i += 2
      }
      if (bindings1 eq bindings) this else new MapMore(bindings1)
    }

    def foreachBinding(f: (K, V) => Unit) = {
      var i = 0
      while (i < bindings.length) {
        f(key(i), value(i))
        i += 2
      }
    }
  }
}
diff --git a/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala b/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala
new file mode 100644
index 000000000..93817604e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala
@@ -0,0 +1,28 @@
+package dotty.tools.dotc.util
+
/** An efficient implementation of sequences of 16 indexed elements with
 *  values 0..15, packed into a single Long (one 4-bit nibble per element).
 */
class SixteenNibbles(val bits: Long) extends AnyVal {
  import SixteenNibbles._

  /** The value (in the range 0..15) stored at index `idx`. */
  def apply(idx: Int): Int =
    ((bits >>> (idx * Width)) & LongMask).toInt

  /** A copy of this sequence where the nibble at `idx` holds the low
   *  four bits of `value`; all other nibbles are unchanged.
   */
  def updated(idx: Int, value: Int): SixteenNibbles = {
    val shift = idx * Width
    val cleared = bits & ~(LongMask << shift)
    new SixteenNibbles(cleared | ((value & Mask).toLong << shift))
  }

  /** All 16 values, from index 0 to 15. */
  def elements: IndexedSeq[Int] = IndexedSeq.tabulate(16)(apply)

  override def toString =
    elements.mkString("SixteenNibbles(", ", ", ")")
}

object SixteenNibbles {
  final val Width = 4                  // bits per element
  final val Mask = (1 << Width) - 1    // 0xF: selects one nibble
  final val LongMask = Mask.toLong
}
diff --git a/compiler/src/dotty/tools/dotc/util/SourceFile.scala b/compiler/src/dotty/tools/dotc/util/SourceFile.scala
new file mode 100644
index 000000000..1d4c9c2ab
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/SourceFile.scala
@@ -0,0 +1,145 @@
+package dotty.tools
+package dotc
+package util
+
+import scala.collection.mutable.ArrayBuffer
+import dotty.tools.io._
+import annotation.tailrec
+import java.util.regex.Pattern
+import java.io.IOException
+import Chars._
+import ScriptSourceFile._
+import Positions._
+import scala.io.Codec
+
+import java.util.Optional
+
object ScriptSourceFile {
  // Matches the "!#" or "::!#" line that terminates a script header.
  @sharable private val headerPattern = Pattern.compile("""^(::)?!#.*(\r|\n|\r\n)""", Pattern.MULTILINE)
  private val headerStarts = List("#!", "::#!")

  /** Wrap `content` as a source file with any script header stripped.
   *  The result's `underlying` view keeps the full content, so positions
   *  can be mapped back to the original file.
   */
  def apply(file: AbstractFile, content: Array[Char]) = {
    /** Length of the script header from the given content, if there is one.
     *  The header begins with "#!" or "::#!" and ends with a line starting
     *  with "!#" or "::!#".
     */
    val headerLength =
      if (headerStarts exists (content startsWith _)) {
        val matcher = headerPattern matcher content.mkString
        if (matcher.find) matcher.end
        else throw new IOException("script file does not close its header with !# or ::!#")
      } else 0
    new SourceFile(file, content drop headerLength) {
      override val underlying = new SourceFile(file, content)
    }
  }
}
+
/** A source file, its character content, and derived line/column information. */
case class SourceFile(file: AbstractFile, content: Array[Char]) extends interfaces.SourceFile {

  /** Read `_file`'s bytes, decoded with `codec`. */
  def this(_file: AbstractFile, codec: Codec) = this(_file, new String(_file.toByteArray, codec.charSet).toCharArray)
  /** A virtual source file with the given name and content. */
  def this(sourceName: String, cs: Seq[Char]) = this(new VirtualFile(sourceName), cs.toArray)
  def this(file: AbstractFile, cs: Seq[Char]) = this(file, cs.toArray)

  /** Tab increment; can be overridden */
  def tabInc = 8

  override def name = file.name
  override def path = file.path
  override def jfile = Optional.ofNullable(file.file)

  // Equality is based on path and start offset, not on content, so a
  // script's stripped view compares unequal to its underlying file.
  override def equals(that: Any) = that match {
    case that: SourceFile => file.path == that.file.path && start == that.start
    case _ => false
  }
  override def hashCode = file.path.## + start.##

  /** The character at offset `idx`. */
  def apply(idx: Int) = content.apply(idx)

  val length = content.length

  /** true for all source files except `NoSource` */
  def exists: Boolean = true

  /** The underlying source file */
  def underlying: SourceFile = this

  /** The start of this file in the underlying source file */
  def start = 0

  /** Attach this source to `pos`, if it exists. */
  def atPos(pos: Position): SourcePosition =
    if (pos.exists) SourcePosition(underlying, pos)
    else NoSourcePosition

  def isSelfContained = underlying eq this

  /** Map a position to a position in the underlying source file.
   *  For regular source files, simply return the argument.
   */
  def positionInUltimateSource(position: SourcePosition): SourcePosition =
    SourcePosition(underlying, position.pos shift start)

  /** Is the character at `idx` a line break? */
  private def isLineBreak(idx: Int) =
    if (idx >= length) false else {
      val ch = content(idx)
      // don't identify the CR in CR LF as a line break, since LF will do.
      if (ch == CR) (idx + 1 == length) || (content(idx + 1) != LF)
      else isLineBreakChar(ch)
    }

  /** Offsets of the first character of every line, plus a sentinel at the end. */
  private def calculateLineIndices(cs: Array[Char]) = {
    val buf = new ArrayBuffer[Int]
    buf += 0
    for (i <- 0 until cs.length) if (isLineBreak(i)) buf += i + 1
    buf += cs.length // sentinel, so that findLine below works smoother
    buf.toArray
  }
  private lazy val lineIndices: Array[Int] = calculateLineIndices(content)

  /** Map line to offset of first character in line */
  def lineToOffset(index: Int): Int = lineIndices(index)

  /** A cache to speed up offsetToLine searches to similar lines */
  private var lastLine = 0

  /** Convert offset to line in this source file
   *  Lines are numbered from 0
   */
  def offsetToLine(offset: Int): Int = {
    lastLine = Util.bestFit(lineIndices, lineIndices.length, offset, lastLine)
    lastLine
  }

  /** The index of the first character of the line containing position `offset` */
  def startOfLine(offset: Int): Int = {
    require(offset >= 0)
    lineToOffset(offsetToLine(offset))
  }

  /** The start index of the line following the one containing position `offset` */
  def nextLine(offset: Int): Int =
    lineToOffset(offsetToLine(offset) + 1 min lineIndices.length - 1)

  /** The content of the line containing position `offset` */
  def lineContent(offset: Int): String =
    content.slice(startOfLine(offset), nextLine(offset)).mkString

  /** The column corresponding to `offset`, starting at 0.
   *  A tab advances the column to the next multiple of `tabInc`.
   */
  def column(offset: Int): Int = {
    var idx = startOfLine(offset)
    var col = 0
    while (idx != offset) {
      // FIX: was `(tabInc - col) % tabInc`, which advanced by 0 whenever
      // `col` was already a multiple of tabInc (e.g. a tab at column 0
      // left the column unchanged). Advance to the next tab stop instead.
      col += (if (content(idx) == '\t') tabInc - col % tabInc else 1)
      idx += 1
    }
    col
  }

  override def toString = file.toString
}
+
/** A sentinel source file, used where no real source is available. */
@sharable object NoSource extends SourceFile("<no source>", Nil) {
  override def exists = false
  override def atPos(pos: Position): SourcePosition = NoSourcePosition
}
+
diff --git a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala
new file mode 100644
index 000000000..aad4995d8
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala
@@ -0,0 +1,57 @@
+package dotty.tools
+package dotc
+package util
+
+import Positions.{Position, NoPosition}
+
/** A source position is comprised of a position in a source file */
case class SourcePosition(source: SourceFile, pos: Position, outer: SourcePosition = NoSourcePosition)
extends interfaces.SourcePosition {

  /** Does the underlying position exist? */
  def exists = pos.exists

  /** The text of the line containing the point */
  def lineContent: String = source.lineContent(point)

  /** The point offset of the underlying position */
  def point: Int = pos.point

  /** The line of the position, starting at 0 */
  def line: Int = source.offsetToLine(point)

  /** The lines of the position; never empty, falls back to the point's line */
  def lines: List[Int] = {
    val covered = List.range(source.offsetToLine(start), source.offsetToLine(end + 1))
    if (covered.isEmpty) line :: Nil
    else covered
  }

  /** Offsets of the first character of each line in `lines` */
  def lineOffsets: List[Int] =
    lines.map(source.lineToOffset)

  /** The content of the line with the given (0-based) number */
  def lineContent(lineNumber: Int): String =
    source.lineContent(source.lineToOffset(lineNumber))

  /** Line offsets split into those up to the point and those after it */
  def beforeAndAfterPoint: (List[Int], List[Int]) =
    lineOffsets.partition(_ <= point)

  /** The column of the position, starting at 0 */
  def column: Int = source.column(point)

  def start: Int = pos.start
  def startLine: Int = source.offsetToLine(start)
  def startColumn: Int = source.column(start)

  def end: Int = pos.end
  def endLine: Int = source.offsetToLine(end)
  def endColumn: Int = source.column(end)

  /** This position with `outer` as its enclosing position */
  def withOuter(outer: SourcePosition) = new SourcePosition(source, pos, outer)

  override def toString =
    if (source.exists) s"${source.file}:${line + 1}"
    else s"(no source file, offset = ${pos.point})"
}
+
/** A sentinel for a non-existing source position */
@sharable object NoSourcePosition extends SourcePosition(NoSource, NoPosition) {
  override def toString = "?"
  // Wrapping the sentinel in an outer position just yields that position.
  override def withOuter(outer: SourcePosition) = outer
}
+
diff --git a/compiler/src/dotty/tools/dotc/util/Stats.scala b/compiler/src/dotty/tools/dotc/util/Stats.scala
new file mode 100644
index 000000000..b7e0996f5
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/Stats.scala
@@ -0,0 +1,78 @@
+package dotty.tools
+package dotc
+package util
+
+import core.Contexts._
+import collection.mutable
+
/** Collects and prints runtime statistics about compiler operations.
 *
 *  Everything is inert unless the compile-time flag `enabled` is set to true;
 *  `record` and `track` then additionally require `monitored`, which
 *  `monitorHeartBeat` switches on for the duration of an operation.
 */
@sharable object Stats {

  /** Master switch; `record`/`track` do nothing while this is false.
   *  Being a constant, it lets the `@inline` guards drop the calls entirely.
   */
  final val enabled = false

  /** The period in ms in which stack snapshots are displayed */
  final val HeartBeatPeriod = 250

  /** Set while `monitorHeartBeat` runs an operation; gates actual recording */
  var monitored = false

  /** Current stack of tracked operation names.
   *  Volatile because the heartbeat thread reads it concurrently.
   */
  @volatile private var stack: List[String] = Nil

  /** Hit counts per operation name; absent keys count as 0 */
  val hits = new mutable.HashMap[String, Int] {
    override def default(key: String): Int = 0
  }

  /** Record `n` hits for operation `fn` (all "member-*" names are pooled) */
  @inline
  def record(fn: String, n: Int = 1) =
    if (enabled) doRecord(fn, n)

  private def doRecord(fn: String, n: Int) =
    if (monitored) {
      val name = if (fn.startsWith("member-")) "member" else fn
      hits(name) += n
    }

  /** Run `op`, counting it under `fn` and showing it in heartbeat stack dumps */
  @inline
  def track[T](fn: String)(op: => T) =
    if (enabled) doTrack(fn)(op) else op

  def doTrack[T](fn: String)(op: => T) =
    if (monitored) {
      stack = fn :: stack
      record(fn)
      try op
      finally stack = stack.tail // always pop, even if `op` throws
    } else op

  /** Background thread printing the tracked stack every `HeartBeatPeriod` ms */
  class HeartBeat extends Thread() {
    @volatile private[Stats] var continue = true

    // Prints outermost entries first by recursing before printing.
    private def printStack(stack: List[String]): Unit = stack match {
      case str :: rest =>
        printStack(rest)
        print(s"-> $str ")
      case Nil =>
        println()
        print("|")
    }

    override final def run(): Unit = {
      Thread.sleep(HeartBeatPeriod)
      printStack(stack)
      if (continue) run() // self tail call in a final method
    }
  }

  /** Run `op` with monitoring on and, if -Yheartbeat is set, periodic stack
   *  dumps; afterwards print the accumulated hit counts sorted by count.
   */
  def monitorHeartBeat[T](op: => T)(implicit ctx: Context) = {
    if (ctx.settings.Yheartbeat.value) {
      val hb = new HeartBeat() // was `var`, but never reassigned
      hb.start()
      monitored = true
      try op
      finally {
        hb.continue = false
        println()
        println(hits.toList.sortBy(_._2).map{ case (x, y) => s"$x -> $y" } mkString "\n")
        println(s"sizes: ${ctx.base.uniquesSizes}")
      }
    } else op
  }
}
diff --git a/compiler/src/dotty/tools/dotc/util/Util.scala b/compiler/src/dotty/tools/dotc/util/Util.scala
new file mode 100644
index 000000000..0d37f687b
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/Util.scala
@@ -0,0 +1,32 @@
+package dotty.tools.dotc.util
+import reflect.ClassTag
+
object Util {

  /** The index `i` in `candidates.indices` such that `candidates(i) <= x` and
   *  `candidates(i)` is closest to `x`, determined by binary search, or -1
   *  if `x < candidates(0)`.
   *  @param length the number of valid entries at the front of `candidates`
   *  @param hint If between 0 and `candidates.length` use this
   *              as the first search point, otherwise use
   *              `candidates.length/2`.
   *  @pre candidates is sorted
   */
  def bestFit(candidates: Array[Int], length: Int, x: Int, hint: Int = -1): Int = {
    // Both recursive calls are in tail position; the annotation makes the
    // compiler verify this stays a loop and never grows the stack.
    @annotation.tailrec
    def recur(lo: Int, hi: Int, mid: Int): Int =
      if (x < candidates(mid))
        recur(lo, mid - 1, (lo + mid - 1) / 2)
      else if (mid + 1 < length && x >= candidates(mid + 1))
        recur(mid + 1, hi, (mid + 1 + hi) / 2)
      else mid
    val initMid = if (0 <= hint && hint < length) hint else length / 2
    if (length == 0 || x < candidates(0)) -1
    else recur(0, length, initMid)
  }

  /** An array twice the size of given array, with existing elements copied over */
  def dble[T: ClassTag](arr: Array[T]) = {
    val arr1 = new Array[T](arr.length * 2)
    Array.copy(arr, 0, arr1, 0, arr.length)
    arr1
  }
}
diff --git a/compiler/src/dotty/tools/dotc/util/common.scala b/compiler/src/dotty/tools/dotc/util/common.scala
new file mode 100644
index 000000000..d9798aec5
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/common.scala
@@ -0,0 +1,14 @@
+package dotty.tools.dotc
+package util
+
+import core.Names.Name
+import core.Types.WildcardType
+
/** Common values hoisted out for performance */
object common {

  // Shared eta-expansions of Function.const, so call sites reuse one
  // function value instead of allocating a fresh closure each time.
  val alwaysTrue = Function.const(true) _
  val alwaysZero = Function.const(0) _
  val alwaysWildcardType = Function.const(WildcardType) _

}
diff --git a/compiler/src/dotty/tools/dotc/util/kwords.sc b/compiler/src/dotty/tools/dotc/util/kwords.sc
new file mode 100644
index 000000000..94c17eaf4
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/kwords.sc
@@ -0,0 +1,18 @@
+package dotty.tools.dotc.util
+
+import dotty.tools.dotc.parsing._
+import Scanners._
+import Tokens._
+
/** Scala IDE worksheet exercising the parser's keyword tables.
 *  The `//>` comments are worksheet evaluation results, not documentation.
 */
object kwords {
  println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet
  keywords.toList.map(tokenString) //> res0: List[String] = List(if, for, else, this, null, new, with, super, case,
  //| case class, case object, val, abstract, final, private, protected, override
  //| , implicit, var, def, type, extends, true, false, object, class, import, pac
  //| kage, yield, do, trait, sealed, throw, try, catch, finally, while, return, m
  //| atch, lazy, then, forSome, _, :, =, <-, =>, ';', ';', <:, >:, #, @, <%)
  keywords.toList.filter(kw => tokenString(kw) == null)
  //> res1: List[Int] = List()
  canStartStatTokens contains CASE //> res2: Boolean = false

}
diff --git a/compiler/src/dotty/tools/dotc/util/lrutest.sc b/compiler/src/dotty/tools/dotc/util/lrutest.sc
new file mode 100644
index 000000000..6e6328b24
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/util/lrutest.sc
@@ -0,0 +1,40 @@
+package dotty.tools.dotc.util
+
/** Scala IDE worksheet exercising LRUCache and SixteenNibbles.
 *  The `//>` comments are worksheet evaluation results, not documentation.
 */
object lrutest {
  println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet
  val bits = new SixteenNibbles(0L) //> bits : dotty.tools.dotc.util.SixteenNibbles = SixteenNibbles(0, 0, 0, 0, 0,
  //| 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
  bits.updated(1, 3) //> res0: dotty.tools.dotc.util.SixteenNibbles = SixteenNibbles(0, 3, 0, 0, 0, 0
  //| , 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
  LRUCache.initialRing //> res1: dotty.tools.dotc.util.SixteenNibbles = SixteenNibbles(1, 2, 3, 4, 5, 6
  //| , 7, 0, 0, 0, 0, 0, 0, 0, 0, 0)
  val cache = new LRUCache[String, String] //> cache : dotty.tools.dotc.util.LRUCache[String,String] = LRUCache()
  cache lookup "hi" //> res2: String = null
  cache enter ("hi", "x")
  cache.indices.take(10).toList //> res3: List[Int] = List(7, 0, 1, 2, 3, 4, 5, 6, 7, 0)
  cache.last //> res4: Int = 6
  cache lookup "hi" //> res5: String = x
  cache.indices.take(10).toList //> res6: List[Int] = List(7, 0, 1, 2, 3, 4, 5, 6, 7, 0)

  // Fill past capacity so the oldest entries are evicted.
  for (i <- 1 to 10) {
    if (cache.lookup(i.toString) == null)
      cache.enter(i.toString, i.toString)
  }

  cache.indices.take(10).toList //> res7: List[Int] = List(5, 6, 7, 0, 1, 2, 3, 4, 5, 6)
  cache //> res8: dotty.tools.dotc.util.LRUCache[String,String] = LRUCache(10 -> 10, 9 -
  //| > 9, 8 -> 8, 7 -> 7, 6 -> 6, 5 -> 5, 4 -> 4, 3 -> 3)
  cache //> res9: dotty.tools.dotc.util.LRUCache[String,String] = LRUCache(10 -> 10, 9 -
  //| > 9, 8 -> 8, 7 -> 7, 6 -> 6, 5 -> 5, 4 -> 4, 3 -> 3)
  cache.lookup("7") //> res10: String = 7
  cache.indices.take(10).toList //> res11: List[Int] = List(0, 5, 6, 7, 1, 2, 3, 4, 0, 5)
  cache.keysIterator.toList //> res12: List[String] = List(7, 10, 9, 8, 6, 5, 4, 3)
  cache.lookup("10") //> res13: String = 10
  cache.lookup("5") //> res14: String = 5
  cache //> res15: dotty.tools.dotc.util.LRUCache[String,String] = LRUCache(5 -> 5, 10 -
  //| > 10, 7 -> 7, 9 -> 9, 8 -> 8, 6 -> 6, 4 -> 4, 3 -> 3)
  cache.lookup("11") //> res16: String = null
  cache.enter("11", "!!")
  cache //> res17: dotty.tools.dotc.util.LRUCache[String,String] = LRUCache(11 -> !!, 5
  //| -> 5, 10 -> 10, 7 -> 7, 9 -> 9, 8 -> 8, 6 -> 6, 4 -> 4)
}