author    Simon Ochsenreither <simon@ochsenreither.de>  2013-06-27 02:34:16 +0200
committer Simon Ochsenreither <simon@ochsenreither.de>  2013-06-27 13:43:16 +0200
commit    51fe664b29edf21cda7e3cc4e05c78dd1a0a43b7 (patch)
tree      5278e233f13d0c306e927720afe9cb9ce0fb7445 /src/interactive
parent    040830d87f7f6ceb2815b9c69f0679ba6b30753d (diff)
download  scala-51fe664b29edf21cda7e3cc4e05c78dd1a0a43b7.tar.gz
          scala-51fe664b29edf21cda7e3cc4e05c78dd1a0a43b7.tar.bz2
          scala-51fe664b29edf21cda7e3cc4e05c78dd1a0a43b7.zip
Move some code from s.t.n.io to s.t.n.interactive
The only usages of scala.tools.nsc.io.{Lexer, Pickler, PrettyWriter, Replayer} can be found in scala.tools.nsc.interactive. Let's move those files closer to the code that uses them.
Diffstat (limited to 'src/interactive')
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Global.scala2
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Lexer.scala299
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Pickler.scala377
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Picklers.scala4
-rw-r--r--src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala41
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Replayer.scala74
6 files changed, 794 insertions, 3 deletions
diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala
index 99f2cd4056..5875a44025 100644
--- a/src/interactive/scala/tools/nsc/interactive/Global.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -9,7 +9,7 @@ import java.io.{ PrintWriter, StringWriter, FileReader, FileWriter }
import scala.collection.mutable
import mutable.{LinkedHashMap, SynchronizedMap, HashSet, SynchronizedSet}
import scala.util.control.ControlThrowable
-import scala.tools.nsc.io.{ AbstractFile, LogReplay, Logger, NullLogger, Replayer }
+import scala.tools.nsc.io.{ AbstractFile }
import scala.tools.nsc.util.MultiHashMap
import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, NoPosition }
import scala.tools.nsc.reporters._
diff --git a/src/interactive/scala/tools/nsc/interactive/Lexer.scala b/src/interactive/scala/tools/nsc/interactive/Lexer.scala
new file mode 100644
index 0000000000..82e8de3f3d
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Lexer.scala
@@ -0,0 +1,299 @@
+package scala.tools.nsc.interactive
+
+import java.io.Reader
+
+/** Companion object of class `Lexer` which defines tokens and some utility concepts
+ * used for tokens and lexers
+ */
+object Lexer {
+
+ /** An exception raised if an input does not correspond to what's expected
+ * @param rdr the lexer from which the bad input is read
+ * @param msg the error message
+ */
+ class MalformedInput(val rdr: Lexer, val msg: String) extends Exception("Malformed JSON input at "+rdr.tokenPos+": "+msg)
+
+ /** The class of tokens, i.e. descriptions of input words (or: lexemes).
+ * @param str the characters making up this token
+ */
+ class Token(val str: String) {
+ override def toString = str
+ }
+
+ /** A subclass of `Token` representing single-character delimiters
+ * @param char the delimiter character making up this token
+ */
+ case class Delim(char: Char) extends Token(s"'$char'")
+
+ /** A subclass of token representing integer literals */
+ case class IntLit(override val str: String) extends Token(str)
+
+ /** A subclass of token representing floating point literals */
+ case class FloatLit(override val str: String) extends Token(str)
+
+ /** A subclass of token representing string literals */
+ case class StringLit(override val str: String) extends Token(str) {
+ override def toString = quoted(str)
+ }
+
+ /** The `true` token */
+ val TrueLit = new Token("true")
+
+ /** The `false` token */
+ val FalseLit = new Token("false")
+
+ /** The `null` token */
+ val NullLit = new Token("null")
+
+ /** The '`(`' token */
+ val LParen = new Delim('(')
+
+ /** The '`)`' token */
+ val RParen = new Delim(')')
+
+ /** The '`{`' token */
+ val LBrace = new Delim('{')
+
+ /** The '`}`' token */
+ val RBrace = new Delim('}')
+
+ /** The '`[`' token */
+ val LBracket = new Delim('[')
+
+ /** The '`]`' token */
+ val RBracket = new Delim(']')
+
+ /** The '`,`' token */
+ val Comma = new Delim(',')
+
+ /** The '`:`' token */
+ val Colon = new Delim(':')
+
+ /** The token representing end of input */
+ val EOF = new Token("<end of input>")
+
+ private def toUDigit(ch: Int): Char = {
+ val d = ch & 0xF
+ (if (d < 10) d + '0' else d - 10 + 'A').toChar
+ }
+
+ private def addToStr(buf: StringBuilder, ch: Char) {
+ ch match {
+ case '"' => buf ++= "\\\""
+ case '\b' => buf ++= "\\b"
+ case '\f' => buf ++= "\\f"
+ case '\n' => buf ++= "\\n"
+ case '\r' => buf ++= "\\r"
+ case '\t' => buf ++= "\\t"
+ case '\\' => buf ++= "\\\\"
+ case _ =>
+ if (' ' <= ch && ch < 128) buf += ch
+ else buf ++= "\\u" += toUDigit(ch >>> 12) += toUDigit(ch >>> 8) += toUDigit(ch >>> 4) += toUDigit(ch.toInt)
+ }
+ }
+
+ /** Returns given string enclosed in `"`-quotes with all string characters escaped
+ * so that they correspond to the JSON standard.
+ * Characters that are escaped are: `"`, `\b`, `\f`, `\n`, `\r`, `\t`, `\`.
+ * Furthermore, every other character which is not in the ASCII range 32-127 is
+ * escaped as a four hex-digit unicode character of the form `\ u x x x x`.
+ * @param str the string to be quoted
+ */
+ def quoted(str: String): String = {
+ val buf = new StringBuilder += '\"'
+ str foreach (addToStr(buf, _))
+ buf += '\"'
+ buf.toString
+ }
+
+ private val BUF_SIZE = 2 << 16
+}
+
+import Lexer._
+
+/** A simple lexer for tokens as they are used in JSON, plus parens `(`, `)`
+ * Tokens understood are:
+ *
+ * `(`, `)`, `[`, `]`, `{`, `}`, `:`, `,`, `true`, `false`, `null`,
+ * strings (syntax as in JSON),
+ * integer numbers (syntax as in JSON: -?(0|\d+)),
+ * floating point numbers (syntax as in JSON: -?(0|\d+)(\.\d+)?((e|E)(+|-)?\d+)?)
+ * The end of input is represented as its own token, EOF.
+ * Lexers keep one token of lookahead.
+ *
+ * @param rd the reader from which characters are read.
+ */
+class Lexer(rd: Reader) {
+
+ /** The last-read character */
+ var ch: Char = 0
+
+ /** The number of characters read so far */
+ var pos: Long = 0
+
+ /** The last-read token */
+ var token: Token = _
+
+ /** The number of characters read before the start of the last-read token */
+ var tokenPos: Long = 0
+
+ private var atEOF: Boolean = false
+ private val buf = new Array[Char](BUF_SIZE)
+ private var nread: Int = 0
+ private var bp = 0
+
+ /** Reads next character into `ch` */
+ def nextChar() {
+ assert(!atEOF)
+ if (bp == nread) {
+ nread = rd.read(buf)
+ bp = 0
+ if (nread <= 0) { ch = 0; atEOF = true; return }
+ }
+ ch = buf(bp)
+ bp += 1
+ pos += 1
+ }
+
+ /** If last-read character equals given character, reads next character,
+ * otherwise raises an error
+ * @param c the given character to compare with last-read character
+ * @throws MalformedInput if character does not match
+ */
+ def acceptChar(c: Char) = if (ch == c) nextChar() else error("'"+c+"' expected")
+
+ private val sb = new StringBuilder
+
+ private def putChar() {
+ sb += ch; nextChar()
+ }
+
+ private def putAcceptString(str: String) {
+ str foreach acceptChar
+ sb ++= str
+ }
+
+ /** Skips whitespace and reads next lexeme into `token`
+ * @throws MalformedInput if lexeme not recognized as a valid token
+ */
+ def nextToken() {
+ sb.clear()
+ while (!atEOF && ch <= ' ') nextChar()
+ tokenPos = pos - 1
+ if (atEOF) token = EOF
+ else ch match {
+ case '(' => putChar(); token = LParen
+ case ')' => putChar(); token = RParen
+ case '{' => putChar(); token = LBrace
+ case '}' => putChar(); token = RBrace
+ case '[' => putChar(); token = LBracket
+ case ']' => putChar(); token = RBracket
+ case ',' => putChar(); token = Comma
+ case ':' => putChar(); token = Colon
+ case 't' => putAcceptString("true"); token = TrueLit
+ case 'f' => putAcceptString("false"); token = FalseLit
+ case 'n' => putAcceptString("null"); token = NullLit
+ case '"' => getString()
+ case '-' | '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => getNumber()
+ case _ => error("unrecoginezed start of token: '"+ch+"'")
+ }
+ //println("["+token+"]")
+ }
+
+ /** Reads a string literal, and forms a `StringLit` token from it.
+ * Last-read input character `ch` must be opening `"`-quote.
+ * @throws MalformedInput if lexeme not recognized as a string literal.
+ */
+ def getString() {
+ def udigit() = {
+ nextChar()
+ if ('0' <= ch && ch <= '9') ch - '0'
+ else if ('A' <= ch && ch <= 'F') ch - 'A' + 10
+ else if ('a' <= ch && ch <= 'f') ch - 'a' + 10
+ else error("illegal unicode escape character: '"+ch+"'")
+ }
+ val delim = ch
+ nextChar()
+ while (ch != delim && ch >= ' ') {
+ if (ch == '\\') {
+ nextChar()
+ ch match {
+ case '\'' => sb += '\''
+ case '"' => sb += '"'
+ case '\\' => sb += '\\'
+ case '/' => sb += '/'
+ case 'b' => sb += '\b'
+ case 'f' => sb += '\f'
+ case 'n' => sb += '\n'
+ case 'r' => sb += '\r'
+ case 't' => sb += '\t'
+ case 'u' => sb += (udigit() << 12 | udigit() << 8 | udigit() << 4 | udigit()).toChar
+ case _ => error("illegal escape character: '"+ch+"'")
+ }
+ nextChar()
+ } else {
+ putChar()
+ }
+ }
+ acceptChar(delim)
+ token = StringLit(sb.toString)
+ }
+
+ /** Reads a numeric literal, and forms an `IntLit` or `FloatLit` token from it.
+ * Last-read input character `ch` must be either `-` or a digit.
+ * @throws MalformedInput if lexeme not recognized as a numeric literal.
+ */
+ def getNumber() {
+ def digit() =
+ if ('0' <= ch && ch <= '9') putChar()
+ else error("<digit> expected")
+ def digits() =
+ do { digit() } while ('0' <= ch && ch <= '9')
+ var isFloating = false
+ if (ch == '-') putChar()
+ if (ch == '0') digit()
+ else digits()
+ if (ch == '.') {
+ isFloating = true
+ putChar()
+ digits()
+ }
+ if (ch == 'e' || ch == 'E') {
+ isFloating = true
+ putChar()
+ if (ch == '+' || ch == '-') putChar()
+ digits()
+ }
+ token = if (isFloating) FloatLit(sb.toString) else IntLit(sb.toString)
+ }
+
+ /** If current token equals given token, reads next token, otherwise raises an error.
+ * @param t the given token to compare current token with
+ * @throws MalformedInput if the two tokens do not match.
+ */
+ def accept(t: Token) {
+ if (token == t) nextToken()
+ else error(t+" expected, but "+token+" found")
+ }
+
+ /** If the current token is a delimiter consisting of the given character, reads the next token,
+ * otherwise raises an error.
+ * @param ch the given delimiter character to compare the current token with
+ * @throws MalformedInput if the current token `token` is not a delimiter, or
+ * consists of a character different from `ch`.
+ */
+ def accept(ch: Char) {
+ token match {
+ case Delim(`ch`) => nextToken()
+ case _ => accept(Delim(ch))
+ }
+ }
+
+ /** Always throws a `MalformedInput` exception with given error message.
+ * @param msg the error message
+ */
+ def error(msg: String) = throw new MalformedInput(this, msg)
+
+ nextChar()
+ nextToken()
+}
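For reference, a minimal sketch of driving the moved lexer by hand; the LexerDemo object and the sample input below are invented for illustration and assume the patched scala.tools.nsc.interactive.Lexer is on the classpath.

    import java.io.StringReader
    import scala.tools.nsc.interactive.Lexer
    import scala.tools.nsc.interactive.Lexer._

    object LexerDemo {
      def main(args: Array[String]): Unit = {
        // The constructor already reads the first character and token.
        val lexer = new Lexer(new StringReader("""{"xs": [1, 2.5, true, null]}"""))
        while (lexer.token != EOF) {   // EOF is the dedicated end-of-input token
          println(s"${lexer.tokenPos}: ${lexer.token}")
          lexer.nextToken()
        }
      }
    }

Each printed token is one of the Delim, IntLit, FloatLit, StringLit or keyword tokens defined in the companion object above.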
diff --git a/src/interactive/scala/tools/nsc/interactive/Pickler.scala b/src/interactive/scala/tools/nsc/interactive/Pickler.scala
new file mode 100644
index 0000000000..83f3fab925
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Pickler.scala
@@ -0,0 +1,377 @@
+package scala.tools.nsc.interactive
+
+import Lexer._
+import java.io.Writer
+import scala.language.implicitConversions
+import scala.reflect.ClassTag
+
+/** An abstract class for writing and reading Scala objects to and
+ * from a legible representation. The representation follows this grammar:
+ * {{{
+ * Pickled = `true` | `false` | `null` | NumericLit | StringLit |
+ * Labelled | Pickled `,` Pickled
+ * Labelled = StringLit `(` Pickled? `)`
+ * }}}
+ *
+ * All ...Lit classes are as in JSON. @see scala.tools.nsc.interactive.Lexer
+ *
+ * Each subclass of `Pickler` can write and read an individual class
+ * of values.
+ *
+ * @tparam T the type of values handled by this pickler.
+ *
+ * These Picklers build on the work of Andrew Kennedy. They are most closely inspired by
+ * Iulian Dragos' picklers for Scala to XML. See:
+ *
+ * <a href="http://code.google.com/p/gdata-scala-client/wiki/DevelopersGuide">
+ * http://code.google.com/p/gdata-scala-client/wiki/DevelopersGuide
+ * </a>
+ */
+abstract class Pickler[T] {
+
+ import Pickler._
+
+ /** Writes value in pickled form
+ * @param wr the writer to which pickled form is written
+ * @param x the value to write
+ */
+ def pickle(wr: Writer, x: T)
+
+ /** Reads value from pickled form.
+ *
+ * @param rd the lexer from which lexemes are read
+ * @return An `UnpickleSuccess` value if the current input corresponds to the
+ * kind of value that is unpickled by the current subclass of `Pickler`,
+ * an `UnpickleFailure` value otherwise.
+ * @throws `Lexer.MalformedInput` if the input is invalid.
+ */
+ def unpickle(rd: Lexer): Unpickled[T]
+
+ /** A pickler representing a `~`-pair of values as two consecutive pickled
+ * strings, separated by a comma.
+ * @param that the second pickler which together with the current pickler makes
+ * up the pair `this ~ that` to be pickled.
+ */
+ def ~ [U] (that: => Pickler[U]): Pickler[T ~ U] = seqPickler(this, that)
+
+ /** A pickler that adds a label to the current pickler, using the representation
+ * `label ( <current pickler> )`
+ *
+ * @param label the string to be added as a label.
+ */
+ def labelled(label: String): Pickler[T] = labelledPickler(label, this)
+
+ /** A pickler obtained from the current pickler by a pair of transformer functions
+ * @param in the function that maps values handled by the current pickler to
+ * values handled by the wrapped pickler.
+ * @param out the function that maps values handled by the wrapped pickler to
+ * values handled by the current pickler.
+ */
+ def wrapped [U] (in: T => U)(out: U => T): Pickler[U] = wrappedPickler(this)(in)(out)
+
+ /** A conditional pickler obtained from the current pickler.
+ * @param p the condition to test to find out whether pickler can handle
+ * some Scala value.
+ */
+ def cond(p: Any => Boolean): CondPickler[T] = conditionalPickler(this, p)
+
+ /** A conditional pickler handling values of some Scala class. It adds the
+ * class name as a label to the representation of the current pickler and
+ * restricts it to instances of that class.
+ * @param c the class of values handled by this pickler.
+ */
+ def asClass[U <: T](c: Class[U]): CondPickler[T] = this.labelled(c.getName).cond(c isInstance _)
+}
+
+object Pickler {
+ /** A base class representing unpickler result. It has two subclasses:
+ * `UnpickleSuccess` for successful unpicklings and `UnpickleFailure` for failures,
+ * where a value of the given type `T` could not be unpickled from input.
+ * @tparam T the type of unpickled values in case of success.
+ */
+ abstract class Unpickled[+T] {
+ /** Transforms success values to success values using given function,
+ * leaves failures alone
+ * @param f the function to apply.
+ */
+ def map[U](f: T => U): Unpickled[U] = this match {
+ case UnpickleSuccess(x) => UnpickleSuccess(f(x))
+ case f: UnpickleFailure => f
+ }
+ /** Transforms success values to successes or failures using given function,
+ * leaves failures alone.
+ * @param f the function to apply.
+ */
+ def flatMap[U](f: T => Unpickled[U]): Unpickled[U] = this match {
+ case UnpickleSuccess(x) => f(x)
+ case f: UnpickleFailure => f
+ }
+ /** Tries alternate expression if current result is a failure
+ * @param alt the alternate expression to be tried in case of failure
+ */
+ def orElse[U >: T](alt: => Unpickled[U]): Unpickled[U] = this match {
+ case UnpickleSuccess(x) => this
+ case f: UnpickleFailure => alt
+ }
+
+ /** Transforms failures into thrown `MalformedInput` exceptions.
+ * @throws MalformedInput if current result is a failure
+ */
+ def requireSuccess: UnpickleSuccess[T] = this match {
+ case s @ UnpickleSuccess(x) => s
+ case f: UnpickleFailure =>
+ throw new MalformedInput(f.rd, "Unrecoverable unpickle failure:\n"+f.errMsg)
+ }
+ }
+
+ /** A class representing successful unpicklings
+ * @tparam T the type of the unpickled value
+ * @param result the unpickled value
+ */
+ case class UnpickleSuccess[+T](result: T) extends Unpickled[T]
+
+ /** A class representing unpickle failures
+ * @param msg an error message describing what failed.
+ * @param rd the lexer unpickled values were read from (can be used to get
+ * error position, for instance).
+ */
+ class UnpickleFailure(msg: => String, val rd: Lexer) extends Unpickled[Nothing] {
+ def errMsg = msg
+ override def toString = "Failure at "+rd.tokenPos+":\n"+msg
+ }
+
+ private def errorExpected(rd: Lexer, msg: => String) =
+ new UnpickleFailure("expected: "+msg+"\n" +
+ "found : "+rd.token,
+ rd)
+
+ private def nextSuccess[T](rd: Lexer, result: T) = {
+ rd.nextToken()
+ UnpickleSuccess(result)
+ }
+
+ /** The implicit `Pickler` value for type `T`. Equivalent to `implicitly[Pickler[T]]`.
+ */
+ def pkl[T: Pickler] = implicitly[Pickler[T]]
+
+ /** A class representing `~`-pairs */
+ case class ~[+S, +T](fst: S, snd: T)
+
+ /** A wrapper class to be able to use `~` as an infix method */
+ implicit class TildeDecorator[S](x: S) {
+ /** Infix method that forms a `~`-pair. */
+ def ~ [T](y: T): S ~ T = new ~ (x, y)
+ }
+
+ /** Same as `p.labelled(label)`.
+ */
+ def labelledPickler[T](label: String, p: Pickler[T]): Pickler[T] = new Pickler[T] {
+ def pickle(wr: Writer, x: T) = {
+ wr.write(quoted(label))
+ wr.write("(")
+ p.pickle(wr, x)
+ wr.write(")")
+ }
+ def unpickle(rd: Lexer): Unpickled[T] =
+ rd.token match {
+ case StringLit(`label`) =>
+ rd.nextToken()
+ rd.accept('(')
+ val result = p.unpickle(rd).requireSuccess
+ rd.accept(')')
+ result
+ case _ =>
+ errorExpected(rd, quoted(label)+"(...)")
+ }
+ }
+
+ /** Same as `p.wrapped(in)(out)`
+ */
+ def wrappedPickler[S, T](p: Pickler[S])(in: S => T)(out: T => S) = new Pickler[T] {
+ def pickle(wr: Writer, x: T) = p.pickle(wr, out(x))
+ def unpickle(rd: Lexer) = p.unpickle(rd) map in
+ }
+
+ /** Same as `p.cond(condition)`
+ */
+ def conditionalPickler[T](p: Pickler[T], condition: Any => Boolean) = new CondPickler[T](condition) {
+ def pickle(wr: Writer, x: T) = p.pickle(wr, x)
+ def unpickle(rd: Lexer) = p.unpickle(rd)
+ }
+
+ /** Same as `p ~ q`
+ */
+ def seqPickler[T, U](p: Pickler[T], q: => Pickler[U]) = new Pickler[T ~ U] {
+ lazy val qq = q
+ def pickle(wr: Writer, x: T ~ U) = {
+ p.pickle(wr, x.fst)
+ wr.write(',')
+ qq.pickle(wr, x.snd)
+ }
+ def unpickle(rd: Lexer) =
+ for (x <- p.unpickle(rd); y <- { rd.accept(','); qq.unpickle(rd).requireSuccess })
+ yield x ~ y
+ }
+
+ /** Same as `p | q`
+ */
+ def eitherPickler[T, U <: T, V <: T](p: CondPickler[U], q: => CondPickler[V]) =
+ new CondPickler[T](x => p.canPickle(x) || q.canPickle(x)) {
+ lazy val qq = q
+ override def tryPickle(wr: Writer, x: Any): Boolean =
+ p.tryPickle(wr, x) || qq.tryPickle(wr, x)
+ def pickle(wr: Writer, x: T) =
+ require(tryPickle(wr, x),
+ "no pickler found for "+x+" of class "+x.getClass.getName)
+ def unpickle(rd: Lexer) = p.unpickle(rd) orElse qq.unpickle(rd)
+ }
+
+ /** A conditional pickler for singleton objects. It represents these
+ * with the object's underlying class as a label.
+ * Example: The object `scala.None` would be represented as `scala.None$()`.
+ */
+ def singletonPickler[T <: AnyRef](x: T): CondPickler[T] =
+ unitPickler
+ .wrapped { _ => x } { x => () }
+ .labelled (x.getClass.getName)
+ .cond (x eq _.asInstanceOf[AnyRef])
+
+ /** A pickler that handles instances of classes that have an empty constructor.
+ * It represents them as `$new ( <name of class> )`.
+ * When unpickling, a new instance of the class is created using the empty
+ * constructor of the class via `Class.forName(<name of class>).newInstance()`.
+ */
+ def javaInstancePickler[T <: AnyRef]: Pickler[T] =
+ (stringPickler labelled "$new")
+ .wrapped { name => Class.forName(name).newInstance().asInstanceOf[T] } { _.getClass.getName }
+
+ /** A pickler that handles iterators. It pickles all values
+ * returned by an iterator separated by commas.
+ * When unpickling, it always returns an `UnpickleSuccess` containing an iterator.
+ * This iterator returns 0 or more values that are obtained by unpickling
+ * until a closing parenthesis, bracket or brace or the end of input is encountered.
+ *
+ * This means that iterator picklers should not be directly followed by `~`
+ * because the pickler would also read any values belonging to the second
+ * part of the `~`-pair.
+ *
+ * What's usually done instead is that the iterator pickler is wrapped and labelled
+ * to handle other kinds of sequences.
+ */
+ implicit def iterPickler[T: Pickler]: Pickler[Iterator[T]] = new Pickler[Iterator[T]] {
+ lazy val p = pkl[T]
+ def pickle(wr: Writer, xs: Iterator[T]) {
+ var first = true
+ for (x <- xs) {
+ if (first) first = false else wr.write(',')
+ p.pickle(wr, x)
+ }
+ }
+ def unpickle(rd: Lexer): Unpickled[Iterator[T]] = UnpickleSuccess(new Iterator[T] {
+ var first = true
+ def hasNext = {
+ val t = rd.token
+ t != EOF && t != RParen && t != RBrace && t != RBracket
+ }
+ def next(): T = {
+ if (first) first = false else rd.accept(',')
+ p.unpickle(rd).requireSuccess.result
+ }
+ })
+ }
+
+ /** A pickler that handles values that can be represented as a single token.
+ * @param kind the kind of token representing the value, used in error messages
+ * for unpickling.
+ * @param matcher A partial function from tokens to handled values. Unpickling
+ * succeeds if the matcher function is defined on the current token.
+ */
+ private def tokenPickler[T](kind: String)(matcher: PartialFunction[Token, T]) = new Pickler[T] {
+ def pickle(wr: Writer, x: T) = wr.write(x.toString)
+ def unpickle(rd: Lexer) =
+ if (matcher isDefinedAt rd.token) nextSuccess(rd, matcher(rd.token))
+ else errorExpected(rd, kind)
+ }
+
+ /** A pickler for values of type `Long`, represented as integer literals */
+ implicit val longPickler: Pickler[Long] =
+ tokenPickler("integer literal") { case IntLit(s) => s.toLong }
+
+ /** A pickler for values of type `Int`, represented as integer literals */
+ implicit val intPickler: Pickler[Int] = longPickler.wrapped { _.toInt } { _.toLong }
+
+ /** A conditional pickler for the boolean value `true` */
+ private val truePickler =
+ tokenPickler("boolean literal") { case TrueLit => true } cond { _ == true }
+
+ /** A conditional pickler for the boolean value `false` */
+ private val falsePickler =
+ tokenPickler("boolean literal") { case FalseLit => false } cond { _ == false }
+
+ /** A pickler for values of type `Boolean`, represented as the literals `true` or `false`. */
+ implicit def booleanPickler: Pickler[Boolean] = truePickler | falsePickler
+
+ /** A pickler for values of type `Unit`, represented by the empty character string */
+ implicit val unitPickler: Pickler[Unit] = new Pickler[Unit] {
+ def pickle(wr: Writer, x: Unit) {}
+ def unpickle(rd: Lexer): Unpickled[Unit] = UnpickleSuccess(())
+ }
+
+ /** A pickler for values of type `String`, represented as string literals */
+ implicit val stringPickler: Pickler[String] = new Pickler[String] {
+ def pickle(wr: Writer, x: String) = wr.write(if (x == null) "null" else quoted(x))
+ def unpickle(rd: Lexer) = rd.token match {
+ case StringLit(s) => nextSuccess(rd, s)
+ case NullLit => nextSuccess(rd, null)
+ case _ => errorExpected(rd, "string literal")
+ }
+ }
+
+ /** A pickler for pairs, represented as `~`-pairs */
+ implicit def tuple2Pickler[T1: Pickler, T2: Pickler]: Pickler[(T1, T2)] =
+ (pkl[T1] ~ pkl[T2])
+ .wrapped { case x1 ~ x2 => (x1, x2) } { case (x1, x2) => x1 ~ x2 }
+ .labelled ("tuple2")
+
+ /** A pickler for 3-tuples, represented as `~`-tuples */
+ implicit def tuple3Pickler[T1, T2, T3](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3]): Pickler[(T1, T2, T3)] =
+ (p1 ~ p2 ~ p3)
+ .wrapped { case x1 ~ x2 ~ x3 => (x1, x2, x3) } { case (x1, x2, x3) => x1 ~ x2 ~ x3 }
+ .labelled ("tuple3")
+
+ /** A pickler for list values */
+ implicit def listPickler[T: Pickler]: Pickler[List[T]] =
+ iterPickler[T] .wrapped { _.toList } { _.iterator } .labelled ("scala.List")
+}
+
+/** A subclass of `Pickler` that can indicate whether a particular value can be pickled by instances
+ * of this class.
+ * @param canPickle The predicate that indicates whether a given value
+ * can be pickled by instances of this class.
+ */
+abstract class CondPickler[T](val canPickle: Any => Boolean) extends Pickler[T] {
+ import Pickler._
+
+ /** Pickles given value `x` if possible, as indicated by `canPickle(x)`.
+ */
+ def tryPickle(wr: Writer, x: Any): Boolean = {
+ val result = canPickle(x)
+ if (result) pickle(wr, x.asInstanceOf[T])
+ result
+ }
+
+ /** A pickler obtained from this pickler and an alternative pickler.
+ * To pickle a value, this pickler is tried first. If it cannot handle
+ * the object (as indicated by its `canPickle` test), then the
+ * alternative pickler is tried.
+ * To unpickle a value, this unpickler is tried first. If it cannot read
+ * the input (as indicated by an `UnpickleFailure` result), then the
+ * alternative pickler is tried.
+ * @tparam V The handled type of the returned pickler.
+ * @tparam U The handled type of the alternative pickler.
+ * @param that The alternative pickler.
+ */
+ def | [V >: T, U <: V] (that: => CondPickler[U]): CondPickler[V] =
+ eitherPickler[V, T, U](this, that)
+}
+
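As a rough sketch of how these combinators compose (PicklerDemo and the sample values are invented; only the picklers themselves come from this patch), a (String, Int) pair can be pickled to a writer and unpickled back through a Lexer:

    import java.io.{StringReader, StringWriter}
    import scala.tools.nsc.interactive.Lexer
    import scala.tools.nsc.interactive.Pickler._

    object PicklerDemo {
      def main(args: Array[String]): Unit = {
        // Assemble a Pickler[(String, Int)] from the implicit building blocks.
        val p = pkl[(String, Int)]

        val wr = new StringWriter
        p.pickle(wr, ("answer", 42))
        println(wr.toString)        // "tuple2"("answer",42)

        val rd = new Lexer(new StringReader(wr.toString))
        println(p.unpickle(rd))     // UnpickleSuccess((answer,42))
      }
    }

The round trip relies on tuple2Pickler, stringPickler and intPickler being found implicitly, and the labelled output follows the Pickled grammar given in the class comment.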
diff --git a/src/interactive/scala/tools/nsc/interactive/Picklers.scala b/src/interactive/scala/tools/nsc/interactive/Picklers.scala
index 900a06333d..e75b4a3cc6 100644
--- a/src/interactive/scala/tools/nsc/interactive/Picklers.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Picklers.scala
@@ -7,10 +7,10 @@ package interactive
import util.InterruptReq
import scala.reflect.internal.util.{ SourceFile, BatchSourceFile }
-import io.{ AbstractFile, PlainFile, Pickler, CondPickler }
+import io.{ AbstractFile, PlainFile }
import util.EmptyAction
import scala.reflect.internal.util.{ RangePosition, OffsetPosition, TransparentPosition }
-import io.Pickler._
+import Pickler._
import scala.collection.mutable
import mutable.ListBuffer
diff --git a/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala b/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala
new file mode 100644
index 0000000000..d7dadcc6a8
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala
@@ -0,0 +1,41 @@
+package scala.tools.nsc.interactive
+
+import java.io.Writer
+
+class PrettyWriter(wr: Writer) extends Writer {
+ protected val indentStep = " "
+ private var indent = 0
+ private def newLine() {
+ wr.write('\n')
+ wr.write(indentStep * indent)
+ }
+ def close() = wr.close()
+ def flush() = wr.flush()
+ def write(str: Array[Char], off: Int, len: Int): Unit = {
+ if (off < str.length && off < len) {
+ str(off) match {
+ case '{' | '[' | '(' =>
+ indent += 1
+ wr.write(str(off).toInt)
+ newLine()
+ wr.write(str, off + 1, len - 1)
+ case '}' | ']' | ')' =>
+ wr.write(str, off, len)
+ indent -= 1
+ case ',' =>
+ wr.write(',')
+ newLine()
+ wr.write(str, off + 1, len - 1)
+ case ':' =>
+ wr.write(':')
+ wr.write(' ')
+ wr.write(str, off + 1, len - 1)
+ case _ =>
+ wr.write(str, off, len)
+ }
+ } else {
+ wr.write(str, off, len)
+ }
+ }
+ override def toString = wr.toString
+}
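A small sketch of what this indentation layer does when pickled output is routed through it; PrettyWriterDemo and the token sequence are made up, but the picklers do emit their output in this token-at-a-time fashion.

    import java.io.StringWriter
    import scala.tools.nsc.interactive.PrettyWriter

    object PrettyWriterDemo {
      def main(args: Array[String]): Unit = {
        val out = new StringWriter
        val pw  = new PrettyWriter(out)
        // Feed one token per write call, the way the picklers do.
        for (chunk <- List("\"unitOf\"", "(", "\"Foo.scala\"", ",", "1", ")"))
          pw.write(chunk)
        pw.flush()
        println(out.toString)
        // "unitOf"(
        //   "Foo.scala",
        //   1)
      }
    }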
diff --git a/src/interactive/scala/tools/nsc/interactive/Replayer.scala b/src/interactive/scala/tools/nsc/interactive/Replayer.scala
new file mode 100644
index 0000000000..0e3e2493fe
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Replayer.scala
@@ -0,0 +1,74 @@
+package scala.tools.nsc.interactive
+
+import java.io.{Reader, Writer}
+
+import Pickler._
+import Lexer.EOF
+
+abstract class LogReplay {
+ def logreplay(event: String, x: => Boolean): Boolean
+ def logreplay[T: Pickler](event: String, x: => Option[T]): Option[T]
+ def close()
+ def flush()
+}
+
+class Logger(wr0: Writer) extends LogReplay {
+ val wr = new PrettyWriter(wr0)
+ private var first = true
+ private def insertComma() = if (first) first = false else wr.write(",")
+
+ def logreplay(event: String, x: => Boolean) = {
+ val xx = x
+ if (xx) { insertComma(); pkl[Unit].labelled(event).pickle(wr, ()) }
+ xx
+ }
+ def logreplay[T: Pickler](event: String, x: => Option[T]) = {
+ val xx = x
+ xx match {
+ case Some(y) => insertComma(); pkl[T].labelled(event).pickle(wr, y)
+ case None =>
+ }
+ xx
+ }
+ def close() { wr.close() }
+ def flush() { wr.flush() }
+}
+
+object NullLogger extends LogReplay {
+ def logreplay(event: String, x: => Boolean) = x
+ def logreplay[T: Pickler](event: String, x: => Option[T]) = x
+ def close() {}
+ def flush() {}
+}
+
+class Replayer(raw: Reader) extends LogReplay {
+ private val rd = new Lexer(raw)
+ private var nextComma = false
+
+ private def eatComma() =
+ if (nextComma) { rd.accept(','); nextComma = false }
+
+ def logreplay(event: String, x: => Boolean) =
+ if (rd.token == EOF) NullLogger.logreplay(event, x)
+ else {
+ eatComma()
+ pkl[Unit].labelled(event).unpickle(rd) match {
+ case UnpickleSuccess(_) => nextComma = true; true
+ case _ => false
+ }
+ }
+
+ def logreplay[T: Pickler](event: String, x: => Option[T]) =
+ if (rd.token == EOF) NullLogger.logreplay(event, x)
+ else {
+ eatComma()
+ pkl[T].labelled(event).unpickle(rd) match {
+ case UnpickleSuccess(y) => nextComma = true; Some(y)
+ case _ => None
+ }
+ }
+
+ def close() { raw.close() }
+ def flush() {}
+}
+
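Finally, a hedged sketch of the log-then-replay round trip these classes exist for; ReplayDemo, the event names and the sample values are invented, while the real client is the interactive Global, as the import change at the top of this patch shows.

    import java.io.{StringReader, StringWriter}
    import scala.tools.nsc.interactive.{Logger, Replayer}

    object ReplayDemo {
      def main(args: Array[String]): Unit = {
        // Record two events while running "live".
        val sw  = new StringWriter
        val log = new Logger(sw)
        log.logreplay("compiled", true)
        log.logreplay("unitOf", Some("Foo.scala"))
        log.flush()

        // Replay the log: the recorded answers are returned and the
        // by-name arguments are not re-evaluated.
        val replay = new Replayer(new StringReader(sw.toString))
        println(replay.logreplay("compiled", false))              // true
        println(replay.logreplay("unitOf", None: Option[String])) // Some(Foo.scala)
      }
    }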