Diffstat (limited to 'yamlesque/src')
-rw-r--r--  yamlesque/src/Parser.scala                   406
-rw-r--r--  yamlesque/src/Writer.scala                    74
-rw-r--r--  yamlesque/src/YamlNodes.scala                 81
-rw-r--r--  yamlesque/src/main/scala/YamlParser.scala    258
-rw-r--r--  yamlesque/src/main/scala/YamlPrinter.scala    48
-rw-r--r--  yamlesque/src/main/scala/formats.scala         8
-rw-r--r--  yamlesque/src/main/scala/package.scala        26
-rw-r--r--  yamlesque/src/main/scala/yamlValues.scala     23
-rw-r--r--  yamlesque/src/package.scala                   30
-rw-r--r--  yamlesque/src/test/scala/ParserTests.scala   221
10 files changed, 591 insertions, 584 deletions
diff --git a/yamlesque/src/Parser.scala b/yamlesque/src/Parser.scala
new file mode 100644
index 0000000..6f6c01f
--- /dev/null
+++ b/yamlesque/src/Parser.scala
@@ -0,0 +1,406 @@
+package yamlesque
+import java.io.Reader
+
+trait Tokenizer {
+
+ def in: Reader
+
+ protected sealed trait TokenKind
+ protected case object Key extends TokenKind
+ protected case object Item extends TokenKind
+ protected case object Scalar extends TokenKind
+ protected case object Start extends TokenKind
+ protected case object End extends TokenKind
+
+ protected case object QuotedString extends TokenKind // a quoted "..." string; a '#' inside does not start a comment
+ protected case object Verbatim extends TokenKind // | or >
+
+ protected val EOF = -1.toChar
+
+ private var line = 1
+ private var col = 0
+ protected var ch: Char = 0
+ private var cr: Boolean = false // was the previous char a newline?
+
+ protected var tokenKind: TokenKind = End
+ protected var tokenValue: String = ""
+ protected var tokenLine: Int = 1
+ protected var tokenCol: Int = 1
+
+ private def readChar(): Unit = if (ch != EOF) {
+ ch = in.read().toChar
+ col += 1
+ if (cr) {
+ cr = false
+ line += 1
+ col = 1
+ }
+ if (ch == '\n') {
+ cr = true
+ }
+ }
+ readChar()
+
+ @inline private def accept(c: Char) =
+ if (ch == c) {
+ readChar(); true
+ } else false
+
+ @inline private def skipSpace(): Unit = while (ch == ' ') readChar()
+
+ private val buffer = new StringBuilder
+
+ @inline private def nextStringOrKey() = {
+ var done = false
+ while (!done) {
+ if (accept('\n') || accept(EOF)) {
+ tokenKind = Scalar
+ tokenValue = buffer.result().trim()
+ done = true
+ } else if (accept(' ')) {
+ if (ch == '#') {
+ tokenKind = Scalar
+ tokenValue = buffer.result().trim()
+ done = true
+ } else {
+ buffer += ' '
+ }
+ } else if (accept(':')) {
+ if (accept(' ') || accept('\n') || accept(EOF)) {
+ tokenKind = Key
+ tokenValue = buffer.result().trim()
+ done = true
+ } else {
+ buffer += ':'
+ buffer += ch
+ readChar()
+ }
+ } else {
+ buffer += ch
+ readChar()
+ }
+ }
+ }
+
+ @inline private def nextQuoteOrKey() = {
+ buffer.clear()
+ while (ch != '"' && ch != EOF) {
+ if (accept('\\')) {
+ if (ch != EOF) {
+ buffer += ch
+ readChar()
+ }
+ } else {
+ buffer += ch
+ readChar()
+ }
+ }
+ readChar()
+ tokenValue = buffer.result()
+
+ skipSpace()
+ if (accept(':')) {
+ if (accept(' ') || accept('\n') || accept(EOF)) {
+ tokenKind = Key
+ } else {
+ // this is an irregular situation and the parser will error out later
+ tokenKind = QuotedString
+ }
+ } else {
+ tokenKind = QuotedString
+ }
+ }
+
+ @annotation.tailrec
+ @inline
+ protected final def nextToken(): Unit = {
+ buffer.clear()
+ skipSpace()
+ if (accept(EOF)) {
+ tokenKind = End
+ tokenLine = line
+ tokenCol = col - 1
+ } else if (accept('#')) {
+ while (ch != '\n' && ch != EOF) {
+ readChar()
+ }
+ nextToken()
+ } else if (accept('\n')) {
+ nextToken()
+ } else if (accept('-')) {
+ tokenLine = line
+ tokenCol = col - 1
+ if (accept('-')) {
+ if (accept('-')) {
+ while (ch != '\n' && ch != EOF) readChar()
+ tokenKind = Start
+ } else {
+ buffer ++= "--"
+ buffer += ch
+ readChar()
+ nextStringOrKey()
+ }
+ } else if (accept(' ') || accept('\n') || accept(EOF)) {
+ tokenKind = Item
+ } else {
+ buffer += '-'
+ buffer += ch
+ readChar()
+ nextStringOrKey()
+ }
+ } else if (ch == '|' || ch == '>') {
+ val marker = ch
+ readChar()
+ if (accept('\n') || accept(EOF)) {
+ nextVerbatimBlock(tokenCol, marker == '>')
+ } else {
+ buffer += marker
+ buffer += ch
+ readChar()
+ nextStringOrKey()
+ }
+ } else if (accept('"')) {
+ tokenLine = line
+ tokenCol = col - 1
+ nextQuoteOrKey()
+ } else {
+ tokenLine = line
+ tokenCol = col
+ nextStringOrKey()
+ }
+ }
+ nextToken()
+
+ protected def nextVerbatimBlock(minCol: Int, foldLines: Boolean) = {
+ buffer.clear()
+ var startCol = 0
+ var lastNonEmptyLine = line
+
+ // find start column, whitespace is significant
+ while (accept('\n')) {
+ buffer += '\n'
+ }
+ skipSpace()
+ startCol = col
+ tokenLine = line
+
+ if (startCol <= minCol) {
+ tokenCol = minCol + 1
+ tokenKind = Verbatim
+ tokenValue = ""
+ } else {
+ var done = false
+ while (!done) {
+ // skip spaces until we reach starting column
+ while (ch == ' ' && col < startCol && ch != EOF) readChar()
+
+ if (ch == '\n') {
+ readChar()
+ done = ch == EOF
+ } else if (col == startCol) {
+ for (i <- lastNonEmptyLine until line - 1) { buffer += '\n' }
+ lastNonEmptyLine = line
+ var eol = false
+ while (!eol) {
+ if (ch == '\n' || ch == EOF) eol = true
+ if (ch != EOF) {
+ buffer += ch
+ readChar()
+ }
+ }
+ done = ch == EOF
+ } else {
+ done = true
+ }
+ }
+
+ tokenKind = Verbatim
+ tokenCol = startCol
+ tokenValue = buffer.result()
+ }
+ }
+
+}
+
+object Parser {
+ case class ParseException(message: String) extends RuntimeException(message)
+}
+
+class Parser(val in: Reader) extends Tokenizer with Iterator[Node] {
+
+ private def friendlyKind(kind: TokenKind) = kind match {
+ case Key => "map key"
+ case Item => "list item"
+ case Scalar => "scalar"
+ case QuotedString => "string"
+ case Verbatim => "verbatim block"
+ case Start => "start of document"
+ case End => "EOF"
+ }
+
+ private def friendlyValue = tokenKind match {
+ case Key => tokenValue + ":"
+ case Item => "-"
+ case Scalar => tokenValue
+ case QuotedString => s""""$tokenValue""""
+ case Verbatim => "verbatim block " + tokenValue.takeWhile(_ != '\n') + "..."
+ case Start => "---"
+ case End => "EOF"
+ }
+
+ private def fatal(message: String): Nothing = {
+ val info = s"$tokenLine:$tokenCol: $message\n"
+ val token = (" " * tokenCol) + friendlyValue + "\n"
+ val caret = (" " * tokenCol) + "^\n"
+ throw new Parser.ParseException(info + token + caret)
+ }
+
+ private var node: Node = null
+
+ // the first document does not strictly need to start with a ---
+ private def initDocument() = {
+ if (tokenKind == Start) {
+ nextDocument()
+ } else {
+ nextNode()
+ }
+ }
+ initDocument()
+
+ // subsequent documents require an explicit start
+ private def nextDocument() = {
+ tokenKind match {
+ case Start =>
+ nextToken()
+ nextNode()
+ case _ =>
+ fatal(
+ s"expected ${friendlyKind(Start)}, but found ${friendlyKind(tokenKind)}"
+ )
+ }
+ }
+
+ private def nextNode(): Unit = {
+ tokenKind match {
+ case Key => nextMap()
+ case Item => nextList()
+ case Scalar => nextString()
+ case QuotedString =>
+ node = Str(tokenValue)
+ nextToken()
+ case Verbatim =>
+ node = Str(tokenValue)
+ nextToken()
+ case Start | End =>
+ node = Null
+ }
+ }
+
+ private def nextMap(): Unit = {
+ val y = Obj()
+ val startCol = tokenCol
+
+ do {
+ if (tokenKind != Key) {
+ fatal(
+ s"expected ${friendlyKind(Key)}, but found ${friendlyKind(tokenKind)}"
+ )
+ }
+ if (tokenCol != startCol) {
+ fatal(s"${friendlyKind(Key)} is not aligned")
+ }
+
+ val key = tokenValue
+ nextToken()
+
+ tokenKind match {
+ case Start | End =>
+ y.obj(key) = Null
+ // special case: we allow lists to start after a key without requiring an indent
+ case Item if tokenCol == startCol =>
+ nextNode()
+ y.obj(key) = node
+ case _ if tokenCol <= startCol =>
+ y.obj(key) = Null
+ case _ =>
+ nextNode()
+ y.obj(key) = node
+ }
+ } while (tokenCol >= startCol && tokenKind != Start && tokenKind != End)
+ node = y
+ }
+
+ private def nextList(): Unit = {
+ val y = Arr()
+ val startCol = tokenCol
+
+ do {
+ if (tokenKind != Item) {
+ fatal(
+ s"expected ${friendlyKind(Item)}, but found ${friendlyKind(tokenKind)}"
+ )
+ }
+ if (tokenCol != startCol) {
+ fatal(s"${friendlyKind(Item)} is not aligned")
+ }
+
+ nextToken()
+
+ tokenKind match {
+ case Start | End =>
+ y.arr += Null
+ case _ if tokenCol <= startCol =>
+ y.arr += Null
+ case _ =>
+ nextNode()
+ y.arr += node
+ }
+ } while (tokenCol >= startCol && tokenKind != Start && tokenKind != End)
+ node = y
+ }
+
+ private def nextString(): Unit = {
+ val buffer = new StringBuilder
+ val startCol = tokenCol
+
+ buffer ++= tokenValue
+ nextToken()
+
+ while (tokenCol >= startCol && tokenKind != Start && tokenKind != End) {
+ if (tokenKind != Scalar) {
+ fatal(
+ s"expected ${friendlyKind(Scalar)}, but found ${friendlyKind(tokenKind)}"
+ )
+ }
+ buffer += ' '
+ buffer ++= tokenValue
+ nextToken()
+ }
+ node = specializeString(buffer.result())
+ }
+
+ private def specializeString(str: String) = str match {
+ case "null" => Null
+ case "true" => Bool(true)
+ case "false" => Bool(false)
+ case s =>
+ try {
+ Num(s.toDouble)
+ } catch {
+ case _: NumberFormatException => Str(s)
+ }
+ }
+
+ private var reachedEnd = false
+ def hasNext: Boolean = !reachedEnd
+ def next(): Node = {
+ val result = node
+ if (tokenKind == End) {
+ reachedEnd = true
+ } else {
+ nextDocument()
+ }
+ result
+ }
+
+}
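
For orientation, a minimal sketch of driving the new Parser directly (not part of this commit; the demo object and input string are illustrative). Parser implements Iterator[Node], so a `---`-separated stream yields one Node per document, and per the comment above the first document may omit the leading `---`:

    object ParserDemo extends App {
      import java.io.StringReader
      import yamlesque.{Node, Parser}

      // Two documents; the first one starts without an explicit `---`.
      val input = "name: a\n---\nname: b\n"
      val parser = new Parser(new StringReader(input))
      while (parser.hasNext) {
        val doc: Node = parser.next() // one Obj node per document
        println(doc)
      }
      // Malformed input raises Parser.ParseException with a line:col message.
    }
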
diff --git a/yamlesque/src/Writer.scala b/yamlesque/src/Writer.scala
new file mode 100644
index 0000000..e387ef9
--- /dev/null
+++ b/yamlesque/src/Writer.scala
@@ -0,0 +1,74 @@
+package yamlesque
+
+object Writer {
+
+ def write(nodes: Iterable[Node]): String = {
+ val buffer = new StringBuilder
+ write(nodes, buffer)
+ buffer.result()
+ }
+ def write(nodes: Iterable[Node], buffer: StringBuilder): Unit = {
+ val it = nodes.iterator
+ while (it.hasNext) {
+ writeCompact(buffer, true, 0, it.next())
+ if (it.hasNext) buffer ++= "---\n"
+ }
+ }
+
+ private def writeCompact(
+ buffer: StringBuilder,
+ startOfLine: Boolean,
+ indent: Int,
+ node: Node
+ ): Unit = {
+ node match {
+ case Null =>
+ buffer ++= "null\n"
+ case Bool(true) =>
+ buffer ++= "true\n"
+ case Bool(false) =>
+ buffer ++= "false\n"
+ case Num(num) =>
+ buffer ++= num.toString
+ buffer += '\n'
+ case Str(value) =>
+ buffer ++= value
+ buffer += '\n'
+ case Arr(values) =>
+ var doIndent = startOfLine
+ for (item <- values) {
+ if (doIndent) {
+ buffer ++= " " * indent
+ }
+ doIndent = true
+ item match {
+ case Arr(_) =>
+ buffer ++= "-\n"
+ writeCompact(buffer, true, indent + 1, item)
+ case _ =>
+ buffer ++= "- "
+ writeCompact(buffer, false, indent + 1, item)
+ }
+ }
+ case Obj(values) =>
+ var doIndent = startOfLine
+ for ((key, value) <- values) {
+ if (doIndent) {
+ buffer ++= " " * indent
+ }
+ doIndent = true
+
+ buffer ++= key
+ value match {
+ case Str(_) | Bool(_) | Num(_) | Null =>
+ buffer ++= ": "
+ writeCompact(buffer, false, indent, value)
+ case _ =>
+ buffer ++= ":\n"
+ writeCompact(buffer, true, indent + 1, value)
+ }
+ }
+ }
+ }
+
+}
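
A small sketch (illustrative, not part of this commit) of what Writer.write emits for a hand-built node; as writeCompact above shows, nested blocks are indented by one space per level and multiple documents are separated by `---`:

    object WriterDemo extends App {
      import yamlesque.{Arr, Obj, Str, Writer}

      val doc = Obj(
        "name" -> Str("yamlesque"),
        "tags" -> Arr(Str("yaml"), Str("scala"))
      )
      print(Writer.write(Seq(doc)))
      // name: yamlesque
      // tags:
      //  - yaml
      //  - scala
    }
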
diff --git a/yamlesque/src/YamlNodes.scala b/yamlesque/src/YamlNodes.scala
new file mode 100644
index 0000000..eebe783
--- /dev/null
+++ b/yamlesque/src/YamlNodes.scala
@@ -0,0 +1,81 @@
+package yamlesque
+
+import scala.collection.mutable
+
+sealed trait Node {
+ def isObj: Boolean = false
+ def isArr: Boolean = false
+ def isStr: Boolean = false
+ def isNum: Boolean = false
+ def isBool: Boolean = false
+ def isNull: Boolean = false
+
+ /** Returns the key-value map of this node. Fails if this is not an [[Obj]]. */
+ def obj: mutable.Map[String, Node] = sys.error("not an object")
+ def arr: mutable.ArrayBuffer[Node] = sys.error("not an array")
+ def str: String = sys.error("not a string")
+ def num: Double = sys.error("not a number")
+ def bool: Boolean = sys.error("not a boolean")
+
+}
+object Node {
+ import scala.language.implicitConversions
+ // implicit def SeqToYaml[T](items: IterableOnce[T])
+ // (implicit f: T => Node) = Arr.from(items.map(f))
+ // implicit def JsonableDict[T](items: TraversableOnce[(String, T)])
+ // (implicit f: T => Value)= Obj.from(items.map(x => (x._1, f(x._2))))
+ implicit def StringToYaml(s: CharSequence): Str = Str(s.toString)
+ implicit def ByteToYaml(x: Byte): Num = Num(x)
+ implicit def ShortToYaml(x: Short): Num = Num(x)
+ implicit def IntToYaml(x: Int): Num = Num(x)
+ implicit def LongToYaml(x: Long): Num = Num(x)
+ implicit def FloatToYaml(x: Float): Num = Num(x)
+ implicit def DoubleToYaml(x: Double): Num = Num(x)
+ implicit def BoolToYaml(x: Boolean): Bool = Bool(x)
+ implicit def NullToYaml(x: scala.Null): Null.type = Null
+}
+
+case class Obj(override val obj: mutable.LinkedHashMap[String, Node])
+ extends Node {
+ override def isObj = true
+}
+object Obj {
+ def apply(values: (String, Node)*): Obj = {
+ val builder = mutable.LinkedHashMap.newBuilder[String, Node]
+ builder.sizeHint(values.length)
+ for (v <- values) {
+ builder += v
+ }
+ Obj(builder.result())
+ }
+}
+
+case class Arr(override val arr: mutable.ArrayBuffer[Node]) extends Node {
+ override def isArr = true
+}
+object Arr {
+ def apply(values: Node*): Arr = {
+ val builder = mutable.ArrayBuffer.newBuilder[Node]
+ builder.sizeHint(values.length)
+ for (v <- values) {
+ builder += v
+ }
+ Arr(builder.result())
+ }
+}
+
+case class Str(override val str: String) extends Node {
+ override def isStr: Boolean = true
+}
+
+case class Num(override val num: Double) extends Node {
+ override def isNum: Boolean = true
+}
+
+case class Bool(override val bool: Boolean) extends Node {
+ override def isBool: Boolean = true
+}
+
+case object Null extends Node {
+ override def isNull = true
+}
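
A sketch of consuming the new Node ADT (example values are mine, not from the commit). The accessors obj/arr/str/num/bool throw for the wrong variant, and the implicit conversions in the Node companion should let plain Scala values stand in where a Node is expected:

    object NodesDemo extends App {
      import yamlesque.{Bool, Node, Num, Obj, Str}

      // Direct construction with the case classes above.
      val doc = Obj(
        "name"    -> Str("yamlesque"),
        "version" -> Num(0.3),
        "public"  -> Bool(true)
      )
      println(doc.isObj)              // true
      println(doc.obj("name").str)    // yamlesque
      println(doc.obj("version").num) // 0.3

      // Assumption: the companion-object implicits convert plain values.
      val doc2: Node = Obj("name" -> "yamlesque", "stars" -> 100)
      println(doc2.obj("stars").num)  // 100.0
    }
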
diff --git a/yamlesque/src/main/scala/YamlParser.scala b/yamlesque/src/main/scala/YamlParser.scala
deleted file mode 100644
index f7a0f9b..0000000
--- a/yamlesque/src/main/scala/YamlParser.scala
+++ /dev/null
@@ -1,258 +0,0 @@
-package yamlesque
-
-import annotation.{switch, tailrec}
-import scala.collection.mutable.ListBuffer
-
-object YamlParser extends (Iterator[Char] => YamlValue) {
-
- sealed trait TokenKind
- object TokenKind {
- case object EOF extends TokenKind
- case object BAD extends TokenKind
- case object DOCSTART extends TokenKind
- case object DOCEND extends TokenKind
- case object MAPPING extends TokenKind
- case object ITEM extends TokenKind
- case object IDENTIFIER extends TokenKind
- case object COMMENT extends TokenKind
- }
- import TokenKind._
-
- case class Token(val kind: TokenKind, value: String = "") {
- var line: Int = 0
- var col: Int = 0
- def setPos(line: Int, col: Int): this.type = {
- this.col = col
- this.line = line
- this
- }
- override def toString() = {
- s"($line, $col): " + super.toString
- }
- }
-
- object Chars {
- final val LF = '\u000A'
- final val CR = '\u000D'
- final val SU = '\u001A'
-
- @inline def isSpace(ch: Char): Boolean = ch match {
- case ' ' | '\t' => true
- case _ => false
- }
-
- @inline def isBlank(ch: Char): Boolean = ch match {
- case ' ' | '\t' | CR | LF | SU => true
- case _ => false
- }
- }
-
- class Scanner(chars: Iterator[Char]) extends Iterator[Token] {
- import Chars._
-
- private var ch0: Char = 0
- private var ch1: Char = 0
- private var ch2: Char = 0
- private var pos: Long = 0
- private var line: Int = 0
- private var col: Int = 0
-
- private def skipChar(): Unit = {
- val ch: Char = if (chars.hasNext) {
- chars.next()
- } else {
- SU
- }
- pos += 1
- col += 1
- ch0 = ch1
- ch1 = ch2
- ch2 = ch
- }
- private def skipChars(n: Int): Unit = {
- var i = 0
- while (i < n) { skipChar(); i += 1 }
- }
- def init() = {
- skipChars(3)
- pos = 0
- col = 0
- line = 0
- }
-
- private var buffer = new StringBuilder()
- private def putChar(): Unit = {
- buffer.append(ch0)
- skipChars(1)
- }
- private def tokenValue(): String = {
- val str = buffer.result()
- buffer.clear()
- str
- }
-
- private var token: Token = Token(BAD, "not yet initialized")
-
- @tailrec private def fetchToken(): Unit = {
- ch0 match {
- case ':' if isBlank(ch1) =>
- token = Token(MAPPING).setPos(line, col)
- skipChars(1)
- case '-' if isBlank(ch1) =>
- token = Token(ITEM).setPos(line, col)
- skipChars(1)
- case '-' if ch1 == '-' && ch2 == '-' =>
- token = Token(DOCSTART).setPos(line, col)
- skipChars(3)
- case '.' if ch1 == '.' && ch2 == '.' =>
- token = Token(DOCEND).setPos(line, col)
- skipChars(3)
- case '#' =>
- val l = line
- val c = col
- skipChars(1)
- while (ch0 != LF && ch0 != SU) {
- putChar()
- }
- token = Token(COMMENT, tokenValue()).setPos(l, c)
- buffer.clear()
- case c if isSpace(c) =>
- skipChars(1)
- fetchToken()
- case LF =>
- skipChars(1)
- col = 0
- line += 1
- fetchToken()
- case CR =>
- skipChars(1)
- if (ch0 == LF) {
- skipChars(1)
- }
- col = 0
- line += 1
- fetchToken()
- case SU =>
- token = Token(EOF).setPos(line, col)
- skipChars(1)
- case _ => fetchScalar()
- }
- }
-
- private def fetchScalar(): Unit = {
- val l = line
- val c = col
- @tailrec def fetchRest(): Unit = ch0 match {
- case ':' if isBlank(ch1) =>
- token = Token(IDENTIFIER, tokenValue())
- case LF =>
- token = Token(IDENTIFIER, tokenValue())
- case SU =>
- token = Token(IDENTIFIER, tokenValue())
- case c =>
- putChar()
- fetchRest()
- }
- fetchRest()
- token.setPos(l, c)
- }
-
- override def hasNext: Boolean = true
- override def next(): Token = {
- fetchToken()
- token
- }
- init()
- }
-
- def parse(tokens: Iterator[Token]): YamlValue = {
- var token0 = tokens.next()
- var token1 = tokens.next()
-
- def readNext(): Unit = {
- token0 = token1
- token1 = tokens.next()
- }
-
- def fatal(message: String, token: Token) = {
- val completeMessage =
- s"parse error at line ${token.line}, column ${token.col}: $message"
- throw new ParseException(completeMessage)
- }
-
- def wrongKind(found: Token, required: TokenKind*) = {
- fatal(
- s"token kind not allowed at this position\n" +
- s" found: ${found.kind}\n" +
- s" required: ${required.mkString(" or ")}\n" +
- " " * found.col + found.value + "\n" +
- " " * found.col + "^",
- found
- )
- }
-
- def nextSequence() = {
- val startCol = token0.col
- val items = new ListBuffer[YamlValue]
- while (startCol <= token0.col && token0.kind != EOF) {
- token0.kind match {
- case ITEM =>
- readNext()
- items += nextBlock(startCol + 1)
- case _ => wrongKind(token0, ITEM)
- }
- }
- YamlSequence(items.toVector)
- }
-
- def nextMapping() = {
- val startCol = token0.col
- val fields = new ListBuffer[(String, YamlValue)]
- while (startCol <= token0.col && token0.kind != EOF) {
- token0.kind match {
- case IDENTIFIER =>
- val key = token0.value
- readNext()
- token0.kind match {
- case MAPPING =>
- readNext()
- val value = nextBlock(startCol + 1)
- fields += key -> value
- case _ => wrongKind(token0, MAPPING)
- }
-
- case _ => wrongKind(token0, IDENTIFIER)
- }
- }
- YamlMapping(fields.toMap)
- }
-
- def nextBlock(startCol: Int): YamlValue = {
- if (token0.col < startCol) {
- YamlEmpty
- } else {
- token0.kind match {
- case IDENTIFIER =>
- if (token1.kind == MAPPING && token0.line == token1.line) {
- nextMapping()
- } else {
- val y = YamlScalar(token0.value)
- readNext()
- y
- }
- case ITEM =>
- nextSequence()
- case EOF => YamlEmpty
- case _ => wrongKind(token0, IDENTIFIER, ITEM)
- }
- }
- }
-
- nextBlock(0)
- }
-
- def apply(data: Iterator[Char]): YamlValue = parse(new Scanner(data))
-
-}
-
-class ParseException(val message: String) extends Exception(message)
diff --git a/yamlesque/src/main/scala/YamlPrinter.scala b/yamlesque/src/main/scala/YamlPrinter.scala
deleted file mode 100644
index 083a8a8..0000000
--- a/yamlesque/src/main/scala/YamlPrinter.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-package yamlesque
-
-import annotation.tailrec
-
-class YamlPrinter(compact: Boolean = true) extends (YamlValue => String) {
-
- def apply(value: YamlValue): String = {
- val str = new StringBuilder()
- def p(value: YamlValue, indentation: Int): Unit = value match {
- case YamlScalar(value) =>
- str ++= " " * indentation
- str ++= value
- str += '\n'
- case YamlSequence(items) =>
- for (item <- items) {
- str ++= " " * indentation
- item match {
- case YamlScalar(v) if compact =>
- str ++= "- "
- str ++= v
- str += '\n'
- case _ =>
- str ++= "-\n"
- p(item, indentation + 1)
- }
- }
- case YamlMapping(fields) =>
- for ((key, value) <- fields) {
- str ++= " " * indentation
- str ++= key
- value match {
- case YamlScalar(v) if compact =>
- str ++= ": "
- str ++= v
- str += '\n'
- case _ =>
- str ++= ":\n"
- p(value, indentation + 1)
- }
- }
- case YamlEmpty =>
- str += '\n'
- }
- p(value, 0)
- str.toString
- }
-
-}
diff --git a/yamlesque/src/main/scala/formats.scala b/yamlesque/src/main/scala/formats.scala
deleted file mode 100644
index 0dbbacc..0000000
--- a/yamlesque/src/main/scala/formats.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package yamlesque
-
-trait YamlReader[A] {
- def read(yaml: YamlValue): A
-}
-trait YamlWriter[A] {
- def write(a: A): YamlValue
-}
diff --git a/yamlesque/src/main/scala/package.scala b/yamlesque/src/main/scala/package.scala
deleted file mode 100644
index c40ca70..0000000
--- a/yamlesque/src/main/scala/package.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-package yamlesque
-
-object `package` {
-
- def deserializationError(msg: String,
- cause: Throwable = null,
- fieldNames: List[String] = Nil) =
- throw new DeserializationException(msg, cause, fieldNames)
- def serializationError(msg: String) = throw new SerializationException(msg)
-
- implicit class RichAny[A](val any: A) extends AnyVal {
- def toYaml(implicit writer: YamlWriter[A]): YamlValue = writer.write(any)
- }
-
- implicit class RichString(val str: String) extends AnyVal {
- def parseYaml: YamlValue = YamlParser(str.toIterator)
- }
-
-}
-
-case class DeserializationException(msg: String,
- cause: Throwable = null,
- fieldNames: List[String] = Nil)
- extends RuntimeException(msg, cause)
-
-class SerializationException(msg: String) extends RuntimeException(msg)
diff --git a/yamlesque/src/main/scala/yamlValues.scala b/yamlesque/src/main/scala/yamlValues.scala
deleted file mode 100644
index 4432b9d..0000000
--- a/yamlesque/src/main/scala/yamlValues.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package yamlesque
-
-sealed trait YamlValue {
- def print: String = YamlValue.DefaultPrinter(this)
- def convertTo[A: YamlReader]: A = implicitly[YamlReader[A]].read(this)
-}
-object YamlValue {
- val DefaultPrinter = new YamlPrinter(compact = true)
-}
-
-case class YamlMapping(fields: Map[String, YamlValue]) extends YamlValue
-object YamlMapping {
- def apply(items: (String, YamlValue)*) = new YamlMapping(Map(items: _*))
-}
-
-case class YamlSequence(items: Vector[YamlValue]) extends YamlValue
-object YamlSequence {
- def apply(items: YamlValue*) = new YamlSequence(items.toVector)
-}
-
-case class YamlScalar(value: String) extends YamlValue
-
-case object YamlEmpty extends YamlValue
diff --git a/yamlesque/src/package.scala b/yamlesque/src/package.scala
new file mode 100644
index 0000000..43ab30b
--- /dev/null
+++ b/yamlesque/src/package.scala
@@ -0,0 +1,30 @@
+package object yamlesque {
+ import java.io.StringReader
+
+ def read(input: String): Node = {
+ (new Parser(new StringReader(input))).next()
+ }
+
+ def tryRead(input: String): Either[String, Node] =
+ try {
+ Right(read(input))
+ } catch {
+ case Parser.ParseException(msg) => Left(msg)
+ }
+
+ def readAll(input: String): List[Node] = {
+ (new Parser(new StringReader(input))).toList
+ }
+
+ // TODO: the parser can actually recover from errors when a new document begins
+ def tryReadAll(input: String): Either[String, List[Node]] =
+ try {
+ Right((new Parser(new StringReader(input))).toList)
+ } catch {
+ case Parser.ParseException(msg) => Left(msg)
+ }
+
+ def write(nodes: Node*): String = write(nodes)
+ def write(nodes: Iterable[Node]): String = Writer.write(nodes)
+
+}
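
The package object above is the intended entry point; a short usage sketch (illustrative input, not part of the commit):

    object ReadWriteDemo extends App {
      import yamlesque._

      val node = read("name: yamlesque\ntags:\n  - yaml\n  - scala\n")
      println(node.obj("name").str)                           // yamlesque
      println(node.obj("tags").arr.map(_.str).mkString(", ")) // yaml, scala

      // readAll handles multi-document streams; tryRead reports errors as Left.
      val docs = readAll("a: 1\n---\nb: 2\n")
      println(docs.length)                                    // 2

      // Round-trip back through the Writer.
      print(write(node))
    }
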
diff --git a/yamlesque/src/test/scala/ParserTests.scala b/yamlesque/src/test/scala/ParserTests.scala
deleted file mode 100644
index 9229a14..0000000
--- a/yamlesque/src/test/scala/ParserTests.scala
+++ /dev/null
@@ -1,221 +0,0 @@
-package yamlesque
-
-import utest._
-
-object ParserTests extends TestSuite {
-
- val tests = Tests {
- "parse empty string" - {
- "".parseYaml ==> YamlEmpty
- }
- "parse simple scalar" - {
- "hello".parseYaml ==> YamlScalar("hello")
- }
- "parse scalar with space" - {
- "hello world".parseYaml ==> YamlScalar("hello world")
- }
- "parse scalar with a colon" - {
- "hello:world".parseYaml ==> YamlScalar("hello:world")
- }
- "parse scalar with a minus" - {
- "hello-world".parseYaml ==> YamlScalar("hello-world")
- }
- "parse scalar starting with a colon" - {
- ":hello world".parseYaml ==> YamlScalar(":hello world")
- }
- "parse scalar starting with a minus" - {
- "-hello world".parseYaml ==> YamlScalar("-hello world")
- }
- "parse empty list" - {
- "-".parseYaml ==> YamlSequence(YamlEmpty)
- }
- "parse a simple list" - {
- "-\n a\n-\n b\n-\n c".parseYaml ==> YamlSequence(YamlScalar("a"),
- YamlScalar("b"),
- YamlScalar("c"))
- }
- "parse a simple compact list" - {
- "- a\n- b\n - c".parseYaml ==> YamlSequence(YamlScalar("a"),
- YamlScalar("b"),
- YamlScalar("c"))
- }
- "fail to parse a list with a non-item token" - {
- val e = intercept[ParseException] {
- "- a\n- b\n -c".parseYaml // -c is missing a space between '-' and 'c'
- }
- assert(e.message.contains("token kind"))
- }
- "parse a nested list" - {
- val ls =
- s"""|- a0
- |- b0
- |-
- | - a1
- | - b1
- | -
- | - a2
- | - b2
- |- c0
- |- - a1
- | - b1
- |- - - - a4
- |""".stripMargin
- val result = YamlSequence(
- YamlScalar("a0"),
- YamlScalar("b0"),
- YamlSequence(
- YamlScalar("a1"),
- YamlScalar("b1"),
- YamlSequence(
- YamlScalar("a2"),
- YamlScalar("b2")
- )
- ),
- YamlScalar("c0"),
- YamlSequence(
- YamlScalar("a1"),
- YamlScalar("b1")
- ),
- YamlSequence(
- YamlSequence(
- YamlSequence(
- YamlScalar("a4")
- )
- )
- )
- )
- ls.parseYaml ==> result
- }
- "parse a simple mapping" - {
- "a:\n b".parseYaml ==> YamlMapping("a" -> YamlScalar("b"))
- }
- "parse a double mapping" - {
- "a:\n b\nc:\n d".parseYaml ==> YamlMapping(
- "a" -> YamlScalar("b"),
- "c" -> YamlScalar("d")
- )
- }
- "parse a simple compact mapping" - {
- "a: b".parseYaml ==> YamlMapping("a" -> YamlScalar("b"))
- }
- "parse a double compact mapping" - {
- "a: b\nc: d".parseYaml ==> YamlMapping(
- "a" -> YamlScalar("b"),
- "c" -> YamlScalar("d")
- )
- }
- "parse a simple mapping without a value" - {
- "a:\n".parseYaml ==> YamlMapping(
- "a" -> YamlEmpty
- )
- }
- "parse a mapping without a value" - {
- "k1: v1\nk2:\nk3: v3".parseYaml ==> YamlMapping(
- "k1" -> YamlScalar("v1"),
- "k2" -> YamlEmpty,
- "k3" -> YamlScalar("v3")
- )
- }
- "parse a nested mapping" - {
- val m =
- s"""|k1:
- | k11: a
- | k12: b
- |k2:
- | k21:
- | k31:
- | k41: a
- | k22:
- | b
- |k3: a
- |k4: k41: k42: k43: a
- |""".stripMargin
- m.parseYaml ==> YamlMapping(
- "k1" -> YamlMapping(
- "k11" -> YamlScalar("a"),
- "k12" -> YamlScalar("b")
- ),
- "k2" -> YamlMapping(
- "k21" -> YamlMapping(
- "k31" -> YamlMapping(
- "k41" -> YamlScalar("a")
- )
- ),
- "k22" -> YamlScalar("b")
- ),
- "k3" -> YamlScalar("a"),
- "k4" -> YamlMapping(
- "k41" -> YamlMapping(
- "k42" -> YamlMapping(
- "k43" -> YamlScalar("a")
- )
- )
- )
- )
- }
- "maps and sequences" - {
- val yaml = YamlMapping(
- "key1" -> YamlScalar("value1"),
- "key2" -> YamlMapping(
- "key1" -> YamlScalar("value1"),
- "key2" -> YamlScalar("value1"),
- "key3" -> YamlSequence(
- YamlScalar("a1"),
- YamlSequence(
- YamlScalar("a1"),
- YamlScalar("a2"),
- YamlScalar("a3")
- ),
- YamlScalar("a3"),
- YamlMapping(
- "a1" -> YamlScalar("b"),
- "a2" -> YamlScalar("b"),
- "a3" -> YamlScalar("b"),
- "a4" -> YamlScalar("b")
- ),
- YamlScalar("a4"),
- YamlScalar("a4")
- ),
- "key4" -> YamlScalar("value1"),
- "key5" -> YamlScalar("value1"),
- "key6" -> YamlScalar("value1")
- ),
- "key3" -> YamlScalar("value3")
- )
-
- val string =
- s"""|
- |key1: value1
- |key2:
- | key4:
- | value1
- | key5: value1
- | key1: value1
- | key2: value1
- | key6: value1
- | key3:
- | - a1
- | -
- | - a1
- | - a2
- | - a3
- | - a3
- | -
- | a1: b
- | a2: b
- | a3: b
- | a4: b
- | - a4
- | - a4
- |key3: value3
- |""".stripMargin
- "parse" - {
- string.parseYaml ==> yaml
- }
- "print and parse" - {
- yaml.print.parseYaml ==> yaml
- }
- }
- }
-
-}