Diffstat (limited to 'docs/examples/parsing')
-rw-r--r--  docs/examples/parsing/ArithmeticParser.scala   | 14 +++++++-------
-rw-r--r--  docs/examples/parsing/ArithmeticParsers.scala  | 10 +++++-----
-rw-r--r--  docs/examples/parsing/JSON.scala               | 14 +++++++-------
-rw-r--r--  docs/examples/parsing/ListParser.scala         |  2 +-
-rw-r--r--  docs/examples/parsing/ListParsers.scala        |  4 ++--
-rw-r--r--  docs/examples/parsing/MiniML.scala             |  6 +++---
-rw-r--r--  docs/examples/parsing/lambda/Main.scala        |  4 ++--
-rw-r--r--  docs/examples/parsing/lambda/TestParser.scala  | 18 +++++++++---------
-rw-r--r--  docs/examples/parsing/lambda/TestSyntax.scala  | 28 ++++++++++++++--------------
9 files changed, 50 insertions(+), 50 deletions(-)
diff --git a/docs/examples/parsing/ArithmeticParser.scala b/docs/examples/parsing/ArithmeticParser.scala
index e8fbee4499..99cf7a5578 100644
--- a/docs/examples/parsing/ArithmeticParser.scala
+++ b/docs/examples/parsing/ArithmeticParser.scala
@@ -15,16 +15,16 @@ import scala.util.parsing.combinator.syntactical.StdTokenParsers
* a term is a sequence of factors, separated by * or /
* a factor is a parenthesized expression or a number
*
- * @author Adriaan Moors
- */
-object arithmeticParser extends StdTokenParsers {
+ * @author Adriaan Moors
+ */
+object arithmeticParser extends StdTokenParsers {
type Tokens = StdLexical ; val lexical = new StdLexical
lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
lazy val expr = term*("+" ^^^ {(x: int, y: int) => x + y} | "-" ^^^ {(x: int, y: int) => x - y})
lazy val term = factor*("*" ^^^ {(x: int, y: int) => x * y} | "/" ^^^ {(x: int, y: int) => x / y})
lazy val factor: Parser[int] = "(" ~> expr <~ ")" | numericLit ^^ (_.toInt)
-
+
def main(args: Array[String]) {
println(
if (args.length == 1) {
@@ -37,14 +37,14 @@ object arithmeticParser extends StdTokenParsers {
}
-object arithmeticParserDesugared extends StdTokenParsers {
+object arithmeticParserDesugared extends StdTokenParsers {
type Tokens = StdLexical ; val lexical = new StdLexical
lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
lazy val expr = chainl1(term, (keyword("+").^^^{(x: int, y: int) => x + y}).|(keyword("-").^^^{(x: int, y: int) => x - y}))
lazy val term = chainl1(factor, (keyword("*").^^^{(x: int, y: int) => x * y}).|(keyword("/").^^^{(x: int, y: int) => x / y}))
- lazy val factor: Parser[int] = keyword("(").~>(expr.<~(keyword(")"))).|(numericLit.^^(x => x.toInt))
-
+ lazy val factor: Parser[int] = keyword("(").~>(expr.<~(keyword(")"))).|(numericLit.^^(x => x.toInt))
+
def main(args: Array[String]) {
println(
if (args.length == 1) {
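The two objects in ArithmeticParser.scala express the same grammar twice: first with the postfix `*` combinator, then desugared to `chainl1`, which parses `term (sep term)*` and folds the results left-associatively with the binary function produced by `sep`. A minimal runnable sketch of that desugared form, modernized to `Int` (the object name and sample input are invented for illustration):

    import scala.util.parsing.combinator.lexical.StdLexical
    import scala.util.parsing.combinator.syntactical.StdTokenParsers

    // Sketch only: the same left-associative grammar via chainl1.
    object ChainlSketch extends StdTokenParsers {
      type Tokens = StdLexical ; val lexical = new StdLexical
      lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")

      lazy val expr: Parser[Int] =
        chainl1(term, "+" ^^^ {(x: Int, y: Int) => x + y} | "-" ^^^ {(x: Int, y: Int) => x - y})
      lazy val term: Parser[Int] =
        chainl1(factor, "*" ^^^ {(x: Int, y: Int) => x * y} | "/" ^^^ {(x: Int, y: Int) => x / y})
      lazy val factor: Parser[Int] = "(" ~> expr <~ ")" | numericLit ^^ (_.toInt)

      def main(args: Array[String]): Unit =
        println(phrase(expr)(new lexical.Scanner("2 * (3 + 4)")))  // parsed: 14
    }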
diff --git a/docs/examples/parsing/ArithmeticParsers.scala b/docs/examples/parsing/ArithmeticParsers.scala
index 8fb3af7acb..62d7a61862 100644
--- a/docs/examples/parsing/ArithmeticParsers.scala
+++ b/docs/examples/parsing/ArithmeticParsers.scala
@@ -2,7 +2,7 @@ package examples.parsing
import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
-object ArithmeticParsers extends StandardTokenParsers {
+object ArithmeticParsers extends StandardTokenParsers {
lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
def expr: Parser[Any] = term ~ rep("+" ~ term | "-" ~ term)
@@ -16,11 +16,11 @@ object ArithmeticParsers extends StandardTokenParsers {
}
}
-object ArithmeticParsers1 extends StandardTokenParsers {
+object ArithmeticParsers1 extends StandardTokenParsers {
lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
val reduceList: Int ~ List[String ~ Int] => Int = {
- case i ~ ps => (i /: ps)(reduce)
+ case i ~ ps => (i /: ps)(reduce)
}
def reduce(x: Int, r: String ~ Int) = (r: @unchecked) match {
@@ -45,11 +45,11 @@ class Expr
case class BinOp(op: String, l: Expr, r: Expr) extends Expr
case class Num(n: Int) extends Expr
-object ArithmeticParsers2 extends StandardTokenParsers {
+object ArithmeticParsers2 extends StandardTokenParsers {
lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
val reduceList: Expr ~ List[String ~ Expr] => Expr = {
- case i ~ ps => (i /: ps)(reduce)
+ case i ~ ps => (i /: ps)(reduce)
}
def reduce(l: Expr, r: String ~ Expr) = BinOp(r._1, l, r._2)
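`reduceList` collapses a parse result of shape `Int ~ List[String ~ Int]` (one operand followed by operator/operand pairs); `(i /: ps)(reduce)` is the old symbolic spelling of `ps.foldLeft(i)(reduce)`. A self-contained sketch of that fold, with a local stand-in for the library's `~` pair class (all names here are invented):

    // Local stand-in for the parser library's `a ~ b` result pair.
    case class ~[+A, +B](_1: A, _2: B)

    object ReduceListSketch {
      // The fold step: combine the accumulator with one (operator, operand) pair.
      def reduce(x: Int, r: String ~ Int): Int = r match {
        case "+" ~ y => x + y
        case "-" ~ y => x - y
        case op ~ _  => sys.error("unknown operator " + op)
      }

      def main(args: Array[String]): Unit = {
        val ps = List(new ~("+", 3), new ~("-", 1))  // parsed tail of "10 + 3 - 1"
        println(ps.foldLeft(10)(reduce))             // 12, same as (10 /: ps)(reduce)
      }
    }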
diff --git a/docs/examples/parsing/JSON.scala b/docs/examples/parsing/JSON.scala
index bbba25f744..abfa242e9f 100644
--- a/docs/examples/parsing/JSON.scala
+++ b/docs/examples/parsing/JSON.scala
@@ -2,14 +2,14 @@ package examples.parsing
import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
-object JSON extends StandardTokenParsers {
+object JSON extends StandardTokenParsers {
lexical.delimiters += ("{", "}", "[", "]", ":", ",")
lexical.reserved += ("null", "true", "false")
def obj : Parser[Any] = "{" ~ repsep(member, ",") ~ "}"
def arr : Parser[Any] = "[" ~ repsep(value, ",") ~ "]"
def member: Parser[Any] = ident ~ ":" ~ value
- def value : Parser[Any] = ident | numericLit | obj | arr |
+ def value : Parser[Any] = ident | numericLit | obj | arr |
"null" | "true" | "false"
def main(args: Array[String]) {
@@ -18,20 +18,20 @@ object JSON extends StandardTokenParsers {
println(phrase(value)(tokens))
}
}
-object JSON1 extends StandardTokenParsers {
+object JSON1 extends StandardTokenParsers {
lexical.delimiters += ("{", "}", "[", "]", ":", ",")
lexical.reserved += ("null", "true", "false")
- def obj: Parser[Map[String, Any]] =
+ def obj: Parser[Map[String, Any]] =
"{" ~> repsep(member, ",") <~ "}" ^^ (Map() ++ _)
def arr: Parser[List[Any]] =
- "[" ~> repsep(value, ",") <~ "]"
+ "[" ~> repsep(value, ",") <~ "]"
- def member: Parser[(String, Any)] =
+ def member: Parser[(String, Any)] =
ident ~ ":" ~ value ^^ { case name ~ ":" ~ value => (name -> value) }
- def value: Parser[Any] =
+ def value: Parser[Any] =
ident | numericLit ^^ (_.toInt) | obj | arr |
"null" ^^^ null | "true" ^^^ true | "false" ^^^ false
diff --git a/docs/examples/parsing/ListParser.scala b/docs/examples/parsing/ListParser.scala
index 12805e5e50..59fc292c1d 100644
--- a/docs/examples/parsing/ListParser.scala
+++ b/docs/examples/parsing/ListParser.scala
@@ -14,7 +14,7 @@ object listParser {
class ListParsers extends Parsers {
type Elem = Char
-
+
lazy val ident = rep1(elem("letter", isLetter), elem("letter or digit", isLetterOrDigit)) ^^ {cs => Id(mkString(cs))}
lazy val number = chainl1(elem("digit", isDigit) ^^ (_ - '0'), success{(accum: Int, d: Int) => accum * 10 + d}) ^^ Num
lazy val list = '(' ~> repsep(expr, ',') <~ ')' ^^ Lst
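`number` uses `chainl1` with `success(...)` as the "operator" parser; since `success` consumes no input and always yields the same combining function, the chain simply folds each further digit into the accumulator as `accum * 10 + d`. The numeric fold it performs, in isolation (names invented):

    object DigitFoldSketch {
      def main(args: Array[String]): Unit = {
        val digits = "2025".map(_ - '0')                           // Seq(2, 0, 2, 5)
        println(digits.foldLeft(0)((accum, d) => accum * 10 + d))  // 2025
      }
    }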
diff --git a/docs/examples/parsing/ListParsers.scala b/docs/examples/parsing/ListParsers.scala
index f503a0139f..b449c4a641 100644
--- a/docs/examples/parsing/ListParsers.scala
+++ b/docs/examples/parsing/ListParsers.scala
@@ -2,7 +2,7 @@ package examples.parsing
import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
-object ListParsers extends StandardTokenParsers {
+object ListParsers extends StandardTokenParsers {
lexical.delimiters ++= List("(", ")", ",")
def expr: Parser[Any] = "(" ~ exprs ~ ")" | ident | numericLit
@@ -15,7 +15,7 @@ object ListParsers extends StandardTokenParsers {
}
}
-object ListParsers1 extends StandardTokenParsers {
+object ListParsers1 extends StandardTokenParsers {
lexical.delimiters ++= List("(", ")", ",")
def expr: Parser[Any] = "(" ~> exprs <~ ")" | ident | numericLit
diff --git a/docs/examples/parsing/MiniML.scala b/docs/examples/parsing/MiniML.scala
index ffc7c2ac92..f7f7172e8d 100644
--- a/docs/examples/parsing/MiniML.scala
+++ b/docs/examples/parsing/MiniML.scala
@@ -3,7 +3,7 @@ package examples.parsing
import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
-object MiniML extends StandardTokenParsers {
+object MiniML extends StandardTokenParsers {
lexical.delimiters += ("(", ")", ".", "=")
lexical.reserved += ("lambda", "let", "in")
@@ -30,7 +30,7 @@ case class Lambda(x: String, expr: Expr) extends Expr
case class Apply(fun: Expr, arg: Expr) extends Expr
case class Var(x: String) extends Expr
-object MiniML1 extends StandardTokenParsers {
+object MiniML1 extends StandardTokenParsers {
lexical.delimiters += ("(", ")", ".", "=")
lexical.reserved += ("lambda", "let", "in")
@@ -41,7 +41,7 @@ object MiniML1 extends StandardTokenParsers {
)
def simpleExpr: Parser[Expr] = (
ident ^^ { Var }
- | "(" ~> expr <~ ")"
+ | "(" ~> expr <~ ")"
)
def main(args: Array[String]) {
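`ident ^^ { Var }` type-checks because the companion object of a case class such as `Var(x: String)` is itself a `String => Var` function value. A tiny sketch of that (names invented):

    object CompanionAsFunctionSketch {
      sealed trait Expr
      case class Var(x: String) extends Expr

      def main(args: Array[String]): Unit = {
        val mk: String => Expr = Var      // the companion is already a function
        println(List("x", "y").map(mk))   // List(Var(x), Var(y))
      }
    }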
diff --git a/docs/examples/parsing/lambda/Main.scala b/docs/examples/parsing/lambda/Main.scala
index 81a175de77..165e82b670 100644
--- a/docs/examples/parsing/lambda/Main.scala
+++ b/docs/examples/parsing/lambda/Main.scala
@@ -27,8 +27,8 @@ object Main extends Application with TestParser
{
Console.println("Term: \n"+term)
}
- case Failure(msg, remainder) => Console.println("Failure: "+msg+"\n"+"Remainder: \n"+remainder.pos.longString)
- case Error(msg, remainder) => Console.println("Error: "+msg+"\n"+"Remainder: \n"+remainder.pos.longString)
+ case Failure(msg, remainder) => Console.println("Failure: "+msg+"\n"+"Remainder: \n"+remainder.pos.longString)
+ case Error(msg, remainder) => Console.println("Error: "+msg+"\n"+"Remainder: \n"+remainder.pos.longString)
}
}
}
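`Main` handles the three `ParseResult` cases: `Success` carries the parsed term, while `Failure` (recoverable) and `Error` (fatal) carry a message plus the remaining input, whose `pos.longString` renders the offending line with a caret. The same pattern with a throwaway regex parser (sketch only, not the example's TestParser):

    import scala.util.parsing.combinator.RegexParsers

    object ResultHandlingSketch extends RegexParsers {
      def num: Parser[Int] = """\d+""".r ^^ (_.toInt)

      def main(args: Array[String]): Unit =
        parseAll(num, "12x") match {   // trailing 'x' forces a Failure
          case Success(n, _)      => println("Term:\n" + n)
          case Failure(msg, rest) => println("Failure: " + msg + "\nRemainder:\n" + rest.pos.longString)
          case Error(msg, rest)   => println("Error: " + msg + "\nRemainder:\n" + rest.pos.longString)
        }
    }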
diff --git a/docs/examples/parsing/lambda/TestParser.scala b/docs/examples/parsing/lambda/TestParser.scala
index 623b597337..d26589da1b 100644
--- a/docs/examples/parsing/lambda/TestParser.scala
+++ b/docs/examples/parsing/lambda/TestParser.scala
@@ -17,9 +17,9 @@ trait TestParser extends StdTokenParsers with ImplicitConversions with TestSynt
lexical.reserved ++= List("unit", "let", "in", "if", "then", "else")
lexical.delimiters ++= List("=>", "->", "==", "(", ")", "=", "\\", "+", "-", "*", "/")
-
+
def name : Parser[Name] = ident ^^ Name
-
+
// meaning of the arguments to the closure during subsequent iterations
// (...(expr2 op1 expr1) ... op1 expr1)
// ^a^^^ ^o^ ^b^^^
@@ -29,10 +29,10 @@ trait TestParser extends StdTokenParsers with ImplicitConversions with TestSynt
def expr2 : Parser[Term] =
chainl1(expr3, expr2, op2 ^^ {o => (a: Term, b: Term) => App(App(o, a), b)})
-
+
def expr3 : Parser[Term] =
chainl1(expr4, expr3, op3 ^^ {o => (a: Term, b: Term) => App(App(o, a), b)})
-
+
def expr4 : Parser[Term] =
( "\\" ~> lambdas
| ("let" ~> name) ~ ("=" ~> expr1) ~ ("in" ~> expr1) ^^ flatten3(Let)
@@ -42,27 +42,27 @@ trait TestParser extends StdTokenParsers with ImplicitConversions with TestSynt
def lambdas : Parser[Term] =
name ~ ("->" ~> expr1 | lambdas) ^^ flatten2(Lam)
-
+
def aexpr : Parser[Term] =
( numericLit ^^ (_.toInt) ^^ Lit
| name ^^ Ref
| "unit" ^^^ Unit()
| "(" ~> expr1 <~ ")"
)
-
+
def op1 : Parser[Term] =
"==" ^^^ Ref(Name("=="))
-
+
def op2 : Parser[Term] =
( "+" ^^^ Ref(Name("+"))
| "-" ^^^ Ref(Name("-"))
)
-
+
def op3 : Parser[Term] =
( "*" ^^^ Ref(Name("*"))
| "/" ^^^ Ref(Name("/"))
)
-
+
def parse(r: Reader[char]) : ParseResult[Term] =
phrase(expr1)(new lexical.Scanner(r))
}
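The comment above `expr1` describes the three-argument `chainl1(first, p, q)`: it parses `first (q p)*` and folds left, so `a op b op c` becomes `App(App(op, App(App(op, a), b)), c)`. That fold in isolation, with local stand-ins for the syntax classes (all names invented):

    object ChainFoldSketch {
      sealed trait Term
      case class Lit(n: Int) extends Term
      case class Ref(name: String) extends Term
      case class App(t1: Term, t2: Term) extends Term

      def main(args: Array[String]): Unit = {
        val first: Term = Lit(1)
        val rest = List((Ref("+"), Lit(2)), (Ref("-"), Lit(3)))  // from "1 + 2 - 3"
        val result = rest.foldLeft(first) {
          case (a, (o, b)) => App(App(o, a), b)  // same shape as the ^^ actions above
        }
        println(result)  // App(App(Ref(-),App(App(Ref(+),Lit(1)),Lit(2))),Lit(3))
      }
    }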
diff --git a/docs/examples/parsing/lambda/TestSyntax.scala b/docs/examples/parsing/lambda/TestSyntax.scala
index 531ae4bd54..7edca6ccdc 100644
--- a/docs/examples/parsing/lambda/TestSyntax.scala
+++ b/docs/examples/parsing/lambda/TestSyntax.scala
@@ -5,25 +5,25 @@ package examples.parsing.lambda
*
* @author Miles Sabin (adapted slightly by Adriaan Moors)
*/
-trait TestSyntax
+trait TestSyntax
{
- trait Term
-
+ trait Term
+
case class Unit extends Term
{
override def toString = "unit"
}
-
+
case class Lit(n: int) extends Term
{
override def toString = n.toString
}
-
+
case class Bool(b: boolean) extends Term
{
override def toString = b.toString
}
-
+
case class Name(name: String) extends Term
{
override def toString = name
@@ -33,27 +33,27 @@ trait TestSyntax
{
def value = n
}
-
+
case class Lam(n: Name, l: Term) extends Term
{
override def toString = "(\\ "+n+" -> "+l+")"
- }
-
+ }
+
case class App(t1: Term, t2: Term) extends Term
{
override def toString = "("+t1+" "+t2+")"
- }
-
+ }
+
case class Let(n: Name, t1: Term, t2: Term) extends Term
{
override def toString = "let "+n+" = "+t1+" in "+t2
}
-
+
case class If(c: Term, t1: Term, t2: Term) extends Term
{
override def toString = "if "+c+" then "+t1+" else "+t2
}
-
+
trait PrimTerm extends Term
{
def apply(n: Lit) : Term
@@ -68,7 +68,7 @@ trait TestSyntax
{
def apply(x: Lit) = new PrimTerm { def apply(y: Lit) = Lit(x.n-y.n) }
}
-
+
case class PrimMultiply extends PrimTerm
{
def apply(x: Lit) = new PrimTerm { def apply(y: Lit) = Lit(x.n*y.n) }
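`PrimTerm` encodes a binary primitive in curried form: applying it to the first `Lit` returns another `PrimTerm` waiting for the second, mirroring the `App(App(prim, a), b)` shape the parser builds. A sketch of using that encoding in an evaluator (local stand-ins, names invented):

    object PrimTermSketch {
      trait Term
      case class Lit(n: Int) extends Term
      trait PrimTerm extends Term { def apply(n: Lit): Term }

      val primMultiply: PrimTerm = new PrimTerm {
        def apply(x: Lit) = new PrimTerm { def apply(y: Lit) = Lit(x.n * y.n) }
      }

      def main(args: Array[String]): Unit =
        primMultiply(Lit(6)) match {              // first application: partial
          case p: PrimTerm => println(p(Lit(7)))  // second application: Lit(42)
          case t           => println(t)
        }
    }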