Diffstat (limited to 'docs/examples/parsing')
-rw-r--r--  docs/examples/parsing/ArithmeticParser.scala  | 57
-rw-r--r--  docs/examples/parsing/ArithmeticParsers.scala | 70
-rw-r--r--  docs/examples/parsing/JSON.scala              | 44
-rw-r--r--  docs/examples/parsing/ListParser.scala        | 33
-rw-r--r--  docs/examples/parsing/ListParsers.scala       | 30
-rw-r--r--  docs/examples/parsing/MiniML.scala            | 52
-rw-r--r--  docs/examples/parsing/lambda/Main.scala       | 34
-rw-r--r--  docs/examples/parsing/lambda/TestParser.scala | 68
-rw-r--r--  docs/examples/parsing/lambda/TestSyntax.scala | 86
-rw-r--r--  docs/examples/parsing/lambda/test/test-01.kwi |  1
-rw-r--r--  docs/examples/parsing/lambda/test/test-02.kwi |  1
-rw-r--r--  docs/examples/parsing/lambda/test/test-03.kwi |  1
-rw-r--r--  docs/examples/parsing/lambda/test/test-04.kwi |  1
-rw-r--r--  docs/examples/parsing/lambda/test/test-05.kwi |  1
-rw-r--r--  docs/examples/parsing/lambda/test/test-06.kwi |  1
-rw-r--r--  docs/examples/parsing/lambda/test/test-07.kwi |  1
-rw-r--r--  docs/examples/parsing/lambda/test/test-08.kwi |  1
17 files changed, 0 insertions, 482 deletions
diff --git a/docs/examples/parsing/ArithmeticParser.scala b/docs/examples/parsing/ArithmeticParser.scala
deleted file mode 100644
index 99cf7a5578..0000000000
--- a/docs/examples/parsing/ArithmeticParser.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package examples.parsing
-
-import scala.util.parsing.combinator.lexical.StdLexical
-import scala.util.parsing.combinator.syntactical.StdTokenParsers
-
-/** Parse and evaluate a numeric expression as a sequence of terms, separated by + or -
- * a term is a sequence of factors, separated by * or /
- * a factor is a parenthesized expression or a number
- *
- * @author Adriaan Moors
- */
-object arithmeticParser extends StdTokenParsers {
- type Tokens = StdLexical ; val lexical = new StdLexical
- lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
-
- lazy val expr = term*("+" ^^^ {(x: int, y: int) => x + y} | "-" ^^^ {(x: int, y: int) => x - y})
- lazy val term = factor*("*" ^^^ {(x: int, y: int) => x * y} | "/" ^^^ {(x: int, y: int) => x / y})
- lazy val factor: Parser[int] = "(" ~> expr <~ ")" | numericLit ^^ (_.toInt)
-
- def main(args: Array[String]) {
- println(
- if (args.length == 1) {
- expr(new lexical.Scanner(args(0)))
- }
- else
- "usage: scala examples.parsing.arithmeticParser <expr-string>"
- )
- }
-}
-
-
-object arithmeticParserDesugared extends StdTokenParsers {
- type Tokens = StdLexical ; val lexical = new StdLexical
- lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
-
- lazy val expr = chainl1(term, (keyword("+").^^^{(x: int, y: int) => x + y}).|(keyword("-").^^^{(x: int, y: int) => x - y}))
- lazy val term = chainl1(factor, (keyword("*").^^^{(x: int, y: int) => x * y}).|(keyword("/").^^^{(x: int, y: int) => x / y}))
- lazy val factor: Parser[int] = keyword("(").~>(expr.<~(keyword(")"))).|(numericLit.^^(x => x.toInt))
-
- def main(args: Array[String]) {
- println(
- if (args.length == 1) {
- expr(new lexical.Scanner(args(0)))
- }
- else
- "usage: scala examples.parsing.arithmeticParser <expr-string>"
- )
- }
-}
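
For reference, a minimal sketch of the same chainl1-based grammar written against the current scala-parser-combinators StandardTokenParsers. The object name ArithmeticSketch and the default input string are invented for illustration, and the obsolete lowercase `int` alias of the deleted file becomes `Int`:

import scala.util.parsing.combinator.syntactical.StandardTokenParsers

object ArithmeticSketch extends StandardTokenParsers {
  lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")

  // a sequence of terms separated by + or -, folded left via chainl1
  lazy val expr: Parser[Int] =
    chainl1(term, "+" ^^^ ((x: Int, y: Int) => x + y) | "-" ^^^ ((x: Int, y: Int) => x - y))
  // a sequence of factors separated by * or /, which therefore bind tighter
  lazy val term: Parser[Int] =
    chainl1(factor, "*" ^^^ ((x: Int, y: Int) => x * y) | "/" ^^^ ((x: Int, y: Int) => x / y))
  // a factor is a parenthesized expression or a numeric literal
  lazy val factor: Parser[Int] = "(" ~> expr <~ ")" | numericLit ^^ (_.toInt)

  def main(args: Array[String]): Unit = {
    val input = if (args.nonEmpty) args(0) else "2 * (3 + 4)"  // hypothetical default
    println(phrase(expr)(new lexical.Scanner(input)))
  }
}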
diff --git a/docs/examples/parsing/ArithmeticParsers.scala b/docs/examples/parsing/ArithmeticParsers.scala
deleted file mode 100644
index 62d7a61862..0000000000
--- a/docs/examples/parsing/ArithmeticParsers.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-package examples.parsing
-
-import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
-
-object ArithmeticParsers extends StandardTokenParsers {
- lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
-
- def expr: Parser[Any] = term ~ rep("+" ~ term | "-" ~ term)
- def term = factor ~ rep("*" ~ factor | "/" ~ factor)
- def factor: Parser[Any] = "(" ~ expr ~ ")" | numericLit
-
- def main(args: Array[String]) {
- val tokens = new lexical.Scanner(args(0))
- println(args(0))
- println(phrase(expr)(tokens))
- }
-}
-
-object ArithmeticParsers1 extends StandardTokenParsers {
- lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
-
- val reduceList: Int ~ List[String ~ Int] => Int = {
- case i ~ ps => (i /: ps)(reduce)
- }
-
- def reduce(x: Int, r: String ~ Int) = (r: @unchecked) match {
- case "+" ~ y => x + y
- case "-" ~ y => x - y
- case "*" ~ y => x * y
- case "/" ~ y => x / y
- }
-
- def expr : Parser[Int] = term ~ rep ("+" ~ term | "-" ~ term) ^^ reduceList
- def term : Parser[Int] = factor ~ rep ("*" ~ factor | "/" ~ factor) ^^ reduceList
- def factor: Parser[Int] = "(" ~> expr <~ ")" | numericLit ^^ (_.toInt)
-
- def main(args: Array[String]) {
- val tokens = new lexical.Scanner(args(0))
- println(args(0))
- println(phrase(expr)(tokens))
- }
-}
-
-class Expr
-case class BinOp(op: String, l: Expr, r: Expr) extends Expr
-case class Num(n: Int) extends Expr
-
-object ArithmeticParsers2 extends StandardTokenParsers {
- lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
-
- val reduceList: Expr ~ List[String ~ Expr] => Expr = {
- case i ~ ps => (i /: ps)(reduce)
- }
-
- def reduce(l: Expr, r: String ~ Expr) = BinOp(r._1, l, r._2)
- def mkNum(s: String) = Num(s.toInt)
-
- def expr : Parser[Expr] = term ~ rep ("+" ~ term | "-" ~ term) ^^ reduceList
- def term : Parser[Expr] = factor ~ rep ("*" ~ factor | "/" ~ factor) ^^ reduceList
- def factor: Parser[Expr] = "(" ~> expr <~ ")" | numericLit ^^ ((s: String) => Num(s.toInt))
-
- def main(args: Array[String]) {
- val parse = phrase(expr)
- val tokens = new lexical.Scanner(args(0))
- println(args(0))
- println(parse(tokens))
- }
-}
-
-
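A small sketch of the AST-building variant above with the since-deprecated fold-left operator `/:` rewritten as `foldLeft`. The object name ArithmeticAstSketch is invented; the behaviour is otherwise meant to match ArithmeticParsers2:

import scala.util.parsing.combinator.syntactical.StandardTokenParsers

object ArithmeticAstSketch extends StandardTokenParsers {
  lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")

  sealed trait Expr
  case class BinOp(op: String, l: Expr, r: Expr) extends Expr
  case class Num(n: Int) extends Expr

  // fold the (operator, operand) pairs left-to-right into a left-leaning tree
  val reduceList: Expr ~ List[String ~ Expr] => Expr = {
    case first ~ rest => rest.foldLeft(first) { case (l, op ~ r) => BinOp(op, l, r) }
  }

  def expr: Parser[Expr]   = term ~ rep("+" ~ term | "-" ~ term) ^^ reduceList
  def term: Parser[Expr]   = factor ~ rep("*" ~ factor | "/" ~ factor) ^^ reduceList
  def factor: Parser[Expr] = "(" ~> expr <~ ")" | numericLit ^^ (s => Num(s.toInt))

  def main(args: Array[String]): Unit = {
    // "1+2*3" parses to BinOp(+,Num(1),BinOp(*,Num(2),Num(3)))
    val input = if (args.nonEmpty) args(0) else "1+2*3"
    println(phrase(expr)(new lexical.Scanner(input)))
  }
}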
diff --git a/docs/examples/parsing/JSON.scala b/docs/examples/parsing/JSON.scala
deleted file mode 100644
index abfa242e9f..0000000000
--- a/docs/examples/parsing/JSON.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package examples.parsing
-
-import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
-
-object JSON extends StandardTokenParsers {
- lexical.delimiters += ("{", "}", "[", "]", ":", ",")
- lexical.reserved += ("null", "true", "false")
-
- def obj : Parser[Any] = "{" ~ repsep(member, ",") ~ "}"
- def arr : Parser[Any] = "[" ~ repsep(value, ",") ~ "]"
- def member: Parser[Any] = ident ~ ":" ~ value
- def value : Parser[Any] = ident | numericLit | obj | arr |
- "null" | "true" | "false"
-
- def main(args: Array[String]) {
- val tokens = new lexical.Scanner(args(0))
- println(args(0))
- println(phrase(value)(tokens))
- }
-}
-object JSON1 extends StandardTokenParsers {
- lexical.delimiters += ("{", "}", "[", "]", ":", ",")
- lexical.reserved += ("null", "true", "false")
-
- def obj: Parser[Map[String, Any]] =
- "{" ~> repsep(member, ",") <~ "}" ^^ (Map() ++ _)
-
- def arr: Parser[List[Any]] =
- "[" ~> repsep(value, ",") <~ "]"
-
- def member: Parser[(String, Any)] =
- ident ~ ":" ~ value ^^ { case name ~ ":" ~ value => (name -> value) }
-
- def value: Parser[Any] =
- ident | numericLit ^^ (_.toInt) | obj | arr |
- "null" ^^^ null | "true" ^^^ true | "false" ^^^ false
-
- def main(args: Array[String]) {
- val tokens = new lexical.Scanner(args(0))
- println(args(0))
- println(phrase(value)(tokens))
- }
-}
-
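A hypothetical usage sketch for the JSON1 object above, if it were kept. Note that the grammar accepts bare identifiers rather than string literals as member names, since it uses `ident`:

val tokens = new JSON1.lexical.Scanner("{ name : 42, tags : [ 1, 2, 3 ], extra : null }")
println(JSON1.phrase(JSON1.value)(tokens))
// a successful parse carries Map(name -> 42, tags -> List(1, 2, 3), extra -> null)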
diff --git a/docs/examples/parsing/ListParser.scala b/docs/examples/parsing/ListParser.scala
deleted file mode 100644
index 59fc292c1d..0000000000
--- a/docs/examples/parsing/ListParser.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-package examples.parsing
-
-import scala.util.parsing.combinator.{Parsers, ImplicitConversions, ~, mkTilde}
-import scala.util.parsing.input.CharArrayReader
-
-object listParser {
- abstract class Tree
- case class Id(s: String) extends Tree
- case class Num(n: Int) extends Tree
- case class Lst(elems: List[Tree]) extends Tree
-
- import Character.{isLetter, isLetterOrDigit, isDigit}
- def mkString(cs: List[Any]) = cs.mkString("")
-
- class ListParsers extends Parsers {
- type Elem = Char
-
- lazy val ident = rep1(elem("letter", isLetter), elem("letter or digit", isLetterOrDigit)) ^^ {cs => Id(mkString(cs))}
- lazy val number = chainl1(elem("digit", isDigit) ^^ (_ - '0'), success{(accum: Int, d: Int) => accum * 10 + d}) ^^ Num
- lazy val list = '(' ~> repsep(expr, ',') <~ ')' ^^ Lst
- lazy val expr: Parser[Tree] = list | ident | number
- }
-
- def main(args: Array[String]) {
- println(
- if (args.length == 1) {
- (new ListParsers).expr(new CharArrayReader(args(0).toCharArray()))
- }
- else
- "usage: scala examples.parsing.listParser <list-string>"
- )
- }
-}
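
A hypothetical usage sketch for the character-level listParser above. The chainl1-based `number` folds digits into an Int, so "23" accumulates as (2 * 10) + 3; and because nothing skips whitespace at this level, the input must contain none:

import scala.util.parsing.input.CharArrayReader

val result = (new listParser.ListParsers).expr(new CharArrayReader("(a,(1,23),b)".toCharArray))
println(result)
// a successful parse yields Lst(List(Id(a), Lst(List(Num(1), Num(23))), Id(b)))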
diff --git a/docs/examples/parsing/ListParsers.scala b/docs/examples/parsing/ListParsers.scala
deleted file mode 100644
index b449c4a641..0000000000
--- a/docs/examples/parsing/ListParsers.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-package examples.parsing
-
-import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
-
-object ListParsers extends StandardTokenParsers {
- lexical.delimiters ++= List("(", ")", ",")
-
- def expr: Parser[Any] = "(" ~ exprs ~ ")" | ident | numericLit
- def exprs: Parser[Any] = expr ~ rep ("," ~ expr)
-
- def main(args: Array[String]) {
- val tokens = new lexical.Scanner(args(0))
- println(args(0))
- println(phrase(expr)(tokens))
- }
-}
-
-object ListParsers1 extends StandardTokenParsers {
- lexical.delimiters ++= List("(", ")", ",")
-
- def expr: Parser[Any] = "(" ~> exprs <~ ")" | ident | numericLit
-
- def exprs: Parser[List[Any]] = expr ~ rep ("," ~> expr) ^^ { case x ~ y => x :: y }
-
- def main(args: Array[String]) {
- val tokens = new lexical.Scanner(args(0))
- println(args(0))
- println(phrase(expr)(tokens))
- }
-}
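
A hypothetical usage sketch for ListParsers1 above; whitespace is fine here because StandardTokenParsers tokenizes the input first:

val tokens = new ListParsers1.lexical.Scanner("(a, 1, (b, 2))")
println(ListParsers1.phrase(ListParsers1.expr)(tokens))
// a successful parse yields List(a, 1, List(b, 2)) (numeric literals stay Strings here)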
diff --git a/docs/examples/parsing/MiniML.scala b/docs/examples/parsing/MiniML.scala
deleted file mode 100644
index f7f7172e8d..0000000000
--- a/docs/examples/parsing/MiniML.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-package examples.parsing
-
-import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
-import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
-
-object MiniML extends StandardTokenParsers {
- lexical.delimiters += ("(", ")", ".", "=")
- lexical.reserved += ("lambda", "let", "in")
-
- def expr: Parser[Any] = (
- "let" ~ ident ~ "=" ~ expr ~ "in" ~ expr
- | "lambda" ~ ident ~ "." ~ expr
- | simpleExpr ~ rep(expr)
- )
- def simpleExpr: Parser[Any] = (
- ident
- | "(" ~ expr ~ ")"
- )
-
- def main(args: Array[String]) {
- val tokens = new lexical.Scanner(args(0))
- println(args(0))
- println(phrase(expr)(tokens))
- }
-}
-
-class Expr
-case class Let(x: String, expr: Expr, body: Expr) extends Expr
-case class Lambda(x: String, expr: Expr) extends Expr
-case class Apply(fun: Expr, arg: Expr) extends Expr
-case class Var(x: String) extends Expr
-
-object MiniML1 extends StandardTokenParsers {
- lexical.delimiters += ("(", ")", ".", "=")
- lexical.reserved += ("lambda", "let", "in")
-
- def expr: Parser[Expr] = (
- "let" ~ ident ~ "=" ~ expr ~ "in" ~ expr ^^ { case "let" ~ x ~ "=" ~ e ~ "in" ~ b => Let(x, e, b) }
- | "lambda" ~ ident ~ "." ~ expr ^^ { case "lambda" ~ x ~ "." ~ e => Lambda(x, e) }
- | simpleExpr ~ rep(expr) ^^ { case f ~ as => (f /: as) (Apply) }
- )
- def simpleExpr: Parser[Expr] = (
- ident ^^ { Var }
- | "(" ~> expr <~ ")"
- )
-
- def main(args: Array[String]) {
- val tokens = new lexical.Scanner(args(0))
- println(args(0))
- println(phrase(expr)(tokens))
- }
-}
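
A hypothetical usage sketch for MiniML1 above. Application associates to the left because of the `(f /: as)(Apply)` fold, so `f a b` becomes Apply(Apply(Var(f), Var(a)), Var(b)):

val tokens = new MiniML1.lexical.Scanner("let id = lambda x . x in id y")
println(MiniML1.phrase(MiniML1.expr)(tokens))
// a successful parse yields Let(id, Lambda(x, Var(x)), Apply(Var(id), Var(y)))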
diff --git a/docs/examples/parsing/lambda/Main.scala b/docs/examples/parsing/lambda/Main.scala
deleted file mode 100644
index 165e82b670..0000000000
--- a/docs/examples/parsing/lambda/Main.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-package examples.parsing.lambda
-
-import scala.util.parsing.combinator.Parsers
-import scala.util.parsing.input.StreamReader
-
-import java.io.File
-import java.io.FileInputStream
-import java.io.InputStreamReader
-
-/**
- * Parser for an untyped lambda calculus
- *
- * Usage: scala examples.parsing.lambda.Main <file>
- *
- * (example files: see test/ *.kwi)
- *
- * @author Miles Sabin (adapted slightly by Adriaan Moors)
- */
-object Main extends Application with TestParser
-{
- override def main(args: Array[String]) =
- {
- val in = StreamReader(new InputStreamReader(new FileInputStream(new File(args(0))), "ISO-8859-1"))
- parse(in) match
- {
- case Success(term, _) =>
- {
- Console.println("Term: \n"+term)
- }
- case Failure(msg, remainder) => Console.println("Failure: "+msg+"\n"+"Remainder: \n"+remainder.pos.longString)
- case Error(msg, remainder) => Console.println("Error: "+msg+"\n"+"Remainder: \n"+remainder.pos.longString)
- }
- }
-}
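
A sketch of driving the same parser over an in-memory string instead of a file, assuming the TestParser trait below is kept; the object name StringMain is invented for illustration, and the input reproduces test-01.kwi:

import scala.util.parsing.input.CharArrayReader

object StringMain extends TestParser {
  def main(args: Array[String]): Unit =
    parse(new CharArrayReader("let x = 23 in (\\y z -> x+y+z) 1 2".toCharArray)) match {
      case Success(term, _)   => println("Term:\n" + term)
      case Failure(msg, rest) => println("Failure: " + msg + "\n" + rest.pos.longString)
      case Error(msg, rest)   => println("Error: " + msg + "\n" + rest.pos.longString)
    }
}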
diff --git a/docs/examples/parsing/lambda/TestParser.scala b/docs/examples/parsing/lambda/TestParser.scala
deleted file mode 100644
index d26589da1b..0000000000
--- a/docs/examples/parsing/lambda/TestParser.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-package examples.parsing.lambda
-
-import scala.util.parsing.input.Reader
-import scala.util.parsing.combinator.lexical.StdLexical
-import scala.util.parsing.combinator.syntactical.StdTokenParsers
-import scala.util.parsing.combinator.ImplicitConversions
-
-/**
- * Parser for an untyped lambda calculus
- *
- * @author Miles Sabin (adapted slightly by Adriaan Moors)
- */
-trait TestParser extends StdTokenParsers with ImplicitConversions with TestSyntax
-{
- type Tokens = StdLexical
- val lexical = new StdLexical
- lexical.reserved ++= List("unit", "let", "in", "if", "then", "else")
- lexical.delimiters ++= List("=>", "->", "==", "(", ")", "=", "\\", "+", "-", "*", "/")
-
-
- def name : Parser[Name] = ident ^^ Name
-
- // meaning of the arguments to the closure during subsequent iterations
- // (...(expr2 op1 expr1) ... op1 expr1)
- // ^a^^^ ^o^ ^b^^^
- // ^^^^^^^a^^^^^^^ ^o^ ^^b^^
- def expr1 : Parser[Term] =
- chainl1(expr2, expr1, op1 ^^ {o => (a: Term, b: Term) => App(App(o, a), b)})
-
- def expr2 : Parser[Term] =
- chainl1(expr3, expr2, op2 ^^ {o => (a: Term, b: Term) => App(App(o, a), b)})
-
- def expr3 : Parser[Term] =
- chainl1(expr4, expr3, op3 ^^ {o => (a: Term, b: Term) => App(App(o, a), b)})
-
- def expr4 : Parser[Term] =
- ( "\\" ~> lambdas
- | ("let" ~> name) ~ ("=" ~> expr1) ~ ("in" ~> expr1) ^^ flatten3(Let)
- | ("if" ~> expr1) ~ ("then" ~> expr1) ~ ("else" ~> expr1) ^^ flatten3(If)
- | chainl1(aexpr, success(App(_: Term, _: Term)))
- )
-
- def lambdas : Parser[Term] =
- name ~ ("->" ~> expr1 | lambdas) ^^ flatten2(Lam)
-
- def aexpr : Parser[Term] =
- ( numericLit ^^ (_.toInt) ^^ Lit
- | name ^^ Ref
- | "unit" ^^^ Unit()
- | "(" ~> expr1 <~ ")"
- )
-
- def op1 : Parser[Term] =
- "==" ^^^ Ref(Name("=="))
-
- def op2 : Parser[Term] =
- ( "+" ^^^ Ref(Name("+"))
- | "-" ^^^ Ref(Name("-"))
- )
-
- def op3 : Parser[Term] =
- ( "*" ^^^ Ref(Name("*"))
- | "/" ^^^ Ref(Name("/"))
- )
-
- def parse(r: Reader[char]) : ParseResult[Term] =
- phrase(expr1)(new lexical.Scanner(r))
-}
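
To make the precedence layering above concrete: expr3 binds * and /, expr2 binds + and -, expr1 binds ==, and every binary operator is desugared into two curried applications. The test-06.kwi input `2*3+4*5 == 26` therefore parses to roughly

  App(App(Ref(==),
          App(App(Ref(+), App(App(Ref(*), 2), 3)),
                          App(App(Ref(*), 4), 5))),
      26)

i.e. ((2*3) + (4*5)) == 26, with Lit terms printed as their bare numbers.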
diff --git a/docs/examples/parsing/lambda/TestSyntax.scala b/docs/examples/parsing/lambda/TestSyntax.scala
deleted file mode 100644
index 7edca6ccdc..0000000000
--- a/docs/examples/parsing/lambda/TestSyntax.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-package examples.parsing.lambda
-
-/**
- * Parser for an untyped lambda calculus: abstract syntax tree
- *
- * @author Miles Sabin (adapted slightly by Adriaan Moors)
- */
-trait TestSyntax
-{
- trait Term
-
- case class Unit extends Term
- {
- override def toString = "unit"
- }
-
- case class Lit(n: int) extends Term
- {
- override def toString = n.toString
- }
-
- case class Bool(b: boolean) extends Term
- {
- override def toString = b.toString
- }
-
- case class Name(name: String) extends Term
- {
- override def toString = name
- }
-
- case class Ref(n: Name) extends Term
- {
- def value = n
- }
-
- case class Lam(n: Name, l: Term) extends Term
- {
- override def toString = "(\\ "+n+" -> "+l+")"
- }
-
- case class App(t1: Term, t2: Term) extends Term
- {
- override def toString = "("+t1+" "+t2+")"
- }
-
- case class Let(n: Name, t1: Term, t2: Term) extends Term
- {
- override def toString = "let "+n+" = "+t1+" in "+t2
- }
-
- case class If(c: Term, t1: Term, t2: Term) extends Term
- {
- override def toString = "if "+c+" then "+t1+" else "+t2
- }
-
- trait PrimTerm extends Term
- {
- def apply(n: Lit) : Term
- }
-
- case class PrimPlus extends PrimTerm
- {
- def apply(x: Lit) = new PrimTerm { def apply(y: Lit) = Lit(x.n+y.n) }
- }
-
- case class PrimMinus extends PrimTerm
- {
- def apply(x: Lit) = new PrimTerm { def apply(y: Lit) = Lit(x.n-y.n) }
- }
-
- case class PrimMultiply extends PrimTerm
- {
- def apply(x: Lit) = new PrimTerm { def apply(y: Lit) = Lit(x.n*y.n) }
- }
-
- case class PrimDivide extends PrimTerm
- {
- def apply(x: Lit) = new PrimTerm { def apply(y: Lit) = Lit(x.n/y.n) }
- }
-
- case class PrimEquals extends PrimTerm
- {
- def apply(x: Lit) = new PrimTerm { def apply(y: Lit) = Bool(x.n == y.n) }
- }
-}
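
A side note on the syntax tree above: it predates two later language changes, the removal of the lowercase `int`/`boolean` aliases and of case classes without a parameter list. A minimal sketch of how the first few cases would read in current Scala (same names, only the deprecated forms updated):

trait Term
case class Unit() extends Term { override def toString = "unit" }               // was `case class Unit extends Term`
case class Lit(n: Int) extends Term { override def toString = n.toString }      // was `n: int`
case class Bool(b: Boolean) extends Term { override def toString = b.toString } // was `b: boolean`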
diff --git a/docs/examples/parsing/lambda/test/test-01.kwi b/docs/examples/parsing/lambda/test/test-01.kwi
deleted file mode 100644
index 9833d10673..0000000000
--- a/docs/examples/parsing/lambda/test/test-01.kwi
+++ /dev/null
@@ -1 +0,0 @@
-let x = 23 in (\y z -> x+y+z) 1 2
diff --git a/docs/examples/parsing/lambda/test/test-02.kwi b/docs/examples/parsing/lambda/test/test-02.kwi
deleted file mode 100644
index 11198c6fc9..0000000000
--- a/docs/examples/parsing/lambda/test/test-02.kwi
+++ /dev/null
@@ -1 +0,0 @@
-let f = (\x y -> x*y) in f 2 3
diff --git a/docs/examples/parsing/lambda/test/test-03.kwi b/docs/examples/parsing/lambda/test/test-03.kwi
deleted file mode 100644
index d4515d7297..0000000000
--- a/docs/examples/parsing/lambda/test/test-03.kwi
+++ /dev/null
@@ -1 +0,0 @@
-let f = (\x y -> x*y) in f (f 1 2) 3
diff --git a/docs/examples/parsing/lambda/test/test-04.kwi b/docs/examples/parsing/lambda/test/test-04.kwi
deleted file mode 100644
index e54c45457a..0000000000
--- a/docs/examples/parsing/lambda/test/test-04.kwi
+++ /dev/null
@@ -1 +0,0 @@
-let fact = \x -> if x == 0 then 1 else x*(fact (x-1)) in unit
diff --git a/docs/examples/parsing/lambda/test/test-05.kwi b/docs/examples/parsing/lambda/test/test-05.kwi
deleted file mode 100644
index 0b95d67846..0000000000
--- a/docs/examples/parsing/lambda/test/test-05.kwi
+++ /dev/null
@@ -1 +0,0 @@
-let fact = \x -> if x == 0 then 1 else x*(fact (x-1)) in fact 6
diff --git a/docs/examples/parsing/lambda/test/test-06.kwi b/docs/examples/parsing/lambda/test/test-06.kwi
deleted file mode 100644
index 47723dc998..0000000000
--- a/docs/examples/parsing/lambda/test/test-06.kwi
+++ /dev/null
@@ -1 +0,0 @@
-2*3+4*5 == 26
diff --git a/docs/examples/parsing/lambda/test/test-07.kwi b/docs/examples/parsing/lambda/test/test-07.kwi
deleted file mode 100644
index 14fba0d66a..0000000000
--- a/docs/examples/parsing/lambda/test/test-07.kwi
+++ /dev/null
@@ -1 +0,0 @@
-let fix = \f -> f(fix f) in unit
diff --git a/docs/examples/parsing/lambda/test/test-08.kwi b/docs/examples/parsing/lambda/test/test-08.kwi
deleted file mode 100644
index 7166d154f0..0000000000
--- a/docs/examples/parsing/lambda/test/test-08.kwi
+++ /dev/null
@@ -1 +0,0 @@
-let fix = (\f -> f(fix f)) in (fix (\g n -> if n == 0 then 1 else n*(g(n-1)))) 5