summaryrefslogtreecommitdiff
path: root/docs/examples
diff options
context:
space:
mode:
authorMartin Odersky <odersky@gmail.com>2007-09-16 18:01:02 +0000
committerMartin Odersky <odersky@gmail.com>2007-09-16 18:01:02 +0000
commit7fe7bace8a7a55d888f4ebdce8d778ceb4384546 (patch)
tree3d1041a517a92f2a05f48ff80be116f316aadef1 /docs/examples
parent0b2f65aa6c6b3bb3e7628720893fb8116fdc8f71 (diff)
downloadscala-7fe7bace8a7a55d888f4ebdce8d778ceb4384546.tar.gz
scala-7fe7bace8a7a55d888f4ebdce8d778ceb4384546.tar.bz2
scala-7fe7bace8a7a55d888f4ebdce8d778ceb4384546.zip
combinator1 parser changes + examples added
Diffstat (limited to 'docs/examples')
-rwxr-xr-xdocs/examples/parsing/ArithmeticParsers.scala70
-rwxr-xr-xdocs/examples/parsing/JSON.scala44
-rw-r--r--docs/examples/parsing/ListParsers.scala30
-rw-r--r--docs/examples/parsing/MiniML.scala52
4 files changed, 196 insertions, 0 deletions
diff --git a/docs/examples/parsing/ArithmeticParsers.scala b/docs/examples/parsing/ArithmeticParsers.scala
new file mode 100755
index 0000000000..8fb3af7acb
--- /dev/null
+++ b/docs/examples/parsing/ArithmeticParsers.scala
@@ -0,0 +1,70 @@
+package examples.parsing
+
+import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
+
object ArithmeticParsers extends StandardTokenParsers {
  // Punctuation tokens the lexer must recognize before parsing starts.
  lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")

  /** expr ::= term {("+" | "-") term} — raw parse tree only, no evaluation. */
  def expr: Parser[Any] = term ~ rep(("+" | "-") ~ term)
  /** term ::= factor {("*" | "/") factor} */
  def term = factor ~ rep(("*" | "/") ~ factor)
  /** factor ::= "(" expr ")" | numericLit */
  def factor: Parser[Any] = "(" ~ expr ~ ")" | numericLit

  /** Parses args(0) as an expression, echoing the input and the parse result. */
  def main(args: Array[String]): Unit = {
    val input = args(0)
    println(input)
    println(phrase(expr)(new lexical.Scanner(input)))
  }
}
+
object ArithmeticParsers1 extends StandardTokenParsers {
  lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")

  /** Folds a head value and its trailing (operator, operand) pairs into one Int. */
  val reduceList: Int ~ List[String ~ Int] => Int = {
    case head ~ tail => tail.foldLeft(head)(reduce)
  }

  /** One fold step; the grammar only ever produces these four operators. */
  def reduce(x: Int, r: String ~ Int) = (r: @unchecked) match {
    case "+" ~ y => x + y
    case "-" ~ y => x - y
    case "*" ~ y => x * y
    case "/" ~ y => x / y
  }

  /** expr ::= term {("+" | "-") term}, evaluated left to right. */
  def expr: Parser[Int] = term ~ rep(("+" | "-") ~ term) ^^ reduceList
  /** term ::= factor {("*" | "/") factor}, evaluated left to right. */
  def term: Parser[Int] = factor ~ rep(("*" | "/") ~ factor) ^^ reduceList
  /** factor ::= "(" expr ")" | numericLit — parentheses are dropped. */
  def factor: Parser[Int] = "(" ~> expr <~ ")" | numericLit ^^ (lit => lit.toInt)

  /** Parses and evaluates args(0), echoing the input and the result. */
  def main(args: Array[String]): Unit = {
    val input = args(0)
    println(input)
    println(phrase(expr)(new lexical.Scanner(input)))
  }
}
+
// AST for ArithmeticParsers2: a binary-operator node and an integer leaf.
// `sealed` lets the compiler check match exhaustiveness (all subclasses are
// in this file); `final` prevents further case-class subclassing.
// NOTE(review): MiniML.scala declares another `class Expr` hierarchy in the
// same package `examples.parsing` — compiling both example files into one
// build fails with a duplicate definition. Consider one sub-package per
// example, or distinct type names.
sealed class Expr
final case class BinOp(op: String, l: Expr, r: Expr) extends Expr
final case class Num(n: Int) extends Expr
+
object ArithmeticParsers2 extends StandardTokenParsers {
  lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")

  /** Folds a head expression and (operator, operand) pairs into a left-nested BinOp tree. */
  val reduceList: Expr ~ List[String ~ Expr] => Expr = {
    case head ~ tail => tail.foldLeft(head)(reduce)
  }

  /** One fold step: wrap the accumulated tree and the next operand in a BinOp node. */
  def reduce(l: Expr, r: String ~ Expr) = BinOp(r._1, l, r._2)

  /** Turns a numeric-literal token into a leaf node. */
  def mkNum(s: String) = Num(s.toInt)

  /** expr ::= term {("+" | "-") term}, built into a left-associative tree. */
  def expr: Parser[Expr] = term ~ rep(("+" | "-") ~ term) ^^ reduceList
  /** term ::= factor {("*" | "/") factor}, built into a left-associative tree. */
  def term: Parser[Expr] = factor ~ rep(("*" | "/") ~ factor) ^^ reduceList
  /** factor ::= "(" expr ")" | numericLit */
  def factor: Parser[Expr] = "(" ~> expr <~ ")" | numericLit ^^ mkNum

  /** Parses args(0) into an Expr tree, echoing input and result. */
  def main(args: Array[String]): Unit = {
    val parse = phrase(expr)
    val input = args(0)
    println(input)
    println(parse(new lexical.Scanner(input)))
  }
}
+
+
diff --git a/docs/examples/parsing/JSON.scala b/docs/examples/parsing/JSON.scala
new file mode 100755
index 0000000000..bbba25f744
--- /dev/null
+++ b/docs/examples/parsing/JSON.scala
@@ -0,0 +1,44 @@
+package examples.parsing
+
+import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
+
object JSON extends StandardTokenParsers {
  // Structural punctuation plus the three JSON keyword literals.
  lexical.delimiters += ("{", "}", "[", "]", ":", ",")
  lexical.reserved += ("null", "true", "false")

  /** obj ::= "{" [member {"," member}] "}" — raw parse tree, no AST. */
  def obj: Parser[Any] = "{" ~ repsep(member, ",") ~ "}"
  /** arr ::= "[" [value {"," value}] "]" */
  def arr: Parser[Any] = "[" ~ repsep(value, ",") ~ "]"
  /** member ::= ident ":" value */
  def member: Parser[Any] = ident ~ ":" ~ value
  /** value ::= ident | numericLit | obj | arr | "null" | "true" | "false" */
  def value: Parser[Any] =
    ident | numericLit | obj | arr | "null" | "true" | "false"

  /** Parses args(0) as a JSON value, echoing the input and the parse result. */
  def main(args: Array[String]): Unit = {
    val input = args(0)
    println(input)
    println(phrase(value)(new lexical.Scanner(input)))
  }
}
object JSON1 extends StandardTokenParsers {
  lexical.delimiters += ("{", "}", "[", "]", ":", ",")
  lexical.reserved += ("null", "true", "false")

  /** A JSON object becomes an immutable Map from member name to value. */
  def obj: Parser[Map[String, Any]] =
    "{" ~> repsep(member, ",") <~ "}" ^^ (members => Map() ++ members)

  /** A JSON array becomes a List of values. */
  def arr: Parser[List[Any]] =
    "[" ~> repsep(value, ",") <~ "]"

  /** member ::= ident ":" value, yielded as a (name, value) pair. */
  def member: Parser[(String, Any)] =
    ident ~ ":" ~ value ^^ { case key ~ _ ~ v => key -> v }

  /** Values map to Scala data: Int for numbers, Boolean/null for keywords. */
  def value: Parser[Any] = (
      ident
    | numericLit ^^ (_.toInt)
    | obj
    | arr
    | "null" ^^^ null
    | "true" ^^^ true
    | "false" ^^^ false
  )

  /** Parses args(0) as a JSON value, echoing the input and the parse result. */
  def main(args: Array[String]): Unit = {
    val input = args(0)
    println(input)
    println(phrase(value)(new lexical.Scanner(input)))
  }
}
+
diff --git a/docs/examples/parsing/ListParsers.scala b/docs/examples/parsing/ListParsers.scala
new file mode 100644
index 0000000000..f503a0139f
--- /dev/null
+++ b/docs/examples/parsing/ListParsers.scala
@@ -0,0 +1,30 @@
+package examples.parsing
+
+import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
+
object ListParsers extends StandardTokenParsers {
  lexical.delimiters ++= List("(", ")", ",")

  /** expr ::= "(" exprs ")" | ident | numericLit — raw parse tree only. */
  def expr: Parser[Any] = "(" ~ exprs ~ ")" | ident | numericLit
  /** exprs ::= expr {"," expr} */
  def exprs: Parser[Any] = expr ~ rep("," ~ expr)

  /** Parses args(0), echoing the input and the parse result. */
  def main(args: Array[String]): Unit = {
    val input = args(0)
    println(input)
    println(phrase(expr)(new lexical.Scanner(input)))
  }
}
+
object ListParsers1 extends StandardTokenParsers {
  lexical.delimiters ++= List("(", ")", ",")

  /** expr ::= "(" exprs ")" | ident | numericLit — parentheses are dropped. */
  def expr: Parser[Any] = "(" ~> exprs <~ ")" | ident | numericLit

  /** exprs ::= expr {"," expr}, collected into a List with separators dropped. */
  def exprs: Parser[List[Any]] =
    expr ~ rep("," ~> expr) ^^ { case head ~ tail => head :: tail }

  /** Parses args(0), echoing the input and the parse result. */
  def main(args: Array[String]): Unit = {
    val input = args(0)
    println(input)
    println(phrase(expr)(new lexical.Scanner(input)))
  }
}
diff --git a/docs/examples/parsing/MiniML.scala b/docs/examples/parsing/MiniML.scala
new file mode 100644
index 0000000000..ffc7c2ac92
--- /dev/null
+++ b/docs/examples/parsing/MiniML.scala
@@ -0,0 +1,52 @@
+package examples.parsing
+
+import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
+
object MiniML extends StandardTokenParsers {
  lexical.delimiters += ("(", ")", ".", "=")
  lexical.reserved += ("lambda", "let", "in")

  /** expr ::= let-binding | lambda abstraction | application chain (raw parse tree). */
  def expr: Parser[Any] = (
      "let" ~ ident ~ "=" ~ expr ~ "in" ~ expr
    | "lambda" ~ ident ~ "." ~ expr
    | simpleExpr ~ rep(expr)
  )
  /** simpleExpr ::= ident | "(" expr ")" */
  def simpleExpr: Parser[Any] = (
      ident
    | "(" ~ expr ~ ")"
  )

  /** Parses args(0), echoing the input and the parse result. */
  def main(args: Array[String]): Unit = {
    val input = args(0)
    println(input)
    println(phrase(expr)(new lexical.Scanner(input)))
  }
}
+
// AST for MiniML1: let-bindings, lambda abstraction, application, variables.
// `sealed` lets the compiler check match exhaustiveness (all subclasses are
// in this file); `final` prevents further case-class subclassing.
// NOTE(review): ArithmeticParsers.scala declares another `class Expr`
// hierarchy in the same package `examples.parsing` — compiling both example
// files into one build fails with a duplicate definition. Consider one
// sub-package per example, or distinct type names.
sealed class Expr
final case class Let(x: String, expr: Expr, body: Expr) extends Expr
final case class Lambda(x: String, expr: Expr) extends Expr
final case class Apply(fun: Expr, arg: Expr) extends Expr
final case class Var(x: String) extends Expr
+
object MiniML1 extends StandardTokenParsers {
  lexical.delimiters += ("(", ")", ".", "=")
  lexical.reserved += ("lambda", "let", "in")

  /** Builds the Expr AST; applications fold left, so `f a b` is Apply(Apply(f,a),b). */
  def expr: Parser[Expr] = (
      "let" ~ ident ~ "=" ~ expr ~ "in" ~ expr ^^ { case _ ~ x ~ _ ~ e ~ _ ~ b => Let(x, e, b) }
    | "lambda" ~ ident ~ "." ~ expr ^^ { case _ ~ x ~ _ ~ e => Lambda(x, e) }
    | simpleExpr ~ rep(expr) ^^ { case fun ~ args => args.foldLeft(fun)(Apply(_, _)) }
  )
  /** simpleExpr ::= ident | "(" expr ")" — parentheses are dropped. */
  def simpleExpr: Parser[Expr] = (
      ident ^^ (Var(_))
    | "(" ~> expr <~ ")"
  )

  /** Parses args(0) into an Expr tree, echoing the input and the result. */
  def main(args: Array[String]): Unit = {
    val input = args(0)
    println(input)
    println(phrase(expr)(new lexical.Scanner(input)))
  }
}