From 5b1da4217f7f36eab1ba14b5b95667de5bda09ed Mon Sep 17 00:00:00 2001
From: Tiark Rompf
Date: Wed, 6 May 2009 14:51:51 +0000
Subject: packrat parsing

---
 test/files/run/packrat1.check |  7 ++++++
 test/files/run/packrat1.scala | 48 +++++++++++++++++++++++++++++++++++
 test/files/run/packrat2.check |  7 ++++++
 test/files/run/packrat2.scala | 58 +++++++++++++++++++++++++++++++++++++++++++
 test/files/run/packrat3.check |  7 ++++++
 test/files/run/packrat3.scala | 52 ++++++++++++++++++++++++++++++++++++++
 6 files changed, 179 insertions(+)
 create mode 100644 test/files/run/packrat1.check
 create mode 100644 test/files/run/packrat1.scala
 create mode 100644 test/files/run/packrat2.check
 create mode 100644 test/files/run/packrat2.scala
 create mode 100644 test/files/run/packrat3.check
 create mode 100644 test/files/run/packrat3.scala

diff --git a/test/files/run/packrat1.check b/test/files/run/packrat1.check
new file mode 100644
index 0000000000..e9f797e1b6
--- /dev/null
+++ b/test/files/run/packrat1.check
@@ -0,0 +1,7 @@
+1
+3
+5
+81
+4
+37
+9
diff --git a/test/files/run/packrat1.scala b/test/files/run/packrat1.scala
new file mode 100644
index 0000000000..69eb8c5fc9
--- /dev/null
+++ b/test/files/run/packrat1.scala
@@ -0,0 +1,48 @@
+import scala.util.parsing.combinator._
+
+import scala.util.parsing.combinator.syntactical.StandardTokenParsers
+import scala.util.parsing.input._
+import scala.util.parsing.syntax._
+
+import scala.collection.mutable.HashMap
+
+object Test extends Application{
+  import grammars._
+
+  val head = phrase(term)
+
+  println(extractResult(head(new lexical.Scanner("1"))))
+  println(extractResult(head(new lexical.Scanner("1+2"))))
+  println(extractResult(head(new lexical.Scanner("9-4"))))
+  println(extractResult(head(new lexical.Scanner("9*9"))))
+  println(extractResult(head(new lexical.Scanner("8/2"))))
+  println(extractResult(head(new lexical.Scanner("4*9-0/7+9-8*1"))))
+  println(extractResult(head(new lexical.Scanner("(1+2)*3"))))
+}
+
+object grammars extends StandardTokenParsers with PackratParsers{
+
+  def extractResult(r : ParseResult[_]) = r match{
+    case Success(a,_) => a
+    case Failure(a,_) => a
+    case Error(a,_) => a
+  }
+
+  lexical.delimiters ++= List("+","-","*","/","(",")")
+  lexical.reserved ++= List("Hello","World")
+
+  /****
+   * term = term + fact | term - fact | fact
+   * fact = fact * num | fact / num | num
+   */
+
+
+  val term: PackratParser[Int] = (term~("+"~>fact) ^^ {case x~y => x+y}
+                                 |term~("-"~>fact) ^^ {case x~y => x-y}
+                                 |fact)
+
+  val fact: PackratParser[Int] = (fact~("*"~>numericLit) ^^ {case x~y => x*y.toInt}
+                                 |fact~("/"~>numericLit) ^^ {case x~y => x/y.toInt}
+                                 |"("~>term<~")"
+                                 |numericLit ^^ {_.toInt})
+ }
diff --git a/test/files/run/packrat2.check b/test/files/run/packrat2.check
new file mode 100644
index 0000000000..55a32ac58b
--- /dev/null
+++ b/test/files/run/packrat2.check
@@ -0,0 +1,7 @@
+1
+3
+81
+43
+59
+188
+960
diff --git a/test/files/run/packrat2.scala b/test/files/run/packrat2.scala
new file mode 100644
index 0000000000..3361552561
--- /dev/null
+++ b/test/files/run/packrat2.scala
@@ -0,0 +1,58 @@
+import scala.util.parsing.combinator._
+
+import scala.util.parsing.combinator.syntactical.StandardTokenParsers
+import scala.util.parsing.input._
+import scala.util.parsing.syntax._
+
+import scala.collection.mutable.HashMap
+
+object Test extends Application{
+  import grammars2._
+
+  val head = phrase(exp)
+
+  println(extractResult(head(new lexical.Scanner("1"))))
+  println(extractResult(head(new lexical.Scanner("1+2"))))
+  println(extractResult(head(new lexical.Scanner("9*9"))))
+  println(extractResult(head(new lexical.Scanner("4*9+7"))))
+  println(extractResult(head(new lexical.Scanner("4*9+7*2+3*3"))))
+  println(extractResult(head(new lexical.Scanner("4*9+7*2+3*3+9*5+7*6*2"))))
+  println(extractResult(head(new lexical.Scanner("4*(9+7)*(2+3)*3"))))
+
+}
+
+object grammars2 extends StandardTokenParsers with PackratParsers{
+
+  def extractResult(r : ParseResult[_]) = r match{
+    case Success(a,_) => a
+    case Failure(a,_) => a
+    case Error(a,_) => a
+  }
+
+  lexical.delimiters ++= List("+","-","*","/","(",")")
+  lexical.reserved ++= List("Hello","World")
+
+  /*
+   * exp = sum | prod | num
+   * sum = exp ~ "+" ~ num
+   * prod = exp ~ "*" ~ num
+   */
+
+  val exp : PackratParser[Int] = sum | prod | numericLit ^^{_.toInt} | "("~>exp<~")"
+  val sum : PackratParser[Int] = exp~("+"~>exp) ^^ {case x~y => x+y}
+  val prod: PackratParser[Int] = exp~("*"~>(numericLit ^^{_.toInt} | exp)) ^^ {case x~y => x*y}
+
+
+  /* lexical.reserved ++= List("a","b", "c")
+  val a : PackratParser[Any] = numericLit^^{x => primeFactors(x.toInt)}
+  val b : PackratParser[Any] = memo("b")
+  val c : PackratParser[Any] = memo("c")
+  val AnBnCn : PackratParser[Any] =
+    parseButDontEat(repMany1(a,b))~not(b)~>rep1(a)~repMany1(b,c)// ^^{case x~y => x:::y}
+  //val c : PackratParser[Any] = parseButDontEat(a)~a~a
+  //println(c((new PackratReader(new lexical.Scanner("45 24")))))
+  val r = new PackratReader(new lexical.Scanner("45 b c"))
+  println(AnBnCn(r))
+  println(r.getCache.size)
+*/
+}
\ No newline at end of file
diff --git a/test/files/run/packrat3.check b/test/files/run/packrat3.check
new file mode 100644
index 0000000000..4d84623ce6
--- /dev/null
+++ b/test/files/run/packrat3.check
@@ -0,0 +1,7 @@
+(((List(a, b)~())~List(a))~List(b, c))
+(((List(a, a, b, b)~())~List(a, a))~List(b, b, c, c))
+(((List(a, a, a, b, b, b)~())~List(a, a, a))~List(b, b, b, c, c, c))
+(((List(a, a, a, a, b, b, b, b)~())~List(a, a, a, a))~List(b, b, b, b, c, c, c, c))
+Expected failure
+``b'' expected but `c' found
+``c'' expected but EOF found
diff --git a/test/files/run/packrat3.scala b/test/files/run/packrat3.scala
new file mode 100644
index 0000000000..34695ef2ed
--- /dev/null
+++ b/test/files/run/packrat3.scala
@@ -0,0 +1,52 @@
+import scala.util.parsing.combinator._
+
+import scala.util.parsing.combinator.syntactical.StandardTokenParsers
+import scala.util.parsing.input._
+import scala.util.parsing.syntax._
+
+import scala.collection.mutable.HashMap
+
+object Test {
+  def main(args: Array[String]): Unit = {
+    import grammars3._
+
+    val head = phrase(AnBnCn)
+
+    println(extractResult(head(new lexical.Scanner("a b c"))))
+    println(extractResult(head(new lexical.Scanner("a a b b c c"))))
+    println(extractResult(head(new lexical.Scanner("a a a b b b c c c"))))
+    println(extractResult(head(new lexical.Scanner("a a a a b b b b c c c c"))))
+
+    println(extractResult(head(new lexical.Scanner("a a a b b b b c c c c"))))
+    println(extractResult(head(new lexical.Scanner("a a a a b b b c c c c"))))
+    println(extractResult(head(new lexical.Scanner("a a a a b b b b c c c"))))
+  }
+}
+
+object grammars3 extends StandardTokenParsers with PackratParsers {
+
+  def extractResult(r: ParseResult[_]) = r match {
+    case Success(a,_) => a
+    case Failure(a,_) => a
+    case Error(a,_) => a
+  }
+
+
+  lexical.reserved ++= List("a","b", "c")
+  val a: PackratParser[Any] = memo("a")
+  val b: PackratParser[Any] = memo("b")
+  val c: PackratParser[Any] = memo("c")
+
+  val AnBnCn: PackratParser[Any] =
+    guard(repMany1(a,b) ~ not(b)) ~ rep1(a) ~ repMany1(b,c)// ^^{case x~y => x:::y}
+
+
+  private def repMany[T](p: => Parser[T], q: => Parser[T]): Parser[List[T]] =
+    ( p~repMany(p,q)~q ^^ {case x~xs~y => x::xs:::(y::Nil)}
+    | success(Nil)
+    )
+
+  def repMany1[T](p: => Parser[T], q: => Parser[T]): Parser[List[T]] =
+    p~opt(repMany(p,q))~q ^^ {case x~Some(xs)~y => x::xs:::(y::Nil)}
+
+}
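Context for the tests above: all three exercise what PackratParsers adds over plain recursive-descent combinators, namely memoized parsing that also accepts left-recursive productions such as term = term + fact | term - fact | fact (packrat1 and packrat2), and, through guard/not, the context-sensitive language a^n b^n c^n (packrat3). The following standalone sketch is not part of the patch; it restates the packrat1 grammar on top of RegexParsers instead of the StandardTokenParsers lexer used by the tests, and the object name and entry point are illustrative assumptions only.

import scala.util.parsing.combinator.{PackratParsers, RegexParsers}

// Minimal sketch (assumed setup, not taken from the commit): the packrat1
// arithmetic grammar rewritten over regex literals. PackratParsers memoizes
// intermediate results in a PackratReader, which is what lets the
// left-recursive `term` and `fact` rules terminate instead of looping.
object LeftRecursionDemo extends RegexParsers with PackratParsers {

  lazy val term: PackratParser[Int] =
    ( term ~ ("+" ~> fact) ^^ { case x ~ y => x + y }
    | term ~ ("-" ~> fact) ^^ { case x ~ y => x - y }
    | fact )

  lazy val fact: PackratParser[Int] =
    ( fact ~ ("*" ~> num) ^^ { case x ~ y => x * y }
    | fact ~ ("/" ~> num) ^^ { case x ~ y => x / y }
    | "(" ~> term <~ ")"
    | num )

  lazy val num: PackratParser[Int] = """\d+""".r ^^ { _.toInt }

  def main(args: Array[String]): Unit = {
    // parseAll goes through phrase, which in PackratParsers wraps the input
    // in a PackratReader, enabling memoization and left-recursion support.
    println(parseAll(term, "4*9-0/7+9-8*1")) // a Success carrying 37, as in packrat1.check
  }
}

Without the PackratParsers mixin, the same left-recursive definitions would call themselves before consuming any input and overflow the stack, which is why the tests mix PackratParsers into StandardTokenParsers rather than using the plain combinators.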