author     Wenchen Fan <wenchen@databricks.com>   2015-11-06 06:38:49 -0800
committer  Reynold Xin <rxin@databricks.com>      2015-11-06 06:38:49 -0800
commit     253e87e8ab8717ffef40a6d0d376b1add155ef90 (patch)
tree       76276c13422fa8c5882df41fc7a6b5d13ab454be
parent     bc5d6c03893a9bd340d6b94d3550e25648412241 (diff)
[SPARK-11453][SQL][FOLLOW-UP] remove DecimalLit
A cleanup for https://github.com/apache/spark/pull/9085. The `DecimalLit` token is very similar to `FloatLit`, so we can keep just one of them. Also added a low-level unit test in `SqlParserSuite`.

Author: Wenchen Fan <wenchen@databricks.com>

Closes #9482 from cloud-fan/parser.
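For context, here is a minimal standalone sketch of the consolidated lexing approach this patch lands on (the object name and the tokens helper are made up for illustration; only the standard scala-parser-combinators library is assumed): a single DecimalLit token covers plain decimal literals and every scientific-notation shape, with the exponent normalized by one scientificNotation rule.

    import scala.util.parsing.combinator.lexical.StdLexical
    import scala.util.parsing.input.Reader

    // Hypothetical standalone lexer mirroring the patched SqlLexical rules.
    object DecimalLexerSketch extends StdLexical {
      case class DecimalLit(chars: String) extends Token {
        override def toString: String = chars
      }

      delimiters += "."  // let a bare '.' lex as a delimiter token

      // 'e' or 'E', an optional sign, and one or more digits,
      // normalized to a lowercase-"e" suffix such as "e-1".
      private lazy val scientificNotation: Parser[String] =
        (elem('e') | elem('E')) ~> (elem('+') | elem('-')).? ~ rep1(digit) ^^ {
          case sign ~ digits => "e" + sign.mkString + digits.mkString
        }

      override lazy val token: Parser[Token] =
        ( rep1(digit) ~ scientificNotation ^^                    // "900e-1"
            { case i ~ s => DecimalLit(i.mkString + s) }
        | '.' ~> (rep1(digit) ~ scientificNotation) ^^           // ".9e+2"
            { case i ~ s => DecimalLit("0." + i.mkString + s) }
        | rep1(digit) ~ ('.' ~> digit.*) ~ scientificNotation ^^ // "9.0e1", "9.e+1"
            { case i1 ~ i2 ~ s => DecimalLit(i1.mkString + "." + i2.mkString + s) }
        | rep1(digit) ~ ('.' ~> digit.*).? ^^ {                  // "90", "9.0"
            case i ~ None    => NumericLit(i.mkString)
            case i ~ Some(d) => DecimalLit(i.mkString + "." + d.mkString)
          }
        | super.token                                            // identifiers, strings, ...
        )

      // Run the scanner to the end of the input and collect the tokens.
      def tokens(input: String): List[Token] = {
        var reader: Reader[Token] = new Scanner(input)
        val buf = List.newBuilder[Token]
        while (!reader.atEnd) { buf += reader.first; reader = reader.rest }
        buf.result()
      }
    }

With this sketch, tokens("9.0e1") yields a single DecimalLit(9.0e1) while tokens("90") still yields a plain NumericLit; the point of the follow-up is that one token class carries every non-integral shape, so SqlParser no longer needs separate FloatLit and DecimalLit branches.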
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala | 23
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala              | 20
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala         | 21
3 files changed, 35 insertions(+), 29 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
index 04ac4f20c6..bdc52c08ac 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
@@ -78,10 +78,6 @@ private[sql] abstract class AbstractSparkSQLParser
}
class SqlLexical extends StdLexical {
- case class FloatLit(chars: String) extends Token {
- override def toString: String = chars
- }
-
case class DecimalLit(chars: String) extends Token {
override def toString: String = chars
}
@@ -106,17 +102,16 @@ class SqlLexical extends StdLexical {
}
override lazy val token: Parser[Token] =
- ( rep1(digit) ~ ('.' ~> digit.*).? ~ (exp ~> sign.? ~ rep1(digit)) ^^ {
- case i ~ None ~ (sig ~ rest) =>
- DecimalLit(i.mkString + "e" + sig.mkString + rest.mkString)
- case i ~ Some(d) ~ (sig ~ rest) =>
- DecimalLit(i.mkString + "." + d.mkString + "e" + sig.mkString + rest.mkString)
- }
+ ( rep1(digit) ~ scientificNotation ^^ { case i ~ s => DecimalLit(i.mkString + s) }
+ | '.' ~> (rep1(digit) ~ scientificNotation) ^^
+ { case i ~ s => DecimalLit("0." + i.mkString + s) }
+ | rep1(digit) ~ ('.' ~> digit.*) ~ scientificNotation ^^
+ { case i1 ~ i2 ~ s => DecimalLit(i1.mkString + "." + i2.mkString + s) }
| digit.* ~ identChar ~ (identChar | digit).* ^^
{ case first ~ middle ~ rest => processIdent((first ++ (middle :: rest)).mkString) }
| rep1(digit) ~ ('.' ~> digit.*).? ^^ {
case i ~ None => NumericLit(i.mkString)
- case i ~ Some(d) => FloatLit(i.mkString + "." + d.mkString)
+ case i ~ Some(d) => DecimalLit(i.mkString + "." + d.mkString)
}
| '\'' ~> chrExcept('\'', '\n', EofCh).* <~ '\'' ^^
{ case chars => StringLit(chars mkString "") }
@@ -133,8 +128,10 @@ class SqlLexical extends StdLexical {
override def identChar: Parser[Elem] = letter | elem('_')
- private lazy val sign: Parser[Elem] = elem("s", c => c == '+' || c == '-')
- private lazy val exp: Parser[Elem] = elem("e", c => c == 'E' || c == 'e')
+ private lazy val scientificNotation: Parser[String] =
+ (elem('e') | elem('E')) ~> (elem('+') | elem('-')).? ~ rep1(digit) ^^ {
+ case s ~ rest => "e" + s.mkString + rest.mkString
+ }
override def whitespace: Parser[Any] =
( whitespaceChar
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
index 440e9e28fa..cd717c09f8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
@@ -334,27 +334,15 @@ object SqlParser extends AbstractSparkSQLParser with DataTypeParser {
protected lazy val numericLiteral: Parser[Literal] =
( integral ^^ { case i => Literal(toNarrowestIntegerType(i)) }
- | sign.? ~ unsignedFloat ^^ {
- case s ~ f => Literal(toDecimalOrDouble(s.getOrElse("") + f))
- }
- | sign.? ~ unsignedDecimal ^^ {
- case s ~ d => Literal(toDecimalOrDouble(s.getOrElse("") + d))
- }
+ | sign.? ~ unsignedFloat ^^
+ { case s ~ f => Literal(toDecimalOrDouble(s.getOrElse("") + f)) }
)
protected lazy val unsignedFloat: Parser[String] =
( "." ~> numericLit ^^ { u => "0." + u }
- | elem("decimal", _.isInstanceOf[lexical.FloatLit]) ^^ (_.chars)
+ | elem("decimal", _.isInstanceOf[lexical.DecimalLit]) ^^ (_.chars)
)
- protected lazy val unsignedDecimal: Parser[String] =
- ( "." ~> decimalLit ^^ { u => "0." + u }
- | elem("scientific_notation", _.isInstanceOf[lexical.DecimalLit]) ^^ (_.chars)
- )
-
- def decimalLit: Parser[String] =
- elem("scientific_notation", _.isInstanceOf[lexical.DecimalLit]) ^^ (_.chars)
-
protected lazy val sign: Parser[String] = ("+" | "-")
protected lazy val integral: Parser[String] =
@@ -477,7 +465,7 @@ object SqlParser extends AbstractSparkSQLParser with DataTypeParser {
protected lazy val baseExpression: Parser[Expression] =
( "*" ^^^ UnresolvedStar(None)
- | (ident <~ "."). + <~ "*" ^^ { case target => UnresolvedStar(Option(target))}
+ | rep1(ident <~ ".") <~ "*" ^^ { case target => UnresolvedStar(Option(target))}
| primary
)
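A side note on the baseExpression hunk: the change from the postfix form to rep1 is purely cosmetic, since rep1(p) is the method form of the postfix p.+ combinator; the qualified-star rule still parses one or more dotted qualifiers before the *. A small standalone sketch of that shape (the object and rule names are made up; plain scala-parser-combinators again):

    import scala.util.parsing.combinator.RegexParsers

    // Hypothetical illustration of the qualified-star production, e.g. "db.tbl.*".
    object QualifiedStarSketch extends RegexParsers {
      val ident: Parser[String] = "[a-zA-Z_][a-zA-Z0-9_]*".r

      // Equivalent to the old `(ident <~ "."). + <~ "*"`, without postfix syntax.
      val qualifiedStar: Parser[List[String]] = rep1(ident <~ ".") <~ "*"
    }

    // QualifiedStarSketch.parseAll(QualifiedStarSketch.qualifiedStar, "db.tbl.*")
    // succeeds with List("db", "tbl").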
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala
index ea28bfa021..9ff893b847 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala
@@ -126,4 +126,25 @@ class SqlParserSuite extends PlanTest {
checkSingleUnit("13.123456789", "second")
checkSingleUnit("-13.123456789", "second")
}
+
+ test("support scientific notation") {
+ def assertRight(input: String, output: Double): Unit = {
+ val parsed = SqlParser.parse("SELECT " + input)
+ val expected = Project(
+ UnresolvedAlias(
+ Literal(output)
+ ) :: Nil,
+ OneRowRelation)
+ comparePlans(parsed, expected)
+ }
+
+ assertRight("9.0e1", 90)
+ assertRight(".9e+2", 90)
+ assertRight("0.9e+2", 90)
+ assertRight("900e-1", 90)
+ assertRight("900.0E-1", 90)
+ assertRight("9.e+1", 90)
+
+ intercept[RuntimeException](SqlParser.parse("SELECT .e3"))
+ }
}
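A closing note on the .e3 case exercised by the new test: under the patched rules an exponent marker must be preceded by at least one digit, so ".e3" can never form a single numeric token. With the hypothetical DecimalLexerSketch from above:

    // "." lexes as a delimiter and "e3" as an ordinary identifier, so the
    // grammar finds no numeric literal and "SELECT .e3" fails to parse.
    DecimalLexerSketch.tokens(".e3")  // a '.' delimiter token, then identifier e3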