author     Herman van Hovell <hvanhovell@databricks.com>  2016-08-16 21:35:39 -0700
committer  Reynold Xin <rxin@databricks.com>  2016-08-16 21:35:39 -0700
commit     4a2c375be2bcd98cc7e00bea920fd6a0f68a4e14 (patch)
tree       1db9080a62e4a7972155a42a0526f6b8e63ba777
parent     e28a8c5899c48ff065e2fd3bb6b10c82b4d39c2c (diff)
[SPARK-17084][SQL] Rename ParserUtils.assert to validate
## What changes were proposed in this pull request?

This PR renames `ParserUtils.assert` to `ParserUtils.validate`. The method is used to check requirements on parsed input, not to check whether the program is in an invalid state, so `validate` describes its purpose more accurately.

## How was this patch tested?

This is a simple rename, so a successful compilation is sufficient.

Author: Herman van Hovell <hvanhovell@databricks.com>

Closes #14665 from hvanhovell/SPARK-17084.
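For context, the distinction driving the rename can be sketched in a few lines of Scala. This is an illustration, not code from the patch: the simplified `ParseException` is a stand-in for Spark's real one, and `ValidateSketch` is a hypothetical object.

```scala
// Illustration only: a hypothetical, simplified stand-in for Spark's ParseException.
class ParseException(message: String) extends Exception(message)

object ValidateSketch {
  // A requirement check on user-supplied input: failure means the input is
  // invalid and should surface as a parse error the user can act on.
  def validate(condition: => Boolean, message: String): Unit =
    if (!condition) throw new ParseException(message)

  def main(args: Array[String]): Unit = {
    val fraction = 1.5
    // scala.Predef.assert would throw AssertionError, signalling broken
    // program state; validate instead reports the bad input as a parse error.
    validate(fraction >= 0.0 && fraction <= 1.0,
      s"Sampling fraction ($fraction) must be on interval [0, 1]")
  }
}
```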
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala   | 14
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParserUtils.scala  |  4
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala         |  5
3 files changed, 11 insertions(+), 12 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 25c8445b4d..09b650ce18 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -132,7 +132,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
// Build the insert clauses.
val inserts = ctx.multiInsertQueryBody.asScala.map {
body =>
- assert(body.querySpecification.fromClause == null,
+ validate(body.querySpecification.fromClause == null,
"Multi-Insert queries cannot have a FROM clause in their individual SELECT statements",
body)
@@ -596,7 +596,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
// function takes X PERCENT as the input and the range of X is [0, 100], we need to
// adjust the fraction.
val eps = RandomSampler.roundingEpsilon
- assert(fraction >= 0.0 - eps && fraction <= 1.0 + eps,
+ validate(fraction >= 0.0 - eps && fraction <= 1.0 + eps,
s"Sampling fraction ($fraction) must be on interval [0, 1]",
ctx)
Sample(0.0, fraction, withReplacement = false, (math.random * 1000).toInt, query)(true)
@@ -664,7 +664,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
// Get the backing expressions.
val expressions = ctx.expression.asScala.map { eCtx =>
val e = expression(eCtx)
- assert(e.foldable, "All expressions in an inline table must be constants.", eCtx)
+ validate(e.foldable, "All expressions in an inline table must be constants.", eCtx)
e
}
@@ -686,7 +686,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
val baseAttributes = structType.toAttributes.map(_.withNullability(true))
val attributes = if (ctx.identifierList != null) {
val aliases = visitIdentifierList(ctx.identifierList)
- assert(aliases.size == baseAttributes.size,
+ validate(aliases.size == baseAttributes.size,
"Number of aliases must match the number of fields in an inline table.", ctx)
baseAttributes.zip(aliases).map(p => p._1.withName(p._2))
} else {
@@ -1094,7 +1094,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
// We currently only allow foldable integers.
def value: Int = {
val e = expression(ctx.expression)
- assert(e.resolved && e.foldable && e.dataType == IntegerType,
+ validate(e.resolved && e.foldable && e.dataType == IntegerType,
"Frame bound value must be a constant integer.",
ctx)
e.eval().asInstanceOf[Int]
@@ -1347,7 +1347,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
*/
override def visitInterval(ctx: IntervalContext): Literal = withOrigin(ctx) {
val intervals = ctx.intervalField.asScala.map(visitIntervalField)
- assert(intervals.nonEmpty, "at least one time unit should be given for interval literal", ctx)
+ validate(intervals.nonEmpty, "at least one time unit should be given for interval literal", ctx)
Literal(intervals.reduce(_.add(_)))
}
@@ -1374,7 +1374,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
case (from, Some(t)) =>
throw new ParseException(s"Intervals FROM $from TO $t are not supported.", ctx)
}
- assert(interval != null, "No interval can be constructed", ctx)
+ validate(interval != null, "No interval can be constructed", ctx)
interval
} catch {
// Handle Exceptions thrown by CalendarInterval
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParserUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParserUtils.scala
index b04ce58e23..bc35ae2f55 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParserUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParserUtils.scala
@@ -77,8 +77,8 @@ object ParserUtils {
Origin(Option(token.getLine), Option(token.getCharPositionInLine))
}
- /** Assert if a condition holds. If it doesn't throw a parse exception. */
- def assert(f: => Boolean, message: String, ctx: ParserRuleContext): Unit = {
+ /** Validate the condition. If it doesn't hold, throw a parse exception. */
+ def validate(f: => Boolean, message: String, ctx: ParserRuleContext): Unit = {
if (!f) {
throw new ParseException(message, ctx)
}
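After this change a call site reads as a requirement check on the parsed input. A minimal sketch of using the renamed helper, assuming the real `ParserUtils.validate` shown above and an ANTLR `ParserRuleContext` obtained during parsing; the `AliasCheckSketch` object and `checkAliases` helper are hypothetical, mirroring the inline-table check in `AstBuilder`:

```scala
import org.antlr.v4.runtime.ParserRuleContext
import org.apache.spark.sql.catalyst.parser.ParserUtils.validate

// Hypothetical helper: a failed requirement surfaces as a ParseException
// carrying the parse context, so the error can point at the offending SQL.
object AliasCheckSketch {
  def checkAliases(aliases: Seq[String], fields: Seq[String], ctx: ParserRuleContext): Unit = {
    validate(aliases.size == fields.size,
      "Number of aliases must match the number of fields in an inline table.", ctx)
  }
}
```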
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 9da2b5a254..71c3bd31e0 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -18,7 +18,6 @@
package org.apache.spark.sql.execution
import scala.collection.JavaConverters._
-import scala.util.Try
import org.antlr.v4.runtime.{ParserRuleContext, Token}
import org.antlr.v4.runtime.tree.TerminalNode
@@ -799,7 +798,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
}
/**
- * Create an [[AlterTableDiscoverPartitionsCommand]] command
+ * Create an [[AlterTableRecoverPartitionsCommand]] command
*
* For example:
* {{{
@@ -1182,7 +1181,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
entry("mapkey.delim", ctx.keysTerminatedBy) ++
Option(ctx.linesSeparatedBy).toSeq.map { token =>
val value = string(token)
- assert(
+ validate(
value == "\n",
s"LINES TERMINATED BY only supports newline '\\n' right now: $value",
ctx)