author     Wenchen Fan <wenchen@databricks.com>    2016-06-23 20:20:55 -0700
committer  Herman van Hovell <hvanhovell@databricks.com>    2016-06-23 20:20:55 -0700
commit     6a3c6276f5cef26b0a4fef44c8ad99bbecfe006d (patch)
tree       8eba70e6206821a4f569f5b1b1c1278848ad6d35 /sql
parent     cc71d4fa372f6eb187c68dbd8358de4003ace3fe (diff)
[SQL][MINOR] ParserUtils.operationNotAllowed should throw exception directly
## What changes were proposed in this pull request?

It's weird that `ParserUtils.operationNotAllowed` returns an exception and requires the caller to throw it. This patch makes the helper throw the `ParseException` itself, declaring its return type as `Nothing`, so call sites can drop the redundant `throw`.

## How was this patch tested?

N/A

Author: Wenchen Fan <wenchen@databricks.com>

Closes #13874 from cloud-fan/style.
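For context, here is a minimal, self-contained sketch of the Scala pattern this patch adopts (not Spark code; the names `fail` and `parseMode` are hypothetical stand-ins). Because `Nothing` is a subtype of every type, a helper declared to return `Nothing` type-checks in any expression position, so callers never need their own `throw`:

```scala
object NothingReturnSketch {
  // Hypothetical stand-in for ParserUtils.operationNotAllowed: it never
  // returns normally, which the `Nothing` return type encodes for the compiler.
  def fail(message: String): Nothing =
    throw new UnsupportedOperationException(s"Operation not allowed: $message")

  // Nothing <: Int, so the last case type-checks without a `throw`,
  // even though the match as a whole must produce an Int.
  def parseMode(mode: String): Int = mode match {
    case "strict"  => 0
    case "lenient" => 1
    case other     => fail(s"mode '$other'")
  }

  def main(args: Array[String]): Unit = {
    println(parseMode("strict")) // prints 0
    try parseMode("bogus")
    catch { case e: UnsupportedOperationException => println(e.getMessage) }
  }
}
```

This is why every `throw operationNotAllowed(...)` call site in the diff below becomes a bare `operationNotAllowed(...)`, including the ones inside `match` arms and `if` branches that must produce a value.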
Diffstat (limited to 'sql')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParserUtils.scala |  4
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala        | 62
2 files changed, 33 insertions, 33 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParserUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParserUtils.scala
index 9619884ede..b04ce58e23 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParserUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParserUtils.scala
@@ -39,8 +39,8 @@ object ParserUtils {
stream.getText(Interval.of(0, stream.size()))
}
- def operationNotAllowed(message: String, ctx: ParserRuleContext): ParseException = {
- new ParseException(s"Operation not allowed: $message", ctx)
+ def operationNotAllowed(message: String, ctx: ParserRuleContext): Nothing = {
+ throw new ParseException(s"Operation not allowed: $message", ctx)
}
/** Check if duplicate keys exist in a set of key-value pairs. */
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 2ae8380644..066ff57721 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -168,7 +168,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
val lookupTable = Option(ctx.db) match {
case None => table
case Some(db) if table.database.exists(_ != db) =>
- throw operationNotAllowed(
+ operationNotAllowed(
s"SHOW COLUMNS with conflicting databases: '$db' != '${table.database.get}'",
ctx)
case Some(db) => TableIdentifier(table.identifier, Some(db.getText))
@@ -253,10 +253,10 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
*/
override def visitExplain(ctx: ExplainContext): LogicalPlan = withOrigin(ctx) {
if (ctx.FORMATTED != null) {
- throw operationNotAllowed("EXPLAIN FORMATTED", ctx)
+ operationNotAllowed("EXPLAIN FORMATTED", ctx)
}
if (ctx.LOGICAL != null) {
- throw operationNotAllowed("EXPLAIN LOGICAL", ctx)
+ operationNotAllowed("EXPLAIN LOGICAL", ctx)
}
val statement = plan(ctx.statement)
@@ -304,7 +304,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
val temporary = ctx.TEMPORARY != null
val ifNotExists = ctx.EXISTS != null
if (temporary && ifNotExists) {
- throw operationNotAllowed("CREATE TEMPORARY TABLE ... IF NOT EXISTS", ctx)
+ operationNotAllowed("CREATE TEMPORARY TABLE ... IF NOT EXISTS", ctx)
}
(visitTableIdentifier(ctx.tableIdentifier), temporary, ifNotExists, ctx.EXTERNAL != null)
}
@@ -317,7 +317,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
override def visitCreateTableUsing(ctx: CreateTableUsingContext): LogicalPlan = withOrigin(ctx) {
val (table, temp, ifNotExists, external) = visitCreateTableHeader(ctx.createTableHeader)
if (external) {
- throw operationNotAllowed("CREATE EXTERNAL TABLE ... USING", ctx)
+ operationNotAllowed("CREATE EXTERNAL TABLE ... USING", ctx)
}
val options = Option(ctx.tablePropertyList).map(visitPropertyKeyValues).getOrElse(Map.empty)
val provider = ctx.tableProvider.qualifiedName.getText
@@ -332,7 +332,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
val query = plan(ctx.query)
if (temp) {
- throw operationNotAllowed("CREATE TEMPORARY TABLE ... USING ... AS query", ctx)
+ operationNotAllowed("CREATE TEMPORARY TABLE ... USING ... AS query", ctx)
}
// Determine the storage mode.
@@ -428,7 +428,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
val props = visitTablePropertyList(ctx)
val badKeys = props.filter { case (_, v) => v == null }.keys
if (badKeys.nonEmpty) {
- throw operationNotAllowed(
+ operationNotAllowed(
s"Values must be specified for key(s): ${badKeys.mkString("[", ",", "]")}", ctx)
}
props
@@ -441,7 +441,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
val props = visitTablePropertyList(ctx)
val badKeys = props.filter { case (_, v) => v != null }.keys
if (badKeys.nonEmpty) {
- throw operationNotAllowed(
+ operationNotAllowed(
s"Values should not be specified for key(s): ${badKeys.mkString("[", ",", "]")}", ctx)
}
props.keys.toSeq
@@ -564,7 +564,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
case "jar" | "file" | "archive" =>
FunctionResource(FunctionResourceType.fromString(resourceType), string(resource.STRING))
case other =>
- throw operationNotAllowed(s"CREATE FUNCTION with resource type '$resourceType'", ctx)
+ operationNotAllowed(s"CREATE FUNCTION with resource type '$resourceType'", ctx)
}
}
@@ -600,7 +600,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
*/
override def visitDropTable(ctx: DropTableContext): LogicalPlan = withOrigin(ctx) {
if (ctx.PURGE != null) {
- throw operationNotAllowed("DROP TABLE ... PURGE", ctx)
+ operationNotAllowed("DROP TABLE ... PURGE", ctx)
}
DropTableCommand(
visitTableIdentifier(ctx.tableIdentifier),
@@ -692,7 +692,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
override def visitAddTablePartition(
ctx: AddTablePartitionContext): LogicalPlan = withOrigin(ctx) {
if (ctx.VIEW != null) {
- throw operationNotAllowed("ALTER VIEW ... ADD PARTITION", ctx)
+ operationNotAllowed("ALTER VIEW ... ADD PARTITION", ctx)
}
// Create partition spec to location mapping.
val specsAndLocs = if (ctx.partitionSpec.isEmpty) {
@@ -743,10 +743,10 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
override def visitDropTablePartitions(
ctx: DropTablePartitionsContext): LogicalPlan = withOrigin(ctx) {
if (ctx.VIEW != null) {
- throw operationNotAllowed("ALTER VIEW ... DROP PARTITION", ctx)
+ operationNotAllowed("ALTER VIEW ... DROP PARTITION", ctx)
}
if (ctx.PURGE != null) {
- throw operationNotAllowed("ALTER TABLE ... DROP PARTITION ... PURGE", ctx)
+ operationNotAllowed("ALTER TABLE ... DROP PARTITION ... PURGE", ctx)
}
AlterTableDropPartitionCommand(
visitTableIdentifier(ctx.tableIdentifier),
@@ -789,7 +789,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
.map { orderedIdCtx =>
Option(orderedIdCtx.ordering).map(_.getText).foreach { dir =>
if (dir.toLowerCase != "asc") {
- throw operationNotAllowed(s"Column ordering must be ASC, was '$dir'", ctx)
+ operationNotAllowed(s"Column ordering must be ASC, was '$dir'", ctx)
}
}
@@ -825,7 +825,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
// SET ROLE is the exception to the rule, because we handle this before other SET commands.
"SET ROLE"
}
- throw operationNotAllowed(keywords, ctx)
+ operationNotAllowed(keywords, ctx)
}
/**
@@ -844,7 +844,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
ctx.identifier.getText.toLowerCase match {
case "file" => AddFileCommand(mayebePaths)
case "jar" => AddJarCommand(mayebePaths)
- case other => throw operationNotAllowed(s"ADD with resource type '$other'", ctx)
+ case other => operationNotAllowed(s"ADD with resource type '$other'", ctx)
}
case SqlBaseParser.LIST =>
ctx.identifier.getText.toLowerCase match {
@@ -860,9 +860,9 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
} else {
ListJarsCommand()
}
- case other => throw operationNotAllowed(s"LIST with resource type '$other'", ctx)
+ case other => operationNotAllowed(s"LIST with resource type '$other'", ctx)
}
- case _ => throw operationNotAllowed(s"Other types of operation on resources", ctx)
+ case _ => operationNotAllowed(s"Other types of operation on resources", ctx)
}
}
@@ -898,10 +898,10 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
"Please use CREATE TEMPORARY VIEW as an alternative.", ctx)
}
if (ctx.skewSpec != null) {
- throw operationNotAllowed("CREATE TABLE ... SKEWED BY", ctx)
+ operationNotAllowed("CREATE TABLE ... SKEWED BY", ctx)
}
if (ctx.bucketSpec != null) {
- throw operationNotAllowed("CREATE TABLE ... CLUSTERED BY", ctx)
+ operationNotAllowed("CREATE TABLE ... CLUSTERED BY", ctx)
}
val comment = Option(ctx.STRING).map(string)
val partitionCols = Option(ctx.partitionColumns).toSeq.flatMap(visitCatalogColumns)
@@ -915,14 +915,14 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
val duplicateColumns = colNames.groupBy(identity).collect {
case (x, ys) if ys.length > 1 => "\"" + x + "\""
}
- throw operationNotAllowed(s"Duplicated column names found in table definition of $name: " +
+ operationNotAllowed(s"Duplicated column names found in table definition of $name: " +
duplicateColumns.mkString("[", ",", "]"), ctx)
}
// For Hive tables, partition columns must not be part of the schema
val badPartCols = partitionCols.map(_.name).toSet.intersect(colNames.toSet)
if (badPartCols.nonEmpty) {
- throw operationNotAllowed(s"Partition columns may not be specified in the schema: " +
+ operationNotAllowed(s"Partition columns may not be specified in the schema: " +
badPartCols.map("\"" + _ + "\"").mkString("[", ",", "]"), ctx)
}
@@ -954,7 +954,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
val location = Option(ctx.locationSpec).map(visitLocationSpec)
// If we are creating an EXTERNAL table, then the LOCATION field is required
if (external && location.isEmpty) {
- throw operationNotAllowed("CREATE EXTERNAL TABLE must be accompanied by LOCATION", ctx)
+ operationNotAllowed("CREATE EXTERNAL TABLE must be accompanied by LOCATION", ctx)
}
val storage = CatalogStorageFormat(
locationUri = location,
@@ -985,7 +985,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
case Some(q) =>
// Just use whatever is projected in the select statement as our schema
if (schema.nonEmpty) {
- throw operationNotAllowed(
+ operationNotAllowed(
"Schema may not be specified in a Create Table As Select (CTAS) statement",
ctx)
}
@@ -996,7 +996,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
"Please use the syntax of \"CREATE TABLE tableName USING dataSource " +
"OPTIONS (...) PARTITIONED BY ...\" to create a partitioned table through a " +
"CTAS statement."
- throw operationNotAllowed(errorMessage, ctx)
+ operationNotAllowed(errorMessage, ctx)
}
val hasStorageProperties = (ctx.createFileFormat != null) || (ctx.rowFormat != null)
@@ -1055,7 +1055,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
case (c: GenericFileFormatContext, null) =>
visitGenericFileFormat(c)
case (null, storageHandler) =>
- throw operationNotAllowed("STORED BY", ctx)
+ operationNotAllowed("STORED BY", ctx)
case _ =>
throw new ParseException("Expected either STORED AS or STORED BY, not both", ctx)
}
@@ -1084,7 +1084,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
outputFormat = s.outputFormat,
serde = s.serde)
case None =>
- throw operationNotAllowed(s"STORED AS with file format '$source'", ctx)
+ operationNotAllowed(s"STORED AS with file format '$source'", ctx)
}
}
@@ -1174,14 +1174,14 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
ffGeneric.identifier.getText.toLowerCase match {
case ("sequencefile" | "textfile" | "rcfile") => // OK
case fmt =>
- throw operationNotAllowed(
+ operationNotAllowed(
s"ROW FORMAT SERDE is incompatible with format '$fmt', which also specifies a serde",
parentCtx)
}
case (rfDelimited: RowFormatDelimitedContext, ffGeneric: GenericFileFormatContext) =>
ffGeneric.identifier.getText.toLowerCase match {
case "textfile" => // OK
- case fmt => throw operationNotAllowed(
+ case fmt => operationNotAllowed(
s"ROW FORMAT DELIMITED is only compatible with 'textfile', not '$fmt'", parentCtx)
}
case _ =>
@@ -1189,7 +1189,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
def str(ctx: ParserRuleContext): String = {
(0 until ctx.getChildCount).map { i => ctx.getChild(i).getText }.mkString(" ")
}
- throw operationNotAllowed(
+ operationNotAllowed(
s"Unexpected combination of ${str(rowFormatCtx)} and ${str(createFileFormatCtx)}",
parentCtx)
}
@@ -1209,7 +1209,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
*/
override def visitCreateView(ctx: CreateViewContext): LogicalPlan = withOrigin(ctx) {
if (ctx.identifierList != null) {
- throw operationNotAllowed("CREATE VIEW ... PARTITIONED ON", ctx)
+ operationNotAllowed("CREATE VIEW ... PARTITIONED ON", ctx)
} else {
val identifiers = Option(ctx.identifierCommentList).toSeq.flatMap(_.identifierComment.asScala)
val schema = identifiers.map { ic =>