diff options
author | Reynold Xin <rxin@databricks.com> | 2016-01-18 17:10:32 -0800 |
---|---|---|
committer | Reynold Xin <rxin@databricks.com> | 2016-01-18 17:10:32 -0800 |
commit | 39ac56fc60734d0e095314fc38a7b36fbb4c80f7 (patch) | |
tree | 3fda9e402c94b26bd9847261827d02e3a88a9c4a /sql/core | |
parent | 721845c1b64fd6e3b911bd77c94e01dc4e5fd102 (diff) | |
download | spark-39ac56fc60734d0e095314fc38a7b36fbb4c80f7.tar.gz spark-39ac56fc60734d0e095314fc38a7b36fbb4c80f7.tar.bz2 spark-39ac56fc60734d0e095314fc38a7b36fbb4c80f7.zip |
[SPARK-12889][SQL] Rename ParserDialect -> ParserInterface.
Based on discussions in #10801, I'm submitting a pull request to rename ParserDialect to ParserInterface.
Author: Reynold Xin <rxin@databricks.com>
Closes #10817 from rxin/SPARK-12889.
Diffstat (limited to 'sql/core')
3 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala index b8c8c78b91..147e3557b6 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala @@ -203,7 +203,7 @@ class SQLContext private[sql]( protected[sql] lazy val optimizer: Optimizer = new SparkOptimizer(this) @transient - protected[sql] val sqlParser: ParserDialect = new SparkSQLParser(new SparkQl(conf)) + protected[sql] val sqlParser: ParserInterface = new SparkSQLParser(new SparkQl(conf)) @transient protected[sql] val ddlParser: DDLParser = new DDLParser(sqlParser) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSQLParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSQLParser.scala index 1af2c756cd..d2d8271563 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSQLParser.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSQLParser.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.execution import scala.util.parsing.combinator.RegexParsers -import org.apache.spark.sql.catalyst.{AbstractSparkSQLParser, ParserDialect, TableIdentifier} +import org.apache.spark.sql.catalyst.{AbstractSparkSQLParser, ParserInterface, TableIdentifier} import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, Expression} import org.apache.spark.sql.catalyst.plans.logical import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan @@ -33,7 +33,7 @@ import org.apache.spark.sql.types.StringType * parameter because this allows us to return a different dialect if we * have to. 
*/ -class SparkSQLParser(fallback: => ParserDialect) extends AbstractSparkSQLParser { +class SparkSQLParser(fallback: => ParserInterface) extends AbstractSparkSQLParser { override def parseExpression(sql: String): Expression = fallback.parseExpression(sql) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DDLParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DDLParser.scala index 4dea947f6a..f4766b0370 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DDLParser.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DDLParser.scala @@ -22,7 +22,7 @@ import scala.util.matching.Regex import org.apache.spark.Logging import org.apache.spark.sql.SaveMode -import org.apache.spark.sql.catalyst.{AbstractSparkSQLParser, ParserDialect, TableIdentifier} +import org.apache.spark.sql.catalyst.{AbstractSparkSQLParser, ParserInterface, TableIdentifier} import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation import org.apache.spark.sql.catalyst.expressions.Expression import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan @@ -32,7 +32,7 @@ import org.apache.spark.sql.types._ /** * A parser for foreign DDL commands. */ -class DDLParser(fallback: => ParserDialect) +class DDLParser(fallback: => ParserInterface) extends AbstractSparkSQLParser with DataTypeParser with Logging { override def parseExpression(sql: String): Expression = fallback.parseExpression(sql) |