about summary refs log tree commit diff
path: root/sql/catalyst
diff options
context:
space:
mode:
authorscwf <wangfei1@huawei.com>2015-01-10 14:08:04 -0800
committerMichael Armbrust <michael@databricks.com>2015-01-10 14:08:04 -0800
commitb3e86dc62476abb03b330f86a788aa19a6565317 (patch)
tree9a3f25d6e6e4bd92d8c14a711d9bd0c1d57f38ac /sql/catalyst
parent693a323a70aba91e6c100dd5561d218a75b7895e (diff)
downloadspark-b3e86dc62476abb03b330f86a788aa19a6565317.tar.gz
spark-b3e86dc62476abb03b330f86a788aa19a6565317.tar.bz2
spark-b3e86dc62476abb03b330f86a788aa19a6565317.zip
[SPARK-4861][SQL] Refactory command in spark sql
Follow up for #3712. This PR finally remove ```CommandStrategy``` and make all commands follow ```RunnableCommand``` so they can go with ```case r: RunnableCommand => ExecutedCommand(r) :: Nil```. One exception is the ```DescribeCommand``` of hive, which is a special case and need to distinguish hive table and temporary table, so still keep ```HiveCommandStrategy``` here. Author: scwf <wangfei1@huawei.com> Closes #3948 from scwf/followup-SPARK-4861 and squashes the following commits: 6b48e64 [scwf] minor style fix 2c62e9d [scwf] fix for hive module 5a7a819 [scwf] Refactory command in spark sql
Diffstat (limited to 'sql/catalyst')
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala (renamed from sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SparkSQLParser.scala)69
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/commands.scala48
2 files changed, 1 insertions, 116 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SparkSQLParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
index f1a1ca6616..93d74adbcc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SparkSQLParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
@@ -105,72 +105,3 @@ class SqlLexical(val keywords: Seq[String]) extends StdLexical {
}
}
}
-
-/**
- * The top level Spark SQL parser. This parser recognizes syntaxes that are available for all SQL
- * dialects supported by Spark SQL, and delegates all the other syntaxes to the `fallback` parser.
- *
- * @param fallback A function that parses an input string to a logical plan
- */
-private[sql] class SparkSQLParser(fallback: String => LogicalPlan) extends AbstractSparkSQLParser {
-
- // A parser for the key-value part of the "SET [key = [value ]]" syntax
- private object SetCommandParser extends RegexParsers {
- private val key: Parser[String] = "(?m)[^=]+".r
-
- private val value: Parser[String] = "(?m).*$".r
-
- private val pair: Parser[LogicalPlan] =
- (key ~ ("=".r ~> value).?).? ^^ {
- case None => SetCommand(None)
- case Some(k ~ v) => SetCommand(Some(k.trim -> v.map(_.trim)))
- }
-
- def apply(input: String): LogicalPlan = parseAll(pair, input) match {
- case Success(plan, _) => plan
- case x => sys.error(x.toString)
- }
- }
-
- protected val AS = Keyword("AS")
- protected val CACHE = Keyword("CACHE")
- protected val LAZY = Keyword("LAZY")
- protected val SET = Keyword("SET")
- protected val TABLE = Keyword("TABLE")
- protected val UNCACHE = Keyword("UNCACHE")
-
- protected implicit def asParser(k: Keyword): Parser[String] =
- lexical.allCaseVersions(k.str).map(x => x : Parser[String]).reduce(_ | _)
-
- private val reservedWords: Seq[String] =
- this
- .getClass
- .getMethods
- .filter(_.getReturnType == classOf[Keyword])
- .map(_.invoke(this).asInstanceOf[Keyword].str)
-
- override val lexical = new SqlLexical(reservedWords)
-
- override protected lazy val start: Parser[LogicalPlan] = cache | uncache | set | others
-
- private lazy val cache: Parser[LogicalPlan] =
- CACHE ~> LAZY.? ~ (TABLE ~> ident) ~ (AS ~> restInput).? ^^ {
- case isLazy ~ tableName ~ plan =>
- CacheTableCommand(tableName, plan.map(fallback), isLazy.isDefined)
- }
-
- private lazy val uncache: Parser[LogicalPlan] =
- UNCACHE ~ TABLE ~> ident ^^ {
- case tableName => UncacheTableCommand(tableName)
- }
-
- private lazy val set: Parser[LogicalPlan] =
- SET ~> restInput ^^ {
- case input => SetCommandParser(input)
- }
-
- private lazy val others: Parser[LogicalPlan] =
- wholeInput ^^ {
- case input => fallback(input)
- }
-}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/commands.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/commands.scala
index 5a1863953e..45905f8ef9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/commands.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/commands.scala
@@ -17,8 +17,7 @@
package org.apache.spark.sql.catalyst.plans.logical
-import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
-import org.apache.spark.sql.catalyst.types.StringType
+import org.apache.spark.sql.catalyst.expressions.Attribute
/**
* A logical node that represents a non-query command to be executed by the system. For example,
@@ -28,48 +27,3 @@ abstract class Command extends LeafNode {
self: Product =>
def output: Seq[Attribute] = Seq.empty
}
-
-/**
- *
- * Commands of the form "SET [key [= value] ]".
- */
-case class SetCommand(kv: Option[(String, Option[String])]) extends Command {
- override def output = Seq(
- AttributeReference("", StringType, nullable = false)())
-}
-
-/**
- * Returned by a parser when the users only wants to see what query plan would be executed, without
- * actually performing the execution.
- */
-case class ExplainCommand(plan: LogicalPlan, extended: Boolean = false) extends Command {
- override def output =
- Seq(AttributeReference("plan", StringType, nullable = false)())
-}
-
-/**
- * Returned for the "CACHE TABLE tableName [AS SELECT ...]" command.
- */
-case class CacheTableCommand(tableName: String, plan: Option[LogicalPlan], isLazy: Boolean)
- extends Command
-
-/**
- * Returned for the "UNCACHE TABLE tableName" command.
- */
-case class UncacheTableCommand(tableName: String) extends Command
-
-/**
- * Returned for the "DESCRIBE [EXTENDED] [dbName.]tableName" command.
- * @param table The table to be described.
- * @param isExtended True if "DESCRIBE EXTENDED" is used. Otherwise, false.
- * It is effective only when the table is a Hive table.
- */
-case class DescribeCommand(
- table: LogicalPlan,
- isExtended: Boolean) extends Command {
- override def output = Seq(
- // Column names are based on Hive.
- AttributeReference("col_name", StringType, nullable = false)(),
- AttributeReference("data_type", StringType, nullable = false)(),
- AttributeReference("comment", StringType, nullable = false)())
-}