author    OopsOutOfMemory <victorshengli@126.com>    2015-02-01 18:41:49 -0800
committer Michael Armbrust <michael@databricks.com>  2015-02-01 18:41:58 -0800
commit    1b56f1d6bb079a669ae83e70ee515373ade2a469 (patch)
tree      38399b3dbfab5f5964db3eed41aed6d890ee5b86 /sql
parent    7712ed5b16d809e4cf63285b78f9b65d2588fb21 (diff)
[SPARK-5196][SQL] Support `comment` in Create Table Field DDL
Supports a `comment` clause on a field in CREATE TABLE DDL, e.g.:

CREATE TEMPORARY TABLE people(name string COMMENT "the name of a person")

Author: OopsOutOfMemory <victorshengli@126.com>

Closes #3999 from OopsOutOfMemory/meta_comment and squashes the following commits:

39150d4 [OopsOutOfMemory] add comment and refine test suite
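A minimal usage sketch of the new syntax (assuming a `sqlContext`, the built-in JSON data source, and an existing people.json file; those specifics are illustrative and not part of this patch):

    // Per-field comments declared in the temporary-table DDL.
    sqlContext.sql(
      """CREATE TEMPORARY TABLE people(name string COMMENT "the name of a person", age int)
        |USING org.apache.spark.sql.json
        |OPTIONS (path 'people.json')
      """.stripMargin)

    // Each comment travels with its field as schema metadata under the "comment" key.
    sqlContext.sql("SELECT * FROM people").schema.fields.foreach { f =>
      val c = if (f.metadata.contains("comment")) f.metadata.getString("comment") else "<no comment>"
      println(s"${f.name}: $c")
    }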
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala             | 11
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala  | 20
2 files changed, 28 insertions(+), 3 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
index b4af91a768..b7c721f8c0 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
@@ -27,7 +27,6 @@ import org.apache.spark.sql.execution.RunnableCommand
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
-
/**
* A parser for foreign DDL commands.
*/
@@ -59,6 +58,7 @@ private[sql] class DDLParser extends AbstractSparkSQLParser with Logging {
protected val TABLE = Keyword("TABLE")
protected val USING = Keyword("USING")
protected val OPTIONS = Keyword("OPTIONS")
+ protected val COMMENT = Keyword("COMMENT")
// Data types.
protected val STRING = Keyword("STRING")
@@ -111,8 +111,13 @@ private[sql] class DDLParser extends AbstractSparkSQLParser with Logging {
protected lazy val pair: Parser[(String, String)] = ident ~ stringLit ^^ { case k ~ v => (k,v) }
protected lazy val column: Parser[StructField] =
- ident ~ dataType ^^ { case columnName ~ typ =>
- StructField(columnName, typ)
+ ident ~ dataType ~ (COMMENT ~> stringLit).? ^^ { case columnName ~ typ ~ cm =>
+ val meta = cm match {
+ case Some(comment) =>
+ new MetadataBuilder().putString(COMMENT.str.toLowerCase(), comment).build()
+ case None => Metadata.empty
+ }
+ StructField(columnName, typ, true, meta)
}
protected lazy val primitiveType: Parser[DataType] =
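
For reference, the parser change above carries the comment in the field's metadata; building the equivalent StructField by hand looks roughly like this (a sketch against the public types API, identifiers illustrative):

    import org.apache.spark.sql.types._

    // What the new `column` rule produces for: name string COMMENT "SN"
    val meta  = new MetadataBuilder().putString("comment", "SN").build()
    val field = StructField("name", StringType, nullable = true, meta)

    // Consumers read it back through the field's metadata.
    val comment =
      if (field.metadata.contains("comment")) field.metadata.getString("comment") else ""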
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala
index b1e0919b7a..0a4d4b6342 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala
@@ -344,4 +344,24 @@ class TableScanSuite extends DataSourceTest {
}
assert(schemaNeeded.getMessage.contains("A schema needs to be specified when using"))
}
+
+ test("SPARK-5196 schema field with comment") {
+ sql(
+ """
+ |CREATE TEMPORARY TABLE student(name string comment "SN", age int comment "SA", grade int)
+ |USING org.apache.spark.sql.sources.AllDataTypesScanSource
+ |OPTIONS (
+ | from '1',
+ | to '10'
+ |)
+ """.stripMargin)
+
+ val planned = sql("SELECT * FROM student").queryExecution.executedPlan
+ val comments = planned.schema.fields.map { field =>
+ if (field.metadata.contains("comment")) field.metadata.getString("comment")
+ else "NO_COMMENT"
+ }.mkString(",")
+
+ assert(comments === "SN,SA,NO_COMMENT")
+ }
}