about | summary | refs | log | tree | commit | diff
path: root/sql/hive/src/main
diff options
context:
space:
mode:
author: Yin Huai <yhuai@databricks.com> 2015-03-21 13:27:53 -0700
committer: Michael Armbrust <michael@databricks.com> 2015-03-21 13:27:53 -0700
commit: 94a102acb80a7c77f57409ece1f8dbbba791b774 (patch)
tree: 334a3c7028af526bfd1954e05eb4e1148cfdb8ab /sql/hive/src/main
parent: ee569a0c7171d149eee52877def902378eaf695e (diff)
download: spark-94a102acb80a7c77f57409ece1f8dbbba791b774.tar.gz
spark-94a102acb80a7c77f57409ece1f8dbbba791b774.tar.bz2
spark-94a102acb80a7c77f57409ece1f8dbbba791b774.zip
[SPARK-6250][SPARK-6146][SPARK-5911][SQL] Types are now reserved words in DDL parser.
This PR creates a trait `DataTypeParser` used to parse data types. This trait aims to be single place to provide the functionality of parsing data types' string representation. It is currently mixed in with `DDLParser` and `SqlParser`. It is also used to parse the data type for `DataFrame.cast` and to convert Hive metastore's data type string back to a `DataType`. JIRA: https://issues.apache.org/jira/browse/SPARK-6250 Author: Yin Huai <yhuai@databricks.com> Closes #5078 from yhuai/ddlKeywords and squashes the following commits: 0e66097 [Yin Huai] Special handle struct<>. fea6012 [Yin Huai] Style. c9733fb [Yin Huai] Create a trait to parse data types.
Diffstat (limited to 'sql/hive/src/main')
-rw-r--r-- sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala | 8
1 file changed, 2 insertions(+), 6 deletions(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index fe86bd206a..949a4e54e6 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -756,7 +756,7 @@ private[hive] case class MetastoreRelation
implicit class SchemaAttribute(f: FieldSchema) {
def toAttribute = AttributeReference(
f.getName,
- sqlContext.ddlParser.parseType(f.getType),
+ HiveMetastoreTypes.toDataType(f.getType),
// Since data can be dumped in randomly with no validation, everything is nullable.
nullable = true
)(qualifiers = Seq(alias.getOrElse(tableName)))
@@ -779,11 +779,7 @@ private[hive] case class MetastoreRelation
private[hive] object HiveMetastoreTypes {
- protected val ddlParser = new DDLParser(HiveQl.parseSql(_))
-
- def toDataType(metastoreType: String): DataType = synchronized {
- ddlParser.parseType(metastoreType)
- }
+ def toDataType(metastoreType: String): DataType = DataTypeParser(metastoreType)
def toMetastoreType(dt: DataType): String = dt match {
case ArrayType(elementType, _) => s"array<${toMetastoreType(elementType)}>"