about summary refs log tree commit diff
path: root/sql/catalyst/src
diff options
context:
space:
mode:
Diffstat (limited to 'sql/catalyst/src')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala      |  2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala        |  2
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala  | 30
3 files changed, 34 insertions, 0 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
index 520e344361..82a03b0afc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
@@ -77,6 +77,8 @@ case class ArrayType(elementType: DataType, containsNull: Boolean) extends DataT
override def simpleString: String = s"array<${elementType.simpleString}>"
+ override def catalogString: String = s"array<${elementType.catalogString}>"
+
override def sql: String = s"ARRAY<${elementType.sql}>"
override private[spark] def asNullable: ArrayType =
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
index 454ea403ba..178960929b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
@@ -64,6 +64,8 @@ case class MapType(
override def simpleString: String = s"map<${keyType.simpleString},${valueType.simpleString}>"
+ override def catalogString: String = s"map<${keyType.catalogString},${valueType.catalogString}>"
+
override def sql: String = s"MAP<${keyType.sql}, ${valueType.sql}>"
override private[spark] def asNullable: MapType =
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
index 688bc3e602..b8ab9a9963 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
@@ -18,6 +18,7 @@
package org.apache.spark.sql.types
import org.apache.spark.{SparkException, SparkFunSuite}
+import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
class DataTypeSuite extends SparkFunSuite {
@@ -359,4 +360,33 @@ class DataTypeSuite extends SparkFunSuite {
StructField("a", StringType, nullable = false) ::
StructField("b", StringType, nullable = false) :: Nil),
expected = false)
+
+ def checkCatalogString(dt: DataType): Unit = {
+ test(s"catalogString: $dt") {
+ val dt2 = CatalystSqlParser.parseDataType(dt.catalogString)
+ assert(dt === dt2)
+ }
+ }
+ def createStruct(n: Int): StructType = new StructType(Array.tabulate(n) {
+ i => StructField(s"col$i", IntegerType, nullable = true)
+ })
+
+ checkCatalogString(BooleanType)
+ checkCatalogString(ByteType)
+ checkCatalogString(ShortType)
+ checkCatalogString(IntegerType)
+ checkCatalogString(LongType)
+ checkCatalogString(FloatType)
+ checkCatalogString(DoubleType)
+ checkCatalogString(DecimalType(10, 5))
+ checkCatalogString(BinaryType)
+ checkCatalogString(StringType)
+ checkCatalogString(DateType)
+ checkCatalogString(TimestampType)
+ checkCatalogString(createStruct(4))
+ checkCatalogString(createStruct(40))
+ checkCatalogString(ArrayType(IntegerType))
+ checkCatalogString(ArrayType(createStruct(40)))
+ checkCatalogString(MapType(IntegerType, StringType))
+ checkCatalogString(MapType(IntegerType, createStruct(40)))
}