aboutsummaryrefslogtreecommitdiff
path: root/sql/catalyst/src/test
diff options
context:
space:
mode:
authorHerman van Hovell <hvanhovell@databricks.com>2016-09-03 19:02:20 +0200
committerHerman van Hovell <hvanhovell@databricks.com>2016-09-03 19:02:20 +0200
commitc2a1576c230697f56f282b6388c79835377e0f2f (patch)
tree9d870d3a6366c728a9058d67c3bb1bb123859828 /sql/catalyst/src/test
parenta8a35b39b92fc9000eaac102c67c66be30b05e54 (diff)
downloadspark-c2a1576c230697f56f282b6388c79835377e0f2f.tar.gz
spark-c2a1576c230697f56f282b6388c79835377e0f2f.tar.bz2
spark-c2a1576c230697f56f282b6388c79835377e0f2f.zip
[SPARK-17335][SQL] Fix ArrayType and MapType CatalogString.
## What changes were proposed in this pull request? The `catalogString` for `ArrayType` and `MapType` currently calls the `simpleString` method on its children. This is a problem when the child is a struct: the `struct.simpleString` implementation truncates the number of fields it shows (25 at max). This breaks the generation of a proper `catalogString`, and has been shown to cause errors while writing to Hive. This PR fixes this by providing proper `catalogString` implementations for `ArrayType` and `MapType`. ## How was this patch tested? Added testing for `catalogString` to `DataTypeSuite`. Author: Herman van Hovell <hvanhovell@databricks.com> Closes #14938 from hvanhovell/SPARK-17335.
Diffstat (limited to 'sql/catalyst/src/test')
-rw-r--r--sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala30
1 file changed, 30 insertions, 0 deletions
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
index 688bc3e602..b8ab9a9963 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
@@ -18,6 +18,7 @@
package org.apache.spark.sql.types
import org.apache.spark.{SparkException, SparkFunSuite}
+import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
class DataTypeSuite extends SparkFunSuite {
@@ -359,4 +360,33 @@ class DataTypeSuite extends SparkFunSuite {
StructField("a", StringType, nullable = false) ::
StructField("b", StringType, nullable = false) :: Nil),
expected = false)
+
+ def checkCatalogString(dt: DataType): Unit = {
+ test(s"catalogString: $dt") {
+ val dt2 = CatalystSqlParser.parseDataType(dt.catalogString)
+ assert(dt === dt2)
+ }
+ }
+ def createStruct(n: Int): StructType = new StructType(Array.tabulate(n) {
+ i => StructField(s"col$i", IntegerType, nullable = true)
+ })
+
+ checkCatalogString(BooleanType)
+ checkCatalogString(ByteType)
+ checkCatalogString(ShortType)
+ checkCatalogString(IntegerType)
+ checkCatalogString(LongType)
+ checkCatalogString(FloatType)
+ checkCatalogString(DoubleType)
+ checkCatalogString(DecimalType(10, 5))
+ checkCatalogString(BinaryType)
+ checkCatalogString(StringType)
+ checkCatalogString(DateType)
+ checkCatalogString(TimestampType)
+ checkCatalogString(createStruct(4))
+ checkCatalogString(createStruct(40))
+ checkCatalogString(ArrayType(IntegerType))
+ checkCatalogString(ArrayType(createStruct(40)))
+ checkCatalogString(MapType(IntegerType, StringType))
+ checkCatalogString(MapType(IntegerType, createStruct(40)))
}