aboutsummaryrefslogtreecommitdiff
path: root/sql/core/src/test
diff options
context:
space:
mode:
authorTakeshi Yamamuro <yamamuro@apache.org>2017-02-22 21:39:20 -0800
committerWenchen Fan <wenchen@databricks.com>2017-02-22 21:39:20 -0800
commit769aa0f1d22d3c6d4c7871468344d82c8dc36260 (patch)
tree0f90fe73597bf9821fcde910cd17f51aea566849 /sql/core/src/test
parent66c4b79afd3aad4c1f9446b641e28d5513cbdf5f (diff)
downloadspark-769aa0f1d22d3c6d4c7871468344d82c8dc36260.tar.gz
spark-769aa0f1d22d3c6d4c7871468344d82c8dc36260.tar.bz2
spark-769aa0f1d22d3c6d4c7871468344d82c8dc36260.zip
[SPARK-19695][SQL] Throw an exception if a `columnNameOfCorruptRecord` field violates requirements in json formats
## What changes were proposed in this pull request? This PR comes from #16928 and fixes a JSON behaviour along with the CSV one. ## How was this patch tested? Added tests in `JsonSuite`. Author: Takeshi Yamamuro <yamamuro@apache.org> Closes #17023 from maropu/SPARK-19695.
Diffstat (limited to 'sql/core/src/test')
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala31
1 file changed, 31 insertions, 0 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
index 05aa2ab2ce..0e01be2410 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
@@ -1944,4 +1944,35 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
assert(exceptionTwo.getMessage.contains("Malformed line in FAILFAST mode"))
}
}
+
+ test("Throw an exception if a `columnNameOfCorruptRecord` field violates requirements") {
+ val columnNameOfCorruptRecord = "_unparsed"
+ val schema = StructType(
+ StructField(columnNameOfCorruptRecord, IntegerType, true) ::
+ StructField("a", StringType, true) ::
+ StructField("b", StringType, true) ::
+ StructField("c", StringType, true) :: Nil)
+ val errMsg = intercept[AnalysisException] {
+ spark.read
+ .option("mode", "PERMISSIVE")
+ .option("columnNameOfCorruptRecord", columnNameOfCorruptRecord)
+ .schema(schema)
+ .json(corruptRecords)
+ }.getMessage
+ assert(errMsg.startsWith("The field for corrupt records must be string type and nullable"))
+
+ withTempPath { dir =>
+ val path = dir.getCanonicalPath
+ corruptRecords.toDF("value").write.text(path)
+ val errMsg = intercept[AnalysisException] {
+ spark.read
+ .option("mode", "PERMISSIVE")
+ .option("columnNameOfCorruptRecord", columnNameOfCorruptRecord)
+ .schema(schema)
+ .json(path)
+ .collect
+ }.getMessage
+ assert(errMsg.startsWith("The field for corrupt records must be string type and nullable"))
+ }
+ }
}