author      Wenchen Fan <wenchen@databricks.com>    2017-03-20 21:43:14 -0700
committer   Xiao Li <gatorsmile@gmail.com>          2017-03-20 21:43:14 -0700
commit      68d65fae71e475ad811a9716098aca03a2af9532 (patch)
tree        8eb62ef41f500b43cdfe1325c35dc39498841020 /sql/core/src/test/scala
parent      21e366aea5a7f49e42e78dce06ff6b3ee1e36f06 (diff)
[SPARK-19949][SQL] unify bad record handling in CSV and JSON
## What changes were proposed in this pull request?

Currently JSON and CSV have exactly the same logic for handling bad records. This PR abstracts that logic and moves it to an upper level to reduce code duplication. The overall idea is that the JSON and CSV parsers throw a BadRecordException, and the upper level, FailureSafeParser, handles bad records according to the parse mode.

Behavior changes:
1. In PERMISSIVE mode, if the number of tokens doesn't match the schema, the CSV parser previously treated the record as legal and parsed as many tokens as possible. After this PR, we treat it as an illegal record and put the raw record string in a special column, while still parsing as many tokens as possible.
2. All logging is removed, as it was not very useful in practice.

## How was this patch tested?

Existing tests.

Author: Wenchen Fan <wenchen@databricks.com>
Author: hyukjinkwon <gurwls223@gmail.com>
Author: Wenchen Fan <cloud0fan@gmail.com>

Closes #17315 from cloud-fan/bad-record2.
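For readers skimming the description above, here is a minimal, self-contained Scala sketch of the pattern the commit describes. Only the names BadRecordException and FailureSafeParser come from the commit message; the simplified Row stand-in, field shapes, and mode handling are illustrative assumptions and do not match Spark's internal signatures.

```scala
// Sketch only: BadRecordException and FailureSafeParser are the names used in
// the commit; the Row stand-in, field shapes and mode handling below are
// illustrative assumptions, not Spark's actual internal API.
sealed trait ParseMode
case object Permissive    extends ParseMode
case object DropMalformed extends ParseMode
case object FailFast      extends ParseMode

// Stand-in for Spark's internal row type, just for this sketch.
final case class Row(values: Seq[Any], corruptRecord: Option[String] = None)

// The CSV/JSON parsers throw this instead of deciding what to do with a bad
// record themselves; partialResult carries the tokens that did parse.
final case class BadRecordException(
    record: String,
    partialResult: Option[Row],
    cause: Throwable) extends Exception(cause)

// The upper level decides, per parse mode, what a bad record turns into.
class FailureSafeParser[IN](rawParser: IN => Row, mode: ParseMode) {
  def parse(input: IN): Option[Row] =
    try Some(rawParser(input)) catch {
      case BadRecordException(record, partial, cause) => mode match {
        // PERMISSIVE: keep the partially parsed tokens and store the raw
        // record string in the special corrupt-record column.
        case Permissive =>
          Some(partial.getOrElse(Row(Nil)).copy(corruptRecord = Some(record)))
        // DROPMALFORMED: silently skip the record.
        case DropMalformed => None
        // FAILFAST: fail immediately, surfacing the parser's own error.
        case FailFast =>
          throw new RuntimeException(s"Malformed record: $record", cause)
      }
    }
}
```

With this split, FAILFAST surfaces the underlying parser's error rather than a fixed "Malformed line in FAILFAST mode" message, which is why the assertions in the diff below now match on parser-level strings.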
Diffstat (limited to 'sql/core/src/test/scala')
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala    2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala  8
2 files changed, 5 insertions(+), 5 deletions(-)
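To make the assertion changes below concrete, here is a hypothetical reader-side view of the new behavior. The file path and schema are made up for illustration; only the mode and columnNameOfCorruptRecord options and the expected error messages come from this patch and Spark's documented CSV/JSON reader options.

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{IntegerType, StringType, StructType}

val spark = SparkSession.builder().master("local[*]").getOrCreate()

// Hypothetical schema for a cars CSV file like the one used in CSVSuite.
val dataSchema = new StructType()
  .add("year", IntegerType)
  .add("make", StringType)
  .add("model", StringType)

// PERMISSIVE: a line whose token count does not match the schema is now
// treated as malformed; its raw text is kept in the corrupt-record column
// while the tokens that did parse are still filled in.
spark.read
  .schema(dataSchema.add("_corrupt_record", StringType))
  .option("mode", "PERMISSIVE")
  .option("columnNameOfCorruptRecord", "_corrupt_record")
  .csv("/path/to/cars.csv")   // hypothetical path
  .show(truncate = false)

// FAILFAST: the SparkException now wraps the parser's own error, which is why
// the assertions below match on "Malformed CSV record", "JsonParseException"
// and "Failed to parse a value" rather than the old fixed message.
spark.read
  .schema(dataSchema)
  .option("mode", "FAILFAST")
  .csv("/path/to/cars.csv")   // hypothetical path
  .collect()
```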
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
index 95dfdf5b29..598babfe0e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
@@ -293,7 +293,7 @@ class CSVSuite extends QueryTest with SharedSQLContext with SQLTestUtils {
.load(testFile(carsFile)).collect()
}
- assert(exception.getMessage.contains("Malformed line in FAILFAST mode: 2015,Chevy,Volt"))
+ assert(exception.getMessage.contains("Malformed CSV record"))
}
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
index 9b0efcbdaf..56fcf773f7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
@@ -1043,7 +1043,7 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
.json(corruptRecords)
.collect()
}
- assert(exceptionOne.getMessage.contains("Malformed line in FAILFAST mode: {"))
+ assert(exceptionOne.getMessage.contains("JsonParseException"))
val exceptionTwo = intercept[SparkException] {
spark.read
@@ -1052,7 +1052,7 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
.json(corruptRecords)
.collect()
}
- assert(exceptionTwo.getMessage.contains("Malformed line in FAILFAST mode: {"))
+ assert(exceptionTwo.getMessage.contains("JsonParseException"))
}
test("Corrupt records: DROPMALFORMED mode") {
@@ -1929,7 +1929,7 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
.json(path)
.collect()
}
- assert(exceptionOne.getMessage.contains("Malformed line in FAILFAST mode"))
+ assert(exceptionOne.getMessage.contains("Failed to parse a value"))
val exceptionTwo = intercept[SparkException] {
spark.read
@@ -1939,7 +1939,7 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
.json(path)
.collect()
}
- assert(exceptionTwo.getMessage.contains("Malformed line in FAILFAST mode"))
+ assert(exceptionTwo.getMessage.contains("Failed to parse a value"))
}
}