author     sureshthalamati <suresh.thalamati@gmail.com>  2017-01-20 19:23:20 -0800
committer  gatorsmile <gatorsmile@gmail.com>             2017-01-20 19:23:20 -0800
commit     f174cdc7478d0b81f9cfa896284a5ec4c6bb952d (patch)
tree       850c7483cc164e6cd833663c89d90c5b1818f38c /external
parent     54268b42dcd6b5f6ef65227eed9855d1a032826f (diff)
[SPARK-14536][SQL] Fix handling of null values in array type columns for Postgres

## What changes were proposed in this pull request?

A JDBC read fails with an NPE because the array data type path has no null check when the source table contains null values in an array type column: for a SQL NULL, ResultSet.getArray() returns null. This PR adds a null-safe check on the ResultSet.getArray() result before invoking methods on the returned Array object.

## How was this patch tested?

Updated PostgresIntegrationSuite to cover null values, and ran the docker integration tests locally.

Author: sureshthalamati <suresh.thalamati@gmail.com>

Closes #15192 from sureshthalamati/jdbc_array_null_fix-SPARK-14536.
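The essence of the change is a null guard around the ResultSet.getArray() call (the production fix itself lives outside the external/ tree shown in the diff below). A minimal sketch of that pattern, assuming a reference-typed array column; the helper name readArrayColumn is hypothetical, not the actual Spark code:

```scala
import java.sql.ResultSet

// Hypothetical helper illustrating the null-safe pattern described above.
def readArrayColumn(rs: ResultSet, pos: Int): Array[AnyRef] = {
  val sqlArray = rs.getArray(pos) // the driver returns null for a SQL NULL array
  if (sqlArray == null) {
    null // propagate the null instead of dereferencing it and hitting an NPE
  } else {
    // For reference-typed elements, JDBC drivers hand back an Object[] here.
    sqlArray.getArray.asInstanceOf[Array[AnyRef]]
  }
}
```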
Diffstat (limited to 'external')
-rw-r--r--  external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala | 12
1 file changed, 10 insertions(+), 2 deletions(-)
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
index c9325dea0b..a1a065a443 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
@@ -51,12 +51,17 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
       + "B'1000100101', E'\\\\xDEADBEEF', true, '172.16.0.42', '192.168.0.0/16', "
       + """'{1, 2}', '{"a", null, "b"}', '{0.11, 0.22}', '{0.11, 0.22}', 'd1', 1.01, 1)"""
     ).executeUpdate()
+    conn.prepareStatement("INSERT INTO bar VALUES (null, null, null, null, null, "
+      + "null, null, null, null, null, "
+      + "null, null, null, null, null, null, null)"
+    ).executeUpdate()
   }
 
   test("Type mapping for various types") {
     val df = sqlContext.read.jdbc(jdbcUrl, "bar", new Properties)
-    val rows = df.collect()
-    assert(rows.length == 1)
+    val rows = df.collect().sortBy(_.toString())
+    assert(rows.length == 2)
+    // Test the types, and values using the first row.
     val types = rows(0).toSeq.map(x => x.getClass)
     assert(types.length == 17)
     assert(classOf[String].isAssignableFrom(types(0)))
@@ -96,6 +101,9 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
     assert(rows(0).getString(14) == "d1")
     assert(rows(0).getFloat(15) == 1.01f)
     assert(rows(0).getShort(16) == 1)
+
+    // Test reading null values using the second row.
+    assert(0.until(16).forall(rows(1).isNullAt(_)))
   }
 
   test("Basic write test") {