author     Daoyuan Wang <daoyuan.wang@intel.com>    2015-05-07 10:05:01 -0700
committer  Yin Huai <yhuai@databricks.com>          2015-05-07 10:05:01 -0700
commit     ed9be06a4797bbb678355b361054c8872ac20b75 (patch)
tree       b1e1ab669b38f0babb1b7bcebddd9b255b9715c0 /sql
parent     4f87e9562aa0dfe5467d7fbaba9278213106377c (diff)
[SPARK-7330] [SQL] avoid NPE at jdbc rdd
Thanks to nadavoosh for pointing this out in #5590.

Author: Daoyuan Wang <daoyuan.wang@intel.com>

Closes #5877 from adrian-wang/jdbcrdd and squashes the following commits:

cc11900 [Daoyuan Wang] avoid NPE in jdbcrdd
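For context, the NPE occurs when a nullable DATE column holds SQL NULL: java.sql.ResultSet.getDate then returns null, and converting that value unconditionally blows up. Below is a minimal, self-contained sketch of the failure and of the guard the patch adds; daysSinceEpoch is a hypothetical stand-in for Spark's DateUtils.fromJavaDate (which likewise dereferences its argument), not the real helper:

object NullDateSketch {
  // Hypothetical stand-in for DateUtils.fromJavaDate: convert a java.sql.Date
  // to days since the Unix epoch. Like the real helper, it calls a method on
  // its argument, so passing null throws a NullPointerException.
  def daysSinceEpoch(date: java.sql.Date): Int =
    (date.getTime / (24L * 60 * 60 * 1000)).toInt

  def main(args: Array[String]): Unit = {
    val fromDb: java.sql.Date = null   // what ResultSet.getDate returns for SQL NULL
    // daysSinceEpoch(fromDb)          // pre-patch behaviour: throws NullPointerException
    val guarded: Any =                 // post-patch behaviour: NULL is stored as null
      if (fromDb != null) daysSinceEpoch(fromDb) else null
    println(guarded)                   // prints: null
  }
}

The patch applies exactly this kind of check in JDBCRDD's DateConversion branch, matching the null handling already present for DecimalConversion.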
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala   |  8
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala | 25
2 files changed, 32 insertions(+), 1 deletion(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
index 325a326e2b..1a5083dbe0 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
@@ -362,7 +362,13 @@ private[sql] class JDBCRDD(
           conversions(i) match {
             case BooleanConversion => mutableRow.setBoolean(i, rs.getBoolean(pos))
             case DateConversion =>
-              mutableRow.update(i, DateUtils.fromJavaDate(rs.getDate(pos)))
+              // DateUtils.fromJavaDate does not handle null value, so we need to check it.
+              val dateVal = rs.getDate(pos)
+              if (dateVal != null) {
+                mutableRow.update(i, DateUtils.fromJavaDate(dateVal))
+              } else {
+                mutableRow.update(i, null)
+              }
             case DecimalConversion =>
               val decimalVal = rs.getBigDecimal(pos)
               if (decimalVal == null) {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index 3ec17d37c0..021affafe3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -104,6 +104,8 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
     ).executeUpdate()
     conn.prepareStatement("insert into test.timetypes values ('12:34:56', "
       + "'1996-01-01', '2002-02-20 11:22:33.543543543')").executeUpdate()
+    conn.prepareStatement("insert into test.timetypes values ('12:34:56', "
+      + "null, '2002-02-20 11:22:33.543543543')").executeUpdate()
     conn.commit()
     sql(
       s"""
@@ -127,6 +129,23 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
         |OPTIONS (url '$url', dbtable 'TEST.FLTTYPES', user 'testUser', password 'testPass')
       """.stripMargin.replaceAll("\n", " "))
+    conn.prepareStatement(
+      s"""
+        |create table test.nulltypes (a INT, b BOOLEAN, c TINYINT, d BINARY(20), e VARCHAR(20),
+        |f VARCHAR_IGNORECASE(20), g CHAR(20), h BLOB, i CLOB, j TIME, k DATE, l TIMESTAMP,
+        |m DOUBLE, n REAL, o DECIMAL(40, 20))
+      """.stripMargin.replaceAll("\n", " ")).executeUpdate()
+    conn.prepareStatement("insert into test.nulltypes values ("
+      + "null, null, null, null, null, null, null, null, null, "
+      + "null, null, null, null, null, null)").executeUpdate()
+    conn.commit()
+    sql(
+      s"""
+        |CREATE TEMPORARY TABLE nulltypes
+        |USING org.apache.spark.sql.jdbc
+        |OPTIONS (url '$url', dbtable 'TEST.NULLTYPES', user 'testUser', password 'testPass')
+      """.stripMargin.replaceAll("\n", " "))
+
     // Untested: IDENTITY, OTHER, UUID, ARRAY, and GEOMETRY types.
   }
@@ -254,6 +273,7 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
     val rows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.TIMETYPES").collect()
     val cachedRows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.TIMETYPES").cache().collect()
     assert(rows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
+    assert(rows(1).getAs[java.sql.Date](1) === null)
     assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
   }
@@ -266,6 +286,11 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
     assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
   }
 
+  test("test types for null value") {
+    val rows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.NULLTYPES").collect()
+    assert((0 to 14).forall(i => rows(0).isNullAt(i)))
+  }
+
   test("H2 floating-point types") {
     val rows = sql("SELECT * FROM flttypes").collect()
     assert(rows(0).getDouble(0) === 1.00000000000000022) // Yes, I meant ==.
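The new tests rely on H2 returning Java null for SQL NULL columns through plain JDBC, which is exactly the value the JDBCRDD guard above must handle. A standalone sketch of that behaviour against an in-memory H2 database (assumes the H2 driver is on the classpath, as it is for this suite):

import java.sql.DriverManager

object NullDateRoundTrip {
  def main(args: Array[String]): Unit = {
    // In-memory H2 database; it disappears when the connection closes.
    val conn = DriverManager.getConnection("jdbc:h2:mem:nulldate")
    try {
      val stmt = conn.createStatement()
      stmt.executeUpdate("create table timetypes (a TIME, b DATE, c TIMESTAMP)")
      stmt.executeUpdate("insert into timetypes values ('12:34:56', null, null)")
      val rs = stmt.executeQuery("select b from timetypes")
      rs.next()
      // Per the JDBC spec, getDate returns null (not a sentinel) for SQL NULL,
      // so an unconditional date conversion would throw an NPE right here.
      println(rs.getDate(1) == null) // prints: true
    } finally {
      conn.close()
    }
  }
}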