author     Daoyuan Wang <daoyuan.wang@intel.com>    2015-04-22 19:14:28 -0700
committer  Reynold Xin <rxin@databricks.com>        2015-04-22 19:14:28 -0700
commit     04525c077c638a7e615c294ba988e35036554f5f (patch)
tree       32640684cfc8fee5374b75ef5d3f12276d239ed7 /sql
parent     f4f39981f4f5e88c30eec7d0b107e2c3cdc268c9 (diff)
[SPARK-6967] [SQL] fix date type conversion in jdbcrdd
This PR converts java.sql.Date into Int for JDBCRDD.

Author: Daoyuan Wang <daoyuan.wang@intel.com>

Closes #5590 from adrian-wang/datebug and squashes the following commits:

f897b81 [Daoyuan Wang] add a test case
3c9184c [Daoyuan Wang] fix date type conversion in jdbcrdd
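For context, Spark SQL's DateType is stored internally as the number of days since the Unix epoch, which is why the fix maps java.sql.Date through DateUtils.fromJavaDate instead of storing the Date object directly. A minimal sketch of that idea, assuming the day count is derived from epoch milliseconds (the helper name daysSinceEpoch is illustrative, not Spark's actual API):

    import java.sql.Date
    import java.util.concurrent.TimeUnit

    // Illustrative only: map a java.sql.Date to days since 1970-01-01,
    // roughly what the DateUtils.fromJavaDate call in the diff below is
    // expected to yield. The real implementation also handles time zones.
    def daysSinceEpoch(date: Date): Int =
      TimeUnit.MILLISECONDS.toDays(date.getTime).toInt

    // Example: a Date built from exactly one day of epoch millis maps to 1.
    assert(daysSinceEpoch(new Date(TimeUnit.DAYS.toMillis(1))) == 1)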
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala   | 4
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala | 7
2 files changed, 9 insertions, 2 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
index 8b1edec20f..b975191d41 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
@@ -350,8 +350,8 @@ private[sql] class JDBCRDD(
val pos = i + 1
conversions(i) match {
case BooleanConversion => mutableRow.setBoolean(i, rs.getBoolean(pos))
- // TODO(davies): convert Date into Int
- case DateConversion => mutableRow.update(i, rs.getDate(pos))
+ case DateConversion =>
+ mutableRow.update(i, DateUtils.fromJavaDate(rs.getDate(pos)))
case DecimalConversion => mutableRow.update(i, rs.getBigDecimal(pos))
case DoubleConversion => mutableRow.setDouble(i, rs.getDouble(pos))
case FloatConversion => mutableRow.setFloat(i, rs.getFloat(pos))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index 3596b183d4..db096af453 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -249,6 +249,13 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
assert(rows(0).getAs[java.sql.Timestamp](2).getNanos === 543543543)
}
+ test("test DATE types") {
+ val rows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.TIMETYPES").collect()
+ val cachedRows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.TIMETYPES").cache().collect()
+ assert(rows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
+ assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
+ }
+
test("H2 floating-point types") {
val rows = sql("SELECT * FROM flttypes").collect()
assert(rows(0).getDouble(0) === 1.00000000000000022) // Yes, I meant ==.
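A note on the DATE test added above: even though DateConversion now writes an Int into the mutable row, the external Row API still surfaces java.sql.Date, so the assertions compare Date values; the cached variant additionally exercises the in-memory columnar code path. A minimal sketch of the reverse mapping, assuming the internal value is a days-since-epoch count (the helper name toJavaDate is illustrative, not Spark's actual API):

    import java.sql.Date
    import java.util.concurrent.TimeUnit

    // Illustrative only: rebuild a java.sql.Date from a days-since-epoch Int,
    // the inverse of the conversion sketched under the commit message.
    def toJavaDate(days: Int): Date =
      new Date(TimeUnit.DAYS.toMillis(days.toLong))

    // Round trip: one day after the epoch comes back as the same Date.
    val d = new Date(TimeUnit.DAYS.toMillis(1))
    assert(toJavaDate(TimeUnit.MILLISECONDS.toDays(d.getTime).toInt) == d)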