path: root/sql
author      Ewan Leith <ewan.leith@realitymine.com>    2015-09-21 23:43:20 -0700
committer   Reynold Xin <rxin@databricks.com>          2015-09-21 23:43:20 -0700
commit      781b21ba2a873ed29394c8dbc74fc700e3e0d17e (patch)
tree        3df8da945957740791da3344526a57a1b8bbb325 /sql
parent      0180b849dbaf191826231eda7dfaaf146a19602b (diff)
download    spark-781b21ba2a873ed29394c8dbc74fc700e3e0d17e.tar.gz
            spark-781b21ba2a873ed29394c8dbc74fc700e3e0d17e.tar.bz2
            spark-781b21ba2a873ed29394c8dbc74fc700e3e0d17e.zip
[SPARK-10419] [SQL] Adding SQLServer support for datetimeoffset types to JdbcDialects
Reading from Microsoft SQL Server over JDBC fails when the table contains datetimeoffset columns. This patch registers a SQL Server JdbcDialect that maps datetimeoffset to a String, as Microsoft suggests for non-Microsoft clients.

Author: Ewan Leith <ewan.leith@realitymine.com>

Closes #8575 from realitymine-coordinator/sqlserver.
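The practical effect is that a SQL Server table with datetimeoffset columns can be read over JDBC without a type-resolution failure. A minimal usage sketch, run from a Spark shell where sqlContext is predefined; the host, database, table and credentials below are hypothetical, and only the "jdbc:sqlserver" URL prefix matters for dialect selection:

    import java.util.Properties

    // Hypothetical connection details.
    val url = "jdbc:sqlserver://sqlhost:1433;databaseName=analytics"
    val props = new Properties()
    props.setProperty("user", "spark")       // hypothetical credentials
    props.setProperty("password", "secret")

    // With MsSqlServerDialect registered, any datetimeoffset column in "events"
    // is surfaced to Spark SQL as StringType instead of failing to resolve.
    val df = sqlContext.read.jdbc(url, "events", props)
    df.printSchema()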
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala  | 18
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala     |  1
2 files changed, 19 insertions, 0 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
index 68ebaaca6c..c70fea1c3f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
@@ -137,6 +137,8 @@ object JdbcDialects {
registerDialect(MySQLDialect)
registerDialect(PostgresDialect)
registerDialect(DB2Dialect)
+ registerDialect(MsSqlServerDialect)
+
/**
* Fetch the JdbcDialect class corresponding to a given database url.
@@ -260,3 +262,19 @@ case object DB2Dialect extends JdbcDialect {
case _ => None
}
}
+
+/**
+ * :: DeveloperApi ::
+ * Default Microsoft SQL Server dialect, mapping the datetimeoffset types to a String on read.
+ */
+@DeveloperApi
+case object MsSqlServerDialect extends JdbcDialect {
+ override def canHandle(url: String): Boolean = url.startsWith("jdbc:sqlserver")
+ override def getCatalystType(
+ sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] = {
+ if (typeName.contains("datetimeoffset")) {
+ // String is recommended by Microsoft SQL Server for datetimeoffset types in non-MS clients
+ Some(StringType)
+ } else None
+ }
+}
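The same JdbcDialects.registerDialect hook used above is a public developer API, so applications can plug in dialects of their own by following the pattern of MsSqlServerDialect. A minimal sketch, assuming a hypothetical "jdbc:exampledb" URL scheme and a hypothetical vendor type name "funkytimestamp":

    import org.apache.spark.sql.jdbc.{JdbcDialect, JdbcDialects}
    import org.apache.spark.sql.types.{DataType, MetadataBuilder, StringType}

    case object ExampleDbDialect extends JdbcDialect {
      override def canHandle(url: String): Boolean = url.startsWith("jdbc:exampledb")
      override def getCatalystType(
          sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] = {
        // Map the vendor-specific type to a String; returning None falls back
        // to Spark's default JDBC type mapping for everything else.
        if (typeName.equalsIgnoreCase("funkytimestamp")) Some(StringType) else None
      }
    }

    // Make the dialect visible to JdbcDialects.get for matching URLs.
    JdbcDialects.registerDialect(ExampleDbDialect)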
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index 5ab9381de4..c4b039a9c5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -408,6 +408,7 @@ class JDBCSuite extends SparkFunSuite with BeforeAndAfter with SharedSQLContext
assert(JdbcDialects.get("jdbc:mysql://127.0.0.1/db") == MySQLDialect)
assert(JdbcDialects.get("jdbc:postgresql://127.0.0.1/db") == PostgresDialect)
assert(JdbcDialects.get("jdbc:db2://127.0.0.1/db") == DB2Dialect)
+ assert(JdbcDialects.get("jdbc:sqlserver://127.0.0.1/db") == MsSqlServerDialect)
assert(JdbcDialects.get("test.invalid") == NoopDialect)
}
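Beyond the URL-matching assertion added above, the mapping itself can be exercised directly on the dialect. A small sketch mirroring the test; the numeric sqlType code and size arguments passed here are illustrative only, since the dialect matches on the type name:

    import org.apache.spark.sql.jdbc.{JdbcDialects, MsSqlServerDialect}
    import org.apache.spark.sql.types.{MetadataBuilder, StringType}

    val dialect = JdbcDialects.get("jdbc:sqlserver://127.0.0.1/db")
    assert(dialect == MsSqlServerDialect)

    // datetimeoffset is matched by name, so the sqlType code does not affect the result.
    val mapped = dialect.getCatalystType(-155, "datetimeoffset", 34, new MetadataBuilder())
    assert(mapped == Some(StringType))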