aboutsummaryrefslogtreecommitdiff
path: root/sql/core/src/test/scala
diff options
context:
space:
mode:
authorYuming Wang <wgyumg@gmail.com>2016-08-05 16:11:54 +0100
committerSean Owen <sowen@cloudera.com>2016-08-05 16:11:54 +0100
commit39a2b2ea74d420caa37019e3684f65b3a6fcb388 (patch)
tree7d98547f7167129d7fd7b9b4843786de68026bc2 /sql/core/src/test/scala
parente026064143367e4614cb866e321cc521fdde3170 (diff)
downloadspark-39a2b2ea74d420caa37019e3684f65b3a6fcb388.tar.gz
spark-39a2b2ea74d420caa37019e3684f65b3a6fcb388.tar.bz2
spark-39a2b2ea74d420caa37019e3684f65b3a6fcb388.zip
[SPARK-16625][SQL] General data types to be mapped to Oracle
## What changes were proposed in this pull request? Spark will convert **BooleanType** to **BIT(1)**, **LongType** to **BIGINT**, and **ByteType** to **BYTE** when saving a DataFrame to Oracle, but Oracle does not support the BIT, BIGINT, and BYTE types. This PR converts the following _Spark Types_ to _Oracle types_, referring to the [Oracle Developer's Guide](https://docs.oracle.com/cd/E19501-01/819-3659/gcmaz/) Spark Type | Oracle ----|---- BooleanType | NUMBER(1) IntegerType | NUMBER(10) LongType | NUMBER(19) FloatType | NUMBER(19, 4) DoubleType | NUMBER(19, 4) ByteType | NUMBER(3) ShortType | NUMBER(5) ## How was this patch tested? Added new tests in [JDBCSuite.scala](https://github.com/wangyum/spark/commit/22b0c2a4228cb8b5098ad741ddf4d1904e745ff6#diff-dc4b58851b084b274df6fe6b189db84d) and [OracleDialect.scala](https://github.com/wangyum/spark/commit/22b0c2a4228cb8b5098ad741ddf4d1904e745ff6#diff-5e0cadf526662f9281aa26315b3750ad) Author: Yuming Wang <wgyumg@gmail.com> Closes #14377 from wangyum/SPARK-16625.
Diffstat (limited to 'sql/core/src/test/scala')
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala21
1 files changed, 21 insertions, 0 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index 995b1200a2..2d8ee338a9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -739,6 +739,27 @@ class JDBCSuite extends SparkFunSuite
map(_.databaseTypeDefinition).get == "VARCHAR2(255)")
}
+ test("SPARK-16625: General data types to be mapped to Oracle") {
+
+ def getJdbcType(dialect: JdbcDialect, dt: DataType): String = {
+ dialect.getJDBCType(dt).orElse(JdbcUtils.getCommonJDBCType(dt)).
+ map(_.databaseTypeDefinition).get
+ }
+
+ val oracleDialect = JdbcDialects.get("jdbc:oracle://127.0.0.1/db")
+ assert(getJdbcType(oracleDialect, BooleanType) == "NUMBER(1)")
+ assert(getJdbcType(oracleDialect, IntegerType) == "NUMBER(10)")
+ assert(getJdbcType(oracleDialect, LongType) == "NUMBER(19)")
+ assert(getJdbcType(oracleDialect, FloatType) == "NUMBER(19, 4)")
+ assert(getJdbcType(oracleDialect, DoubleType) == "NUMBER(19, 4)")
+ assert(getJdbcType(oracleDialect, ByteType) == "NUMBER(3)")
+ assert(getJdbcType(oracleDialect, ShortType) == "NUMBER(5)")
+ assert(getJdbcType(oracleDialect, StringType) == "VARCHAR2(255)")
+ assert(getJdbcType(oracleDialect, BinaryType) == "BLOB")
+ assert(getJdbcType(oracleDialect, DateType) == "DATE")
+ assert(getJdbcType(oracleDialect, TimestampType) == "TIMESTAMP")
+ }
+
private def assertEmptyQuery(sqlString: String): Unit = {
assert(sql(sqlString).collect().isEmpty)
}