aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorReynold Xin <rxin@databricks.com>2016-02-04 22:43:44 -0800
committerReynold Xin <rxin@databricks.com>2016-02-04 22:43:44 -0800
commit82d84ff2dd3efb3bda20b529f09a4022586fb722 (patch)
tree8b1baccb2121e8c220e8d1c149de7c04ea3f4d9c
parent352102ed0b7be8c335553d7e0389fd7ce83f5fbf (diff)
downloadspark-82d84ff2dd3efb3bda20b529f09a4022586fb722.tar.gz
spark-82d84ff2dd3efb3bda20b529f09a4022586fb722.tar.bz2
spark-82d84ff2dd3efb3bda20b529f09a4022586fb722.zip
[SPARK-13187][SQL] Add boolean/long/double options in DataFrameReader/Writer
This patch adds option function for boolean, long, and double types. This makes it slightly easier for Spark users to specify options without turning them into strings. Using the JSON data source as an example. Before this patch: ```scala sqlContext.read.option("primitivesAsString", "true").json("/path/to/json") ``` After this patch: ```scala sqlContext.read.option("primitivesAsString", true).json("/path/to/json") ``` Author: Reynold Xin <rxin@databricks.com> Closes #11072 from rxin/SPARK-13187.
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala21
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala21
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/streaming/DataStreamReaderSuite.scala25
3 files changed, 67 insertions, 0 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
index a58643a5ba..962fdadf14 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
@@ -79,6 +79,27 @@ class DataFrameReader private[sql](sqlContext: SQLContext) extends Logging {
}
/**
+ * Adds an input option for the underlying data source.
+ *
+ * @since 2.0.0
+ */
+ def option(key: String, value: Boolean): DataFrameReader = option(key, value.toString)
+
+ /**
+ * Adds an input option for the underlying data source.
+ *
+ * @since 2.0.0
+ */
+ def option(key: String, value: Long): DataFrameReader = option(key, value.toString)
+
+ /**
+ * Adds an input option for the underlying data source.
+ *
+ * @since 2.0.0
+ */
+ def option(key: String, value: Double): DataFrameReader = option(key, value.toString)
+
+ /**
* (Scala-specific) Adds input options for the underlying data source.
*
* @since 1.4.0
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
index 80447fefe1..8060198968 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
@@ -96,6 +96,27 @@ final class DataFrameWriter private[sql](df: DataFrame) {
}
/**
+ * Adds an output option for the underlying data source.
+ *
+ * @since 2.0.0
+ */
+ def option(key: String, value: Boolean): DataFrameWriter = option(key, value.toString)
+
+ /**
+ * Adds an output option for the underlying data source.
+ *
+ * @since 2.0.0
+ */
+ def option(key: String, value: Long): DataFrameWriter = option(key, value.toString)
+
+ /**
+ * Adds an output option for the underlying data source.
+ *
+ * @since 2.0.0
+ */
+ def option(key: String, value: Double): DataFrameWriter = option(key, value.toString)
+
+ /**
* (Scala-specific) Adds output options for the underlying data source.
*
* @since 1.4.0
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/DataStreamReaderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/DataStreamReaderSuite.scala
index b36b41cac9..95a17f338d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/DataStreamReaderSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/DataStreamReaderSuite.scala
@@ -162,4 +162,29 @@ class DataStreamReaderWriterSuite extends StreamTest with SharedSQLContext {
assert(LastOptions.parameters("path") == "/test")
}
+ test("test different data types for options") {
+ val df = sqlContext.read
+ .format("org.apache.spark.sql.streaming.test")
+ .option("intOpt", 56)
+ .option("boolOpt", false)
+ .option("doubleOpt", 6.7)
+ .stream("/test")
+
+ assert(LastOptions.parameters("intOpt") == "56")
+ assert(LastOptions.parameters("boolOpt") == "false")
+ assert(LastOptions.parameters("doubleOpt") == "6.7")
+
+ LastOptions.parameters = null
+ df.write
+ .format("org.apache.spark.sql.streaming.test")
+ .option("intOpt", 56)
+ .option("boolOpt", false)
+ .option("doubleOpt", 6.7)
+ .stream("/test")
+ .stop()
+
+ assert(LastOptions.parameters("intOpt") == "56")
+ assert(LastOptions.parameters("boolOpt") == "false")
+ assert(LastOptions.parameters("doubleOpt") == "6.7")
+ }
}