aboutsummaryrefslogtreecommitdiff
path: root/sql/core
diff options
context:
space:
mode:
author: Stavros Kontopoulos <stavros.kontopoulos@typesafe.com> 2016-03-15 12:18:30 -0700
committer: Reynold Xin <rxin@databricks.com> 2016-03-15 12:18:30 -0700
commit50e3644d00c75de8273cce191debde68061e6e13 (patch)
tree7056ffa7c80e360f824c207419836c614013227a /sql/core
parentd89c71417c384e1a2820bd80b67c26e11405aebc (diff)
downloadspark-50e3644d00c75de8273cce191debde68061e6e13.tar.gz
spark-50e3644d00c75de8273cce191debde68061e6e13.tar.bz2
spark-50e3644d00c75de8273cce191debde68061e6e13.zip
[SPARK-13896][SQL][STRING] Dataset.toJSON should return Dataset
## What changes were proposed in this pull request?

Change the return type of toJSON in the Dataset class from RDD[String] to Dataset[String].

## How was this patch tested?

No additional unit test required.

Author: Stavros Kontopoulos <stavros.kontopoulos@typesafe.com>

Closes #11732 from skonto/fix_toJson.
Diffstat (limited to 'sql/core')
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala | 6
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala | 4
2 files changed, 6 insertions, 4 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
index f7ef0de21c..969fcdf428 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -1989,9 +1989,9 @@ class Dataset[T] private[sql](
* @group rdd
* @since 1.3.0
*/
- def toJSON: RDD[String] = {
+ def toJSON: Dataset[String] = {
val rowSchema = this.schema
- queryExecution.toRdd.mapPartitions { iter =>
+ val rdd = queryExecution.toRdd.mapPartitions { iter =>
val writer = new CharArrayWriter()
// create the Generator without separator inserted between 2 records
val gen = new JsonFactory().createGenerator(writer).setRootValueSeparator(null)
@@ -2013,6 +2013,8 @@ class Dataset[T] private[sql](
}
}
}
+ import sqlContext.implicits._
+ rdd.toDS
}
/**
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
index 097ece3525..4671b2dca9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
@@ -1096,7 +1096,7 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
assert(result2(3) === "{\"f1\":{\"f11\":4,\"f12\":true},\"f2\":{\"D4\":2147483644}}")
val jsonDF = sqlContext.read.json(primitiveFieldAndType)
- val primTable = sqlContext.read.json(jsonDF.toJSON)
+ val primTable = sqlContext.read.json(jsonDF.toJSON.rdd)
primTable.registerTempTable("primitiveTable")
checkAnswer(
sql("select * from primitiveTable"),
@@ -1109,7 +1109,7 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
)
val complexJsonDF = sqlContext.read.json(complexFieldAndType1)
- val compTable = sqlContext.read.json(complexJsonDF.toJSON)
+ val compTable = sqlContext.read.json(complexJsonDF.toJSON.rdd)
compTable.registerTempTable("complexTable")
// Access elements of a primitive array.
checkAnswer(