author     Stavros Kontopoulos <stavros.kontopoulos@typesafe.com>   2016-03-15 12:18:30 -0700
committer  Reynold Xin <rxin@databricks.com>                        2016-03-15 12:18:30 -0700
commit     50e3644d00c75de8273cce191debde68061e6e13 (patch)
tree       7056ffa7c80e360f824c207419836c614013227a /sql
parent     d89c71417c384e1a2820bd80b67c26e11405aebc (diff)
[SPARK-13896][SQL][STRING] Dataset.toJSON should return Dataset
## What changes were proposed in this pull request?

Change the return type of `toJSON` in the Dataset class from `RDD[String]` to `Dataset[String]`.

## How was this patch tested?

No additional unit tests required; the existing JSON and metastore suites were updated to match the new return type.

Author: Stavros Kontopoulos <stavros.kontopoulos@typesafe.com>

Closes #11732 from skonto/fix_toJson.
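The change in a nutshell, as a before/after sketch of just the signature (the surrounding `Dataset[T]` class body is elided):

    // before this patch: JSON documents exposed as a plain RDD
    def toJSON: RDD[String]

    // after this patch: JSON documents exposed as a Dataset
    def toJSON: Dataset[String]

Call sites that still need the RDD form, such as the updated tests below, unwrap it with `.rdd`.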
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala                               | 6
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala  | 4
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala        | 8
3 files changed, 10 insertions(+), 8 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
index f7ef0de21c..969fcdf428 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -1989,9 +1989,9 @@ class Dataset[T] private[sql](
* @group rdd
* @since 1.3.0
*/
- def toJSON: RDD[String] = {
+ def toJSON: Dataset[String] = {
val rowSchema = this.schema
- queryExecution.toRdd.mapPartitions { iter =>
+ val rdd = queryExecution.toRdd.mapPartitions { iter =>
val writer = new CharArrayWriter()
// create the Generator without separator inserted between 2 records
val gen = new JsonFactory().createGenerator(writer).setRootValueSeparator(null)
@@ -2013,6 +2013,8 @@ class Dataset[T] private[sql](
}
}
}
+ import sqlContext.implicits._
+ rdd.toDS
}
/**
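With the new return type, the generated JSON strings participate directly in the Dataset API, and the old behavior stays one call away. A minimal usage sketch (`df` is a hypothetical DataFrame; none of these lines are from the patch itself):

    import org.apache.spark.rdd.RDD
    import org.apache.spark.sql.Dataset

    val json: Dataset[String] = df.toJSON   // one JSON document per row
    json.show()                             // Dataset operations now apply directly
    val legacy: RDD[String] = json.rdd      // escape hatch for RDD-only APIs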
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
index 097ece3525..4671b2dca9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
@@ -1096,7 +1096,7 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
assert(result2(3) === "{\"f1\":{\"f11\":4,\"f12\":true},\"f2\":{\"D4\":2147483644}}")
val jsonDF = sqlContext.read.json(primitiveFieldAndType)
- val primTable = sqlContext.read.json(jsonDF.toJSON)
+ val primTable = sqlContext.read.json(jsonDF.toJSON.rdd)
primTable.registerTempTable("primitiveTable")
checkAnswer(
sql("select * from primitiveTable"),
@@ -1109,7 +1109,7 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
)
val complexJsonDF = sqlContext.read.json(complexFieldAndType1)
- val compTable = sqlContext.read.json(complexJsonDF.toJSON)
+ val compTable = sqlContext.read.json(complexJsonDF.toJSON.rdd)
compTable.registerTempTable("complexTable")
// Access elements of a primitive array.
checkAnswer(
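The `.rdd` added in these calls reflects that, at this point in the codebase, `DataFrameReader.json` accepts a path or an `RDD[String]` but has no `Dataset[String]` overload, so the round-trip in these tests unwraps the Dataset first. A sketch of the pattern (`df` hypothetical):

    // Dataset[String] -> RDD[String] -> DataFrame
    val roundTripped = sqlContext.read.json(df.toJSON.rdd)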
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index d7974f1ee3..81420fea11 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -164,7 +164,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
test("check change without refresh") {
withTempPath { tempDir =>
withTable("jsonTable") {
- (("a", "b") :: Nil).toDF().toJSON.saveAsTextFile(tempDir.getCanonicalPath)
+ (("a", "b") :: Nil).toDF().toJSON.rdd.saveAsTextFile(tempDir.getCanonicalPath)
sql(
s"""CREATE TABLE jsonTable
@@ -179,7 +179,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
Row("a", "b"))
Utils.deleteRecursively(tempDir)
- (("a1", "b1", "c1") :: Nil).toDF().toJSON.saveAsTextFile(tempDir.getCanonicalPath)
+ (("a1", "b1", "c1") :: Nil).toDF().toJSON.rdd.saveAsTextFile(tempDir.getCanonicalPath)
// Schema is cached so the new column does not show. The updated values in existing columns
// will show.
@@ -199,7 +199,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
test("drop, change, recreate") {
withTempPath { tempDir =>
- (("a", "b") :: Nil).toDF().toJSON.saveAsTextFile(tempDir.getCanonicalPath)
+ (("a", "b") :: Nil).toDF().toJSON.rdd.saveAsTextFile(tempDir.getCanonicalPath)
withTable("jsonTable") {
sql(
@@ -215,7 +215,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
Row("a", "b"))
Utils.deleteRecursively(tempDir)
- (("a", "b", "c") :: Nil).toDF().toJSON.saveAsTextFile(tempDir.getCanonicalPath)
+ (("a", "b", "c") :: Nil).toDF().toJSON.rdd.saveAsTextFile(tempDir.getCanonicalPath)
sql("DROP TABLE jsonTable")