about summary refs log tree commit diff
path: root/sql/core/src/main
diff options
context:
space:
mode:
author    Yin Huai <huai@cse.ohio-state.edu>  2014-09-16 11:40:28 -0700
committer Michael Armbrust <michael@databricks.com>  2014-09-16 11:40:28 -0700
commit    7583699873fb4f252c6ce65db1096783ef438731 (patch)
tree      f3c5d64dfd766f4fd19f14ab680664efd0df0e23 /sql/core/src/main
parent    9d5fa763d8559ac412a18d7a2f43c4368a0af897 (diff)
download  spark-7583699873fb4f252c6ce65db1096783ef438731.tar.gz
          spark-7583699873fb4f252c6ce65db1096783ef438731.tar.bz2
          spark-7583699873fb4f252c6ce65db1096783ef438731.zip
[SPARK-3308][SQL] Ability to read JSON Arrays as tables
This PR aims to support reading top level JSON arrays and take every element in such an array as a row (an empty array will not generate a row). JIRA: https://issues.apache.org/jira/browse/SPARK-3308 Author: Yin Huai <huai@cse.ohio-state.edu> Closes #2400 from yhuai/SPARK-3308 and squashes the following commits: 990077a [Yin Huai] Handle top level JSON arrays.
Diffstat (limited to 'sql/core/src/main')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala | 10
1 file changed, 7 insertions, 3 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala
index 873221835d..0f27fd13e7 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala
@@ -287,9 +287,13 @@ private[sql] object JsonRDD extends Logging {
// the ObjectMapper will take the last value associated with this duplicate key.
// For example: for {"key": 1, "key":2}, we will get "key"->2.
val mapper = new ObjectMapper()
- iter.map { record =>
- val parsed = scalafy(mapper.readValue(record, classOf[java.util.Map[String, Any]]))
- parsed.asInstanceOf[Map[String, Any]]
+ iter.flatMap { record =>
+ val parsed = mapper.readValue(record, classOf[Object]) match {
+ case map: java.util.Map[_, _] => scalafy(map).asInstanceOf[Map[String, Any]] :: Nil
+ case list: java.util.List[_] => scalafy(list).asInstanceOf[Seq[Map[String, Any]]]
+ }
+
+ parsed
}
})
}