 sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala      | 9 +++++----
 sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala    | 8 +++++---
 sql/core/src/test/scala/org/apache/spark/sql/json/TestJsonData.scala | 3 ++-
 3 files changed, 12 insertions(+), 8 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala
index edf8677557..f6cbca9648 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala
@@ -198,11 +198,12 @@ private[sql] object JsonRDD extends Logging {
    * in this JSON object can appear in other JSON objects.
    */
   private def allKeysWithValueTypes(m: Map[String, Any]): Set[(String, DataType)] = {
-    m.map{
+    val keyValuePairs = m.map {
       // Quote the key with backticks to handle cases which have dots
       // in the field name.
-      case (key, dataType) => (s"`$key`", dataType)
-    }.flatMap {
+      case (key, value) => (s"`$key`", value)
+    }.toSet
+    keyValuePairs.flatMap {
       case (key: String, struct: Map[String, Any]) => {
         // The value associated with the key is a JSON object.
         allKeysWithValueTypes(struct).map {
@@ -224,7 +225,7 @@ private[sql] object JsonRDD extends Logging {
         }
       }
       case (key: String, value) => (key, typeOfPrimitiveValue(value)) :: Nil
-    }.toSet
+    }
   }
 
   /**
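
Editor's note: the bug fixed above is a Scala collections subtlety. flatMap on a
Map rebuilds a Map, so (key, type) pairs that share a key are silently
deduplicated before the schema resolver ever sees a type conflict; converting to
a Set first preserves every pair. A minimal sketch of the difference (plain
Scala, illustrative names, not part of the patch):

    object MapDedupSketch extends App {
      val m = Map("a" -> 1, "b" -> 2)

      // flatMap on a Map rebuilds a Map, so pairs sharing a key collapse:
      val viaMap = m.flatMap { case (k, _) => Seq((k, "int"), (k, "long")) }
      println(viaMap)  // Map(a -> long, b -> long): the "int" pairs are lost

      // Converting to a Set first (as keyValuePairs does above) keeps them all:
      val pairs: Set[(String, Int)] = m.toSet
      val viaSet = pairs.flatMap { case (k, _) => Seq((k, "int"), (k, "long")) }
      println(viaSet)  // all four pairs survive (order may vary):
                       // Set((a,int), (a,long), (b,int), (b,long))
    }
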
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
index 10bd9f08f0..e765cfc83a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
@@ -451,7 +451,9 @@ class JsonSuite extends QueryTest {
     val jsonSchemaRDD = jsonRDD(arrayElementTypeConflict)
 
     val expectedSchema =
-      AttributeReference("array", ArrayType(StringType), true)() :: Nil
+      AttributeReference("array1", ArrayType(StringType), true)() ::
+      AttributeReference("array2", ArrayType(StructType(
+        StructField("field", LongType, true) :: Nil)), true)() :: Nil
 
     comparePlans(Schema(expectedSchema), Schema(jsonSchemaRDD.logicalPlan.output))
@@ -460,12 +462,12 @@ class JsonSuite extends QueryTest {
     checkAnswer(
       sql("select * from jsonTable"),
       Seq(Seq("1", "1.1", "true", null, "[]", "{}", "[2,3,4]",
-        """{"field":str}""")) :: Nil
+        """{"field":str}"""), Seq(Seq(214748364700L), Seq(1))) :: Nil
     )
 
     // Treat an element as a number.
     checkAnswer(
-      sql("select array[0] + 1 from jsonTable"),
+      sql("select array1[0] + 1 from jsonTable"),
       2
     )
   }
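
Editor's note: the constant 214748364700 in the new fixture is deliberately
larger than Int.MaxValue (2147483647), so once one array2 element needs a
64-bit integer, the shared "field" must be inferred as LongType, matching the
expected schema above. A quick sanity check (plain Scala, illustrative only):

    object LongWideningCheck extends App {
      val big = 214748364700L  // value from the new arrayElementTypeConflict fixture
      // Int.MaxValue is 2147483647; big does not fit in 32 bits, so a
      // 32-bit IntegerType cannot hold it and the field widens to LongType.
      assert(big > Int.MaxValue.toLong)
      println(s"$big > ${Int.MaxValue}, hence LongType")
    }
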
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/json/TestJsonData.scala b/sql/core/src/test/scala/org/apache/spark/sql/json/TestJsonData.scala
index 065e04046e..d0180f3754 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/json/TestJsonData.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/json/TestJsonData.scala
@@ -72,7 +72,8 @@ object TestJsonData {
   val arrayElementTypeConflict =
     TestSQLContext.sparkContext.parallelize(
-      """{"array": [1, 1.1, true, null, [], {}, [2,3,4], {"field":"str"}]}""" :: Nil)
+      """{"array1": [1, 1.1, true, null, [], {}, [2,3,4], {"field":"str"}],
+          "array2": [{"field":214748364700}, {"field":1}]}""" :: Nil)
 
   val missingFields =
     TestSQLContext.sparkContext.parallelize(