Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala |  4 ++++
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala      | 18 ++++++++++++++++++
2 files changed, 22 insertions(+), 0 deletions(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala
index 7db5fd804d..79234f8a66 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala
@@ -93,6 +93,10 @@ case class InsertIntoHiveTable(
(o: Any) => seqAsJavaList(o.asInstanceOf[Seq[_]].map(wrapper))
case moi: MapObjectInspector =>
+ // Predef.Map is scala.collection.immutable.Map.
+ // Since the map values can be mutable, we explicitly import scala.collection.Map here.
+ import scala.collection.Map
+
val keyWrapper = wrapperFor(moi.getMapKeyObjectInspector)
val valueWrapper = wrapperFor(moi.getMapValueObjectInspector)
(o: Any) => mapAsJavaMap(o.asInstanceOf[Map[_, _]].map { case (key, value) =>
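Why the added import matters: without it, the Map in the cast above resolves to Predef.Map, which is scala.collection.immutable.Map, so a row value that is a mutable map fails the cast at runtime. A minimal standalone sketch (plain Scala, no Spark required; the object name is illustrative):

object MapCastSketch extends App {
  val value: Any = scala.collection.mutable.HashMap("key1" -> "value1")

  // This would throw ClassCastException: mutable.HashMap is not an
  // instance of Predef.Map (scala.collection.immutable.Map).
  // value.asInstanceOf[Predef.Map[String, String]]

  // scala.collection.Map is the common supertype of the mutable and
  // immutable hierarchies, so the cast succeeds for either kind of map.
  import scala.collection.Map
  val ok = value.asInstanceOf[Map[String, String]]
  assert(ok("key1") == "value1")
}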
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
index 7e323146f9..18dc937dd2 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
@@ -18,6 +18,7 @@
package org.apache.spark.sql.hive
import org.apache.spark.sql.QueryTest
+import org.apache.spark.sql._
import org.apache.spark.sql.hive.test.TestHive
/* Implicits */
@@ -73,4 +74,21 @@ class InsertIntoHiveTableSuite extends QueryTest {
createTable[TestData]("createAndInsertTest")
createTable[TestData]("createAndInsertTest")
}
+
+ test("SPARK-4052: scala.collection.Map as value type of MapType") {
+ val schema = StructType(StructField("m", MapType(StringType, StringType), true) :: Nil)
+ val rowRDD = TestHive.sparkContext.parallelize(
+ (1 to 100).map(i => Row(scala.collection.mutable.HashMap(s"key$i" -> s"value$i"))))
+ val schemaRDD = applySchema(rowRDD, schema)
+ schemaRDD.registerTempTable("tableWithMapValue")
+ sql("CREATE TABLE hiveTableWithMapValue(m MAP <STRING, STRING>)")
+ sql("INSERT OVERWRITE TABLE hiveTableWithMapValue SELECT m FROM tableWithMapValue")
+
+ checkAnswer(
+ sql("SELECT * FROM hiveTableWithMapValue"),
+ rowRDD.collect().toSeq
+ )
+
+ sql("DROP TABLE hiveTableWithMapValue")
+ }
}
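For reference, a standalone sketch of the wrapper logic the test exercises, with identity functions standing in for the key and value wrappers the real code derives from Hive ObjectInspectors, and scala.collection.JavaConverters used in place of the mapAsJavaMap call in the hunk above (names here are illustrative, not Spark APIs):

import scala.collection.Map                // covers mutable and immutable maps
import scala.collection.JavaConverters._   // for .asJava

object MapWrapperSketch extends App {
  def mapWrapper(keyWrapper: Any => Any, valueWrapper: Any => Any): Any => Any =
    (o: Any) => o.asInstanceOf[Map[Any, Any]].map { case (key, value) =>
      keyWrapper(key) -> valueWrapper(value)
    }.asJava

  val wrap = mapWrapper(identity, identity)
  // A mutable map round-trips too, which is what SPARK-4052 reported failing.
  println(wrap(scala.collection.mutable.HashMap("key1" -> "value1")))  // {key1=value1}
}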