author     Yin Huai <huai@cse.ohio-state.edu>         2014-10-26 16:30:15 -0700
committer  Michael Armbrust <michael@databricks.com>  2014-10-26 16:30:15 -0700
commit     05308426f0f51273be95fb1ca2cb1ec19d83cec8 (patch)
tree       e85f9d613631add4863f70de82f6c7b25b7c2e64 /sql/hive
parent     d518bc24af54a3853fac457e03c70970354150bb (diff)
[SPARK-4052][SQL] Use scala.collection.Map for pattern matching instead of using Predef.Map (it is scala.collection.immutable.Map)
Please check https://issues.apache.org/jira/browse/SPARK-4052 for cases triggering this bug.

Author: Yin Huai <huai@cse.ohio-state.edu>

Closes #2899 from yhuai/SPARK-4052 and squashes the following commits:

1188f70 [Yin Huai] Address liancheng's comments.
b6712be [Yin Huai] Use scala.collection.Map instead of Predef.Map (scala.collection.immutable.Map).
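To make the failure mode concrete, here is a minimal standalone sketch (not part of the patch; the object name MapCastDemo is illustrative). Predef.Map is an alias for scala.collection.immutable.Map, which mutable maps do not extend, so a cast through Predef.Map throws while a cast through scala.collection.Map succeeds:

import scala.collection.mutable

object MapCastDemo {
  def main(args: Array[String]): Unit = {
    val o: Any = mutable.HashMap("key1" -> "value1")

    // scala.collection.Map is the common supertype of mutable and
    // immutable maps, so this cast succeeds (this is the fix):
    println(o.asInstanceOf[scala.collection.Map[_, _]])

    // Predef.Map is scala.collection.immutable.Map, which
    // mutable.HashMap does not extend, so this cast throws:
    try o.asInstanceOf[Map[_, _]]
    catch {
      case e: ClassCastException => println(s"Predef.Map cast failed: $e")
    }
  }
}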
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala   4
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala       18
2 files changed, 22 insertions(+), 0 deletions(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala
index 7db5fd804d..79234f8a66 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala
@@ -93,6 +93,10 @@ case class InsertIntoHiveTable(
(o: Any) => seqAsJavaList(o.asInstanceOf[Seq[_]].map(wrapper))
case moi: MapObjectInspector =>
+ // Predef.Map is scala.collection.immutable.Map, but the map values can be
+ // mutable, so we explicitly import scala.collection.Map here.
+ import scala.collection.Map
+
val keyWrapper = wrapperFor(moi.getMapKeyObjectInspector)
val valueWrapper = wrapperFor(moi.getMapValueObjectInspector)
(o: Any) => mapAsJavaMap(o.asInstanceOf[Map[_, _]].map { case (key, value) =>
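For context, here is a minimal standalone sketch of the closure built for the MapObjectInspector case above (the names MapWrapperSketch and identityWrapper are illustrative, and JavaConverters' asJava stands in for the mapAsJavaMap conversion used in the patch). Because the cast targets scala.collection.Map, both mutable and immutable map values are accepted:

import scala.collection.Map  // not Predef.Map, so mutable maps are accepted too
import scala.collection.JavaConverters.mapAsJavaMapConverter

object MapWrapperSketch {
  // Stand-in for the ObjectInspector-derived key/value wrappers in wrapperFor.
  val identityWrapper: Any => Any = identity

  // Mirrors the closure above: wrap each key and value, then hand Hive a
  // java.util.Map.
  def wrapMap(keyWrapper: Any => Any, valueWrapper: Any => Any): Any => java.util.Map[_, _] =
    (o: Any) => o.asInstanceOf[Map[Any, Any]].map {
      case (key, value) => keyWrapper(key) -> valueWrapper(value)
    }.asJava

  def main(args: Array[String]): Unit = {
    val wrap = wrapMap(identityWrapper, identityWrapper)
    println(wrap(scala.collection.mutable.HashMap("k" -> "v")))   // works
    println(wrap(scala.collection.immutable.Map("k" -> "v")))     // also works
  }
}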
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
index 7e323146f9..18dc937dd2 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
@@ -18,6 +18,7 @@
package org.apache.spark.sql.hive
import org.apache.spark.sql.QueryTest
+import org.apache.spark.sql._
import org.apache.spark.sql.hive.test.TestHive
/* Implicits */
@@ -73,4 +74,21 @@ class InsertIntoHiveTableSuite extends QueryTest {
createTable[TestData]("createAndInsertTest")
createTable[TestData]("createAndInsertTest")
}
+
+ test("SPARK-4052: scala.collection.Map as value type of MapType") {
+ val schema = StructType(StructField("m", MapType(StringType, StringType), true) :: Nil)
+ val rowRDD = TestHive.sparkContext.parallelize(
+ (1 to 100).map(i => Row(scala.collection.mutable.HashMap(s"key$i" -> s"value$i"))))
+ val schemaRDD = applySchema(rowRDD, schema)
+ schemaRDD.registerTempTable("tableWithMapValue")
+ sql("CREATE TABLE hiveTableWithMapValue(m MAP <STRING, STRING>)")
+ sql("INSERT OVERWRITE TABLE hiveTableWithMapValue SELECT m FROM tableWithMapValue")
+
+ checkAnswer(
+ sql("SELECT * FROM hiveTableWithMapValue"),
+ rowRDD.collect().toSeq
+ )
+
+ sql("DROP TABLE hiveTableWithMapValue")
+ }
}