author     Takuya UESHIN <ueshin@happy-camper.st>      2014-08-26 15:04:08 -0700
committer  Michael Armbrust <michael@databricks.com>   2014-08-26 15:04:08 -0700
commit     6b5584ef1c605cd30f25dbe7099ab32aea1746fb (patch)
tree       2e5abc226595b23bdb1b43de2800942afe252ea5 /sql/core/src/main
parent     98c2bb0bbde6fb2b6f64af3efffefcb0dae94c12 (diff)
[SPARK-3063][SQL] ExistingRdd should convert Map to catalyst Map.
Currently `ExistingRdd.convertToCatalyst` doesn't convert `Map` values.

Author: Takuya UESHIN <ueshin@happy-camper.st>

Closes #1963 from ueshin/issues/SPARK-3063 and squashes the following commits:

3ba41f2 [Takuya UESHIN] Merge branch 'master' into issues/SPARK-3063
4d7bae2 [Takuya UESHIN] Merge branch 'master' into issues/SPARK-3063
9321379 [Takuya UESHIN] Merge branch 'master' into issues/SPARK-3063
d8a900a [Takuya UESHIN] Make ExistingRdd.convertToCatalyst be able to convert Map value.
Diffstat (limited to 'sql/core/src/main')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/basicOperators.scala | 3
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/basicOperators.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/basicOperators.scala
index f9dfa3c92f..374af48b82 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/basicOperators.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/basicOperators.scala
@@ -206,7 +206,8 @@ case class Sort(
 object ExistingRdd {
   def convertToCatalyst(a: Any): Any = a match {
     case o: Option[_] => o.orNull
-    case s: Seq[Any] => s.map(convertToCatalyst)
+    case s: Seq[_] => s.map(convertToCatalyst)
+    case m: Map[_, _] => m.map { case (k, v) => convertToCatalyst(k) -> convertToCatalyst(v) }
     case p: Product => new GenericRow(p.productIterator.map(convertToCatalyst).toArray)
     case other => other
   }