author     Liwei Lin <lwlin7@gmail.com>                     2016-08-25 11:24:40 +0200
committer  Herman van Hovell <hvanhovell@databricks.com>    2016-08-25 11:24:40 +0200
commit     e0b20f9f24d5c3304bf517a4dcfb0da93be5bc75 (patch)
tree       f59230d7e3c65c874647bd09c37c4decbd23f7d6 /sql/catalyst/src/main
parent     2bcd5d5ce3eaf0eb1600a12a2b55ddb40927533b (diff)
[SPARK-17061][SPARK-17093][SQL] `MapObjects` should make copies of unsafe-backed data
## What changes were proposed in this pull request?

Currently `MapObjects` does not make copies of unsafe-backed data, leading to problems like [SPARK-17061](https://issues.apache.org/jira/browse/SPARK-17061) and [SPARK-17093](https://issues.apache.org/jira/browse/SPARK-17093). This patch makes `MapObjects` copy unsafe-backed data before storing it.

Generated code prior to this patch:

```java
...
/* 295 */   if (isNull12) {
/* 296 */     convertedArray1[loopIndex1] = null;
/* 297 */   } else {
/* 298 */     convertedArray1[loopIndex1] = value12;
/* 299 */   }
...
```

Generated code after this patch:

```java
...
/* 295 */   if (isNull12) {
/* 296 */     convertedArray1[loopIndex1] = null;
/* 297 */   } else {
/* 298 */     convertedArray1[loopIndex1] = value12 instanceof UnsafeRow? value12.copy() : value12;
/* 299 */   }
...
```

## How was this patch tested?

Added a new test case which would fail without this patch.

Author: Liwei Lin <lwlin7@gmail.com>

Closes #14698 from lw-lin/mapobjects-copy.
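For background on why the copy matters: unsafe-backed values (`UnsafeRow`, `UnsafeArrayData`, `UnsafeMapData`) are thin wrappers over a byte buffer that their producer typically reuses, so storing a reference without `copy()` lets later writes show through in earlier elements. The sketch below is not part of the patch; it uses the standard catalyst `UnsafeProjection`, but the demo object and the expected output are illustrative assumptions about that buffer-reuse behavior.

```scala
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.UnsafeProjection
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}

// Illustrative sketch (not Spark code): shows why references to unsafe-backed
// rows must be copied before being buffered.
object UnsafeAliasingSketch {
  def main(args: Array[String]): Unit = {
    val schema = StructType(StructField("i", IntegerType) :: Nil)
    val project = UnsafeProjection.create(schema)

    // Store the projection's output directly: both slots end up referencing the
    // same reused UnsafeRow, so the second projection clobbers the first value.
    val aliased = Array[InternalRow](project(InternalRow(1)), project(InternalRow(2)))

    // Store defensive copies instead: each slot keeps its own backing buffer.
    val copied = Array[InternalRow](project(InternalRow(1)).copy(), project(InternalRow(2)).copy())

    println(s"aliased: ${aliased.map(_.getInt(0)).mkString(", ")}")  // likely "2, 2"
    println(s"copied:  ${copied.map(_.getInt(0)).mkString(", ")}")   // "1, 2"
  }
}
```

The patch applies the same `copy()` remedy inside the code that `MapObjects` generates.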
Diffstat (limited to 'sql/catalyst/src/main')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala | 12
1 file changed, 11 insertions(+), 1 deletion(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
index 31ed485317..4da74a0a27 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
@@ -494,6 +494,16 @@ case class MapObjects private(
s"$seq == null ? $array[$loopIndex] : $seq.apply($loopIndex)"
}
+ // Make a copy of the data if it's unsafe-backed
+ def makeCopyIfInstanceOf(clazz: Class[_ <: Any], value: String) =
+ s"$value instanceof ${clazz.getSimpleName}? ${value}.copy() : $value"
+ val genFunctionValue = lambdaFunction.dataType match {
+ case StructType(_) => makeCopyIfInstanceOf(classOf[UnsafeRow], genFunction.value)
+ case ArrayType(_, _) => makeCopyIfInstanceOf(classOf[UnsafeArrayData], genFunction.value)
+ case MapType(_, _, _) => makeCopyIfInstanceOf(classOf[UnsafeMapData], genFunction.value)
+ case _ => genFunction.value
+ }
+
val loopNullCheck = inputDataType match {
case _: ArrayType => s"$loopIsNull = ${genInputData.value}.isNullAt($loopIndex);"
// The element of primitive array will never be null.
@@ -521,7 +531,7 @@ case class MapObjects private(
           if (${genFunction.isNull}) {
             $convertedArray[$loopIndex] = null;
           } else {
-            $convertedArray[$loopIndex] = ${genFunction.value};
+            $convertedArray[$loopIndex] = $genFunctionValue;
           }
           $loopIndex += 1;
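The new `makeCopyIfInstanceOf` helper splices an `instanceof ... ? ....copy() : ...` ternary into the generated Java, keyed on the lambda's result type, so only unsafe-backed results pay for a copy. A rough runtime Scala equivalent of that decision, for illustration only (the `copyIfUnsafe` name and the wrapper object are made up and not part of the patch):

```scala
import org.apache.spark.sql.catalyst.expressions.{UnsafeArrayData, UnsafeMapData, UnsafeRow}

object CopyIfUnsafeSketch {
  // Illustrative only: the patch emits this check as generated Java (chosen from the
  // lambda's dataType); this runtime version just mirrors the same decision.
  def copyIfUnsafe(value: AnyRef): AnyRef = value match {
    case row: UnsafeRow       => row.copy()  // detach the row from its shared buffer
    case arr: UnsafeArrayData => arr.copy()  // UnsafeArrayData exposes copy() as well
    case map: UnsafeMapData   => map.copy()  // and so does UnsafeMapData
    case other                => other       // safe-backed values can be stored as-is
  }
}
```

Copying only the three unsafe classes keeps the common safe-object path allocation-free, which appears to be why the patch does not call `copy()` unconditionally.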