path: root/sql
author    Feynman Liang <fliang@databricks.com>    2015-08-30 23:12:56 -0700
committer Reynold Xin <rxin@databricks.com>        2015-08-30 23:12:56 -0700
commit    8694c3ad7dcafca9563649e93b7a08076748d6f2 (patch)
tree      fd17897047316569da0e1d07d5b019c305274c4b /sql
parent    35e896a79bb5e72d63b82b047f46f4f6fa2e1970 (diff)
[SPARK-10351] [SQL] Fixes UTF8String.fromAddress to handle off-heap memory
CC rxin marmbrus

Author: Feynman Liang <fliang@databricks.com>

Closes #8523 from feynmanliang/SPARK-10351.
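For context, the bug sits on the read path for rows backed by off-heap memory: there the base object is null and the offset is an absolute address, and UTF8String.fromAddress has to accept that combination instead of treating it as invalid. The UTF8String change itself lives in the unsafe module, so only the regression test appears in this 'sql'-limited diff. The sketch below is not part of this commit; it is written against the Spark 1.5-era org.apache.spark.unsafe APIs (names and signatures may differ in other versions) and shows the kind of off-heap round trip the new getString(0) assertion guards:

// Hedged sketch: copy UTF-8 bytes into memory from the unsafe (off-heap)
// allocator and read them back through UTF8String.fromAddress. For this
// allocator the returned MemoryBlock has a null base object and an absolute
// base offset, which is the case SPARK-10351 targets.
import org.apache.spark.unsafe.Platform
import org.apache.spark.unsafe.memory.MemoryAllocator
import org.apache.spark.unsafe.types.UTF8String

val bytes = "hello".getBytes("UTF-8")
val page = MemoryAllocator.UNSAFE.allocate(bytes.length)
try {
  // Copy from the on-heap byte array into the off-heap page.
  Platform.copyMemory(
    bytes, Platform.BYTE_ARRAY_OFFSET,
    page.getBaseObject, page.getBaseOffset,
    bytes.length)
  // page.getBaseObject is null here; per the commit title, fromAddress did not
  // handle this off-heap case before the fix. Afterwards the string reads back intact.
  val s = UTF8String.fromAddress(page.getBaseObject, page.getBaseOffset, bytes.length)
  assert(s.toString == "hello")
} finally {
  MemoryAllocator.UNSAFE.free(page)
}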
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/UnsafeRowSuite.scala | 9
1 file changed, 5 insertions(+), 4 deletions(-)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/UnsafeRowSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/UnsafeRowSuite.scala
index 219435dff5..2476b10e3c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/UnsafeRowSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/UnsafeRowSuite.scala
@@ -43,12 +43,12 @@ class UnsafeRowSuite extends SparkFunSuite {
     val arrayBackedUnsafeRow: UnsafeRow =
       UnsafeProjection.create(Array[DataType](StringType, StringType, IntegerType)).apply(row)
     assert(arrayBackedUnsafeRow.getBaseObject.isInstanceOf[Array[Byte]])
-    val bytesFromArrayBackedRow: Array[Byte] = {
+    val (bytesFromArrayBackedRow, field0StringFromArrayBackedRow): (Array[Byte], String) = {
       val baos = new ByteArrayOutputStream()
       arrayBackedUnsafeRow.writeToStream(baos, null)
-      baos.toByteArray
+      (baos.toByteArray, arrayBackedUnsafeRow.getString(0))
     }
-    val bytesFromOffheapRow: Array[Byte] = {
+    val (bytesFromOffheapRow, field0StringFromOffheapRow): (Array[Byte], String) = {
       val offheapRowPage = MemoryAllocator.UNSAFE.allocate(arrayBackedUnsafeRow.getSizeInBytes)
       try {
         Platform.copyMemory(
@@ -69,13 +69,14 @@ class UnsafeRowSuite extends SparkFunSuite {
         val baos = new ByteArrayOutputStream()
         val writeBuffer = new Array[Byte](1024)
         offheapUnsafeRow.writeToStream(baos, writeBuffer)
-        baos.toByteArray
+        (baos.toByteArray, offheapUnsafeRow.getString(0))
       } finally {
         MemoryAllocator.UNSAFE.free(offheapRowPage)
       }
     }

     assert(bytesFromArrayBackedRow === bytesFromOffheapRow)
+    assert(field0StringFromArrayBackedRow === field0StringFromOffheapRow)
   }

   test("calling getDouble() and getFloat() on null columns") {