author    Davies Liu <davies@databricks.com>  2016-09-30 09:59:12 -0700
committer Davies Liu <davies.liu@gmail.com>   2016-09-30 09:59:12 -0700
commit    f327e16863371076dbd2a7f22c8895ae07f8274b (patch)
tree      2bc96f7cab165e6311be596286697e2da37c4124 /sql/core/src/test/scala
parent    8e491af52930886cbe0c54e7d67add3796ddb15f (diff)
[SPARK-17738] [SQL] fix ARRAY/MAP in columnar cache
## What changes were proposed in this pull request?

The actualSize() of ARRAY and MAP columns differed from the real serialized size: the length header is an Int (4 bytes), not a Long (8 bytes).

## How was this patch tested?

The flaky test should be fixed by this change.

Author: Davies Liu <davies@databricks.com>

Closes #15305 from davies/fix_MAP.
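For context, here is a minimal sketch of the size accounting the commit message describes. The real fix lives in ColumnType.scala, which is outside this diffstat; the object and method names below are illustrative assumptions, not the actual Spark API.

```scala
// Illustrative sketch only, not the actual Spark patch. It assumes each serialized
// ARRAY/MAP value in the columnar cache is laid out as [Int length header][payload].
object ActualSizeSketch {
  // Before the fix the header was counted as a Long (8 bytes); it is really an Int (4 bytes).
  private val HeaderSizeInBytes: Int = 4

  // actualSize of one serialized value = length header + payload bytes.
  def actualSize(payloadSizeInBytes: Int): Int =
    HeaderSizeInBytes + payloadSizeInBytes
}
```

With the 24-byte single-element array payload and the 64-byte single-entry map payload used in the test below, this yields 28 and 68 bytes, matching the updated expectations.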
Diffstat (limited to 'sql/core/src/test/scala')
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnTypeSuite.scala  6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnTypeSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnTypeSuite.scala
index 0b93c633b2..805b566728 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnTypeSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnTypeSuite.scala
@@ -38,7 +38,7 @@ class ColumnTypeSuite extends SparkFunSuite with Logging {
val checks = Map(
NULL -> 0, BOOLEAN -> 1, BYTE -> 1, SHORT -> 2, INT -> 4, LONG -> 8,
FLOAT -> 4, DOUBLE -> 8, COMPACT_DECIMAL(15, 10) -> 8, LARGE_DECIMAL(20, 10) -> 12,
- STRING -> 8, BINARY -> 16, STRUCT_TYPE -> 20, ARRAY_TYPE -> 16, MAP_TYPE -> 32)
+ STRING -> 8, BINARY -> 16, STRUCT_TYPE -> 20, ARRAY_TYPE -> 28, MAP_TYPE -> 68)
checks.foreach { case (columnType, expectedSize) =>
assertResult(expectedSize, s"Wrong defaultSize for $columnType") {
@@ -73,8 +73,8 @@ class ColumnTypeSuite extends SparkFunSuite with Logging {
checkActualSize(BINARY, Array.fill[Byte](4)(0.toByte), 4 + 4)
checkActualSize(COMPACT_DECIMAL(15, 10), Decimal(0, 15, 10), 8)
checkActualSize(LARGE_DECIMAL(20, 10), Decimal(0, 20, 10), 5)
- checkActualSize(ARRAY_TYPE, Array[Any](1), 8 + 8 + 8 + 8)
- checkActualSize(MAP_TYPE, Map(1 -> "a"), 8 + (8 + 8 + 8 + 8) + (8 + 8 + 8 + 8))
+ checkActualSize(ARRAY_TYPE, Array[Any](1), 4 + 8 + 8 + 8)
+ checkActualSize(MAP_TYPE, Map(1 -> "a"), 4 + (8 + 8 + 8 + 8) + (8 + 8 + 8 + 8))
checkActualSize(STRUCT_TYPE, Row("hello"), 28)
}
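As a quick sanity check (not part of the patch), the new expectations in the two hunks agree with each other once the 4-byte Int header is counted: the defaultSize values in the first hunk equal the actualSize sums asserted in the second.

```scala
// Standalone arithmetic check of the updated test expectations above.
object ColumnSizeArithmetic extends App {
  // ARRAY_TYPE: 4-byte length header + three 8-byte words of array payload.
  val arrayActualSize = 4 + 8 + 8 + 8
  // MAP_TYPE: 4-byte length header + two 32-byte groups (presumably the
  // serialized key array and value array of the single-entry map).
  val mapActualSize = 4 + (8 + 8 + 8 + 8) + (8 + 8 + 8 + 8)

  // Matches the new defaultSize entries: ARRAY_TYPE -> 28, MAP_TYPE -> 68.
  assert(arrayActualSize == 28)
  assert(mapActualSize == 68)
  println(s"array: $arrayActualSize bytes, map: $mapActualSize bytes")
}
```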