author    Andrew Or <andrewor14@gmail.com>    2014-07-03 10:26:50 -0700
committer Aaron Davidson <aaron@databricks.com>    2014-07-03 10:26:50 -0700
commit    c480537739f9329ebfd580f09c69778e6c976366 (patch)
tree      95526d3961b1aa35adc01cb2e652aff9532e9639 /core/src/test
parent    3bbeca648985b32bdf1eedef779cb2817eb6dfa4 (diff)
[SPARK] Fix NPE for ExternalAppendOnlyMap
It did not handle null keys very gracefully before.

Author: Andrew Or <andrewor14@gmail.com>

Closes #1288 from andrewor14/fix-external and squashes the following commits:

312b8d8 [Andrew Or] Abstract key hash code
ed5adf9 [Andrew Or] Fix NPE for ExternalAppendOnlyMap
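The fix itself lives in ExternalAppendOnlyMap.scala, outside this test-only diffstat. Per the "Abstract key hash code" commit note, the idea is to route all key hashing through a null-safe helper instead of calling hashCode() on a possibly-null key. A minimal sketch of that idea, assuming helper names hash and hashKey (the names in the actual patch may differ):

    /** Sketch only: names are assumptions, not the exact patch. */
    object NullSafeHash {
      /** Return the object's hash code, mapping null to 0 so null keys never NPE. */
      def hash[T](obj: T): Int = if (obj == null) 0 else obj.hashCode()

      /** Hash a (key, combiner) pair by its key, tolerating null keys. */
      def hashKey[K, C](kc: (K, C)): Int = hash(kc._1)
    }

Any comparator or merge path in the map that previously invoked kc._1.hashCode() directly would then go through a helper like hashKey instead.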
Diffstat (limited to 'core/src/test')
-rw-r--r--  core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala | 27
1 file changed, 24 insertions(+), 3 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala
index deb7809535..428822949c 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala
@@ -334,8 +334,8 @@ class ExternalAppendOnlyMapSuite extends FunSuite with LocalSparkContext {
conf.set("spark.shuffle.memoryFraction", "0.001")
sc = new SparkContext("local-cluster[1,1,512]", "test", conf)
- val map = new ExternalAppendOnlyMap[Int, Int, ArrayBuffer[Int]](createCombiner,
- mergeValue, mergeCombiners)
+ val map = new ExternalAppendOnlyMap[Int, Int, ArrayBuffer[Int]](
+ createCombiner, mergeValue, mergeCombiners)
(1 to 100000).foreach { i => map.insert(i, i) }
map.insert(Int.MaxValue, Int.MaxValue)
@@ -346,11 +346,32 @@ class ExternalAppendOnlyMapSuite extends FunSuite with LocalSparkContext {
it.next()
}
}
+
+ test("spilling with null keys and values") {
+ val conf = new SparkConf(true)
+ conf.set("spark.shuffle.memoryFraction", "0.001")
+ sc = new SparkContext("local-cluster[1,1,512]", "test", conf)
+
+ val map = new ExternalAppendOnlyMap[Int, Int, ArrayBuffer[Int]](
+ createCombiner, mergeValue, mergeCombiners)
+
+ (1 to 100000).foreach { i => map.insert(i, i) }
+ map.insert(null.asInstanceOf[Int], 1)
+ map.insert(1, null.asInstanceOf[Int])
+ map.insert(null.asInstanceOf[Int], null.asInstanceOf[Int])
+
+ val it = map.iterator
+ while (it.hasNext) {
+ // Should not throw NullPointerException
+ it.next()
+ }
+ }
+
}
/**
* A dummy class that always returns the same hash code, to easily test hash collisions
*/
-case class FixedHashObject(val v: Int, val h: Int) extends Serializable {
+case class FixedHashObject(v: Int, h: Int) extends Serializable {
override def hashCode(): Int = h
}
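FixedHashObject pins every instance to the same hash code, so inserting several of them forces a map through its collision-handling path while case-class equality still keeps the keys distinct. A small illustrative sketch (not part of this diff) of how such a class can drive a collision test:

    import scala.collection.mutable

    // Ten distinct keys that all collide on hashCode 42.
    val items = (1 to 10).map(i => FixedHashObject(i, 42))

    val m = mutable.HashMap[FixedHashObject, Int]()
    items.foreach(o => m(o) = o.v)

    // Equality still distinguishes the keys; only their hashes collide.
    assert(m.size == 10)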