path: root/core/src/test
author    Josh Rosen <joshrosen@databricks.com>  2016-01-06 20:50:31 -0800
committer Reynold Xin <rxin@databricks.com>  2016-01-06 20:50:31 -0800
commit    8e19c7663a067d55b32af68d62da42c7cd5d6009 (patch)
tree      331132f4bc4dcc48d94acda2ff5d456af849ab77 /core/src/test
parent    6b6d02be0d4e2ce562dddfb391b3302f79de8276 (diff)
[SPARK-7689] Remove TTL-based metadata cleaning in Spark 2.0
This PR removes `spark.cleaner.ttl` and the associated TTL-based metadata cleaning code.

Now that we have the `ContextCleaner` and a timer to trigger periodic GCs, I don't think that `spark.cleaner.ttl` is necessary anymore. The TTL-based cleaning isn't enabled by default, isn't covered by our end-to-end tests, and has been a source of user confusion when misconfigured: if the TTL is set too low, data that is still in use may be evicted or deleted, leading to hard-to-diagnose bugs.

For all of these reasons, I think we should remove this functionality in Spark 2.0. Additional benefits include marginally reduced memory usage, since we no longer need to store timestamps in hashmaps, and a handful fewer threads.

Author: Josh Rosen <joshrosen@databricks.com>

Closes #10534 from JoshRosen/remove-ttl-based-cleaning.
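To make the misconfiguration hazard above concrete, here is a minimal sketch in Scala of the TTL-based cleaning pattern being removed. It is not Spark's actual TimeStampedHashMap; the TimestampedMap class and TtlHazardDemo object are hypothetical names invented for illustration. Each entry carries an insertion timestamp, and clearOldValues() drops everything older than a threshold, whether or not the data is still in use.

import scala.collection.concurrent.TrieMap

// Hypothetical sketch of TTL-based cleaning, not Spark's implementation.
// Each value is stored alongside its insertion time.
class TimestampedMap[K, V] {
  private case class Entry(value: V, timestamp: Long)
  private val underlying = TrieMap.empty[K, Entry]

  def put(key: K, value: V): Unit = {
    underlying(key) = Entry(value, System.currentTimeMillis())
  }

  def get(key: K): Option[V] = underlying.get(key).map(_.value)

  // Drop every entry inserted before threshTime, regardless of whether
  // callers still expect to find it. This is the hazard: the map has no
  // way of knowing which old entries are still in use.
  def clearOldValues(threshTime: Long): Unit = {
    for ((k, e) <- underlying if e.timestamp < threshTime) {
      underlying.remove(k)
    }
  }
}

object TtlHazardDemo extends App {
  val map = new TimestampedMap[String, String]
  map.put("k1", "v1")
  Thread.sleep(10)
  // A too-aggressive TTL evicts everything older than "now",
  // including "k1", which a caller may still need.
  map.clearOldValues(System.currentTimeMillis())
  assert(map.get("k1").isEmpty) // the data is silently gone
}

By contrast, the ContextCleaner that replaces this mechanism cleans metadata only when the corresponding driver-side objects become unreachable, so data that is still referenced cannot be evicted early.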
Diffstat (limited to 'core/src/test')
-rw-r--r--  core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala  86
1 file changed, 0 insertions(+), 86 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala b/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala
index 9b3169026c..25fc15dd54 100644
--- a/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala
@@ -17,8 +17,6 @@
package org.apache.spark.util
-import java.lang.ref.WeakReference
-
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.util.Random
@@ -34,10 +32,6 @@ class TimeStampedHashMapSuite extends SparkFunSuite {
testMap(new TimeStampedHashMap[String, String]())
testMapThreadSafety(new TimeStampedHashMap[String, String]())
- // Test TimeStampedWeakValueHashMap basic functionality
- testMap(new TimeStampedWeakValueHashMap[String, String]())
- testMapThreadSafety(new TimeStampedWeakValueHashMap[String, String]())
-
test("TimeStampedHashMap - clearing by timestamp") {
// clearing by insertion time
val map = new TimeStampedHashMap[String, String](updateTimeStampOnGet = false)
@@ -68,86 +62,6 @@ class TimeStampedHashMapSuite extends SparkFunSuite {
assert(map1.get("k2").isDefined)
}
- test("TimeStampedWeakValueHashMap - clearing by timestamp") {
- // clearing by insertion time
- val map = new TimeStampedWeakValueHashMap[String, String](updateTimeStampOnGet = false)
- map("k1") = "v1"
- assert(map("k1") === "v1")
- Thread.sleep(10)
- val threshTime = System.currentTimeMillis
- assert(map.getTimestamp("k1").isDefined)
- assert(map.getTimestamp("k1").get < threshTime)
- map.clearOldValues(threshTime)
- assert(map.get("k1") === None)
-
- // clearing by modification time
- val map1 = new TimeStampedWeakValueHashMap[String, String](updateTimeStampOnGet = true)
- map1("k1") = "v1"
- map1("k2") = "v2"
- assert(map1("k1") === "v1")
- Thread.sleep(10)
- val threshTime1 = System.currentTimeMillis
- Thread.sleep(10)
- assert(map1("k2") === "v2") // access k2 to update its access time to > threshTime
- assert(map1.getTimestamp("k1").isDefined)
- assert(map1.getTimestamp("k1").get < threshTime1)
- assert(map1.getTimestamp("k2").isDefined)
- assert(map1.getTimestamp("k2").get >= threshTime1)
- map1.clearOldValues(threshTime1) // should only clear k1
- assert(map1.get("k1") === None)
- assert(map1.get("k2").isDefined)
- }
-
- test("TimeStampedWeakValueHashMap - clearing weak references") {
- var strongRef = new Object
- val weakRef = new WeakReference(strongRef)
- val map = new TimeStampedWeakValueHashMap[String, Object]
- map("k1") = strongRef
- map("k2") = "v2"
- map("k3") = "v3"
- val isEquals = map("k1") == strongRef
- assert(isEquals)
-
- // clear strong reference to "k1"
- strongRef = null
- val startTime = System.currentTimeMillis
- System.gc() // Make a best effort to run the garbage collection. It *usually* runs GC.
- System.runFinalization() // Make a best effort to call finalizer on all cleaned objects.
- while(System.currentTimeMillis - startTime < 10000 && weakRef.get != null) {
- System.gc()
- System.runFinalization()
- Thread.sleep(100)
- }
- assert(map.getReference("k1").isDefined)
- val ref = map.getReference("k1").get
- assert(ref.get === null)
- assert(map.get("k1") === None)
-
- // operations should only display non-null entries
- assert(map.iterator.forall { case (k, v) => k != "k1" })
- assert(map.filter { case (k, v) => k != "k2" }.size === 1)
- assert(map.filter { case (k, v) => k != "k2" }.head._1 === "k3")
- assert(map.toMap.size === 2)
- assert(map.toMap.forall { case (k, v) => k != "k1" })
- val buffer = new ArrayBuffer[String]
- map.foreach { case (k, v) => buffer += v.toString }
- assert(buffer.size === 2)
- assert(buffer.forall(_ != "k1"))
- val plusMap = map + (("k4", "v4"))
- assert(plusMap.size === 3)
- assert(plusMap.forall { case (k, v) => k != "k1" })
- val minusMap = map - "k2"
- assert(minusMap.size === 1)
- assert(minusMap.head._1 == "k3")
-
- // clear null values - should only clear k1
- map.clearNullValues()
- assert(map.getReference("k1") === None)
- assert(map.get("k1") === None)
- assert(map.get("k2").isDefined)
- assert(map.get("k2").get === "v2")
- }
-
/** Test basic operations of a Scala mutable Map. */
def testMap(hashMapConstructor: => mutable.Map[String, String]) {
def newMap() = hashMapConstructor