author    Tathagata Das <tathagata.das1565@gmail.com>    2012-12-02 02:03:05 +0000
committer Tathagata Das <tathagata.das1565@gmail.com>    2012-12-02 02:03:05 +0000
commit    b4dba55f78b0dfda728cf69c9c17e4863010d28d (patch)
tree      8c6fc597b73ebb8aa96f15468e2ee258ef8b2970 /streaming/src
parent    477de94894b7d8eeed281d33c12bcb2269d117c7 (diff)
Made RDD checkpoint not create a new thread. Fixed bug in detecting when spark.cleaner.delay is insufficient.
Diffstat (limited to 'streaming/src')
 streaming/src/main/scala/spark/streaming/DStream.scala | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)
diff --git a/streaming/src/main/scala/spark/streaming/DStream.scala b/streaming/src/main/scala/spark/streaming/DStream.scala
index 28a3e2dfc7..d2e9de110e 100644
--- a/streaming/src/main/scala/spark/streaming/DStream.scala
+++ b/streaming/src/main/scala/spark/streaming/DStream.scala
@@ -182,14 +182,15 @@ extends Serializable with Logging {
       checkpointInterval + "). Please set it to higher than " + checkpointInterval + "."
     )

-    val metadataCleanupDelay = System.getProperty("spark.cleanup.delay", "-1").toDouble
+    val metadataCleanerDelay = spark.util.MetadataCleaner.getDelaySeconds
+    logInfo("metadataCleanupDelay = " + metadataCleanerDelay)
     assert(
-      metadataCleanupDelay < 0 || rememberDuration < metadataCleanupDelay * 60 * 1000,
+      metadataCleanerDelay < 0 || rememberDuration < metadataCleanerDelay * 1000,
       "It seems you are doing some DStream window operation or setting a checkpoint interval " +
       "which requires " + this.getClass.getSimpleName + " to remember generated RDDs for more " +
       "than " + rememberDuration.milliseconds + " milliseconds. But the Spark's metadata cleanup" +
-      "delay is set to " + metadataCleanupDelay + " minutes, which is not sufficient. Please set " +
-      "the Java property 'spark.cleanup.delay' to more than " +
+      "delay is set to " + (metadataCleanerDelay / 60.0) + " minutes, which is not sufficient. Please set " +
+      "the Java property 'spark.cleaner.delay' to more than " +
       math.ceil(rememberDuration.millis.toDouble / 60000.0).toInt + " minutes."
     )
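
The substance of the fix is a unit correction: the old check read spark.cleanup.delay as minutes (hence the * 60 * 1000 against a millisecond rememberDuration), while MetadataCleaner.getDelaySeconds returns seconds (hence a single * 1000), and the error message now names the property that is actually read, spark.cleaner.delay. Below is a minimal standalone sketch of the corrected check; the names CleanerDelayCheck and validate are hypothetical, and it assumes the getDelaySeconds convention that a negative delay means cleanup is disabled.

// Sketch of the corrected delay check in isolation.
// CleanerDelayCheck and validate are hypothetical names for illustration.
object CleanerDelayCheck {
  def validate(rememberDurationMs: Long, cleanerDelaySeconds: Long): Unit = {
    // A negative delay means metadata cleanup is disabled, so any remember
    // duration is safe. Otherwise the cleaner must retain metadata longer
    // than the DStream needs its RDDs: the delay is in seconds and the
    // remember duration in milliseconds, hence the single * 1000.
    assert(
      cleanerDelaySeconds < 0 || rememberDurationMs < cleanerDelaySeconds * 1000,
      "spark.cleaner.delay is " + (cleanerDelaySeconds / 60.0) + " minutes, " +
      "which is not sufficient; set it to more than " +
      math.ceil(rememberDurationMs.toDouble / 60000.0).toInt + " minutes."
    )
  }

  def main(args: Array[String]): Unit = {
    validate(60000L, 120L) // ok: remembers 1 minute, cleaner waits 2 minutes
    validate(60000L, -1L)  // ok: cleanup disabled
    // validate(120000L, 60L) would fail: the cleaner would discard metadata too early
  }
}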