author     Sameer Abhyankar <sabhyankar@sabhyankar-MBP.local>   2015-07-31 13:08:55 -0700
committer  Tathagata Das <tathagata.das1565@gmail.com>          2015-07-31 13:08:55 -0700
commit     060c79aab58efd4ce7353a1b00534de0d9e1de0b (patch)
tree       ce6380492e56f384647a7fd4b11f01638db3fc66
parent     3c0d2e55210735e0df2f8febb5f63c224af230e3 (diff)
[SPARK-9056] [STREAMING] Rename configuration `spark.streaming.minRememberDuration` to `spark.streaming.fileStream.minRememberDuration`
Rename configuration `spark.streaming.minRememberDuration` to `spark.streaming.fileStream.minRememberDuration`.

Author: Sameer Abhyankar <sabhyankar@sabhyankar-MBP.local>
Author: Sameer Abhyankar <sabhyankar@sabhyankar-MBP.Samavihome>

Closes #7740 from sabhyankar/spark_branch_9056 and squashes the following commits:

d5b2f1f [Sameer Abhyankar] Correct deprecated version to 1.5
1268133 [Sameer Abhyankar] Add {} and indentation
ddf9844 [Sameer Abhyankar] Change 4 space indentation to 2 space indentation
1819b5f [Sameer Abhyankar] Use spark.streaming.fileStream.minRememberDuration property in lieu of spark.streaming.minRememberDuration
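For reference, this property bounds how far back a fileStream source looks for recently modified files when selecting new input. A minimal, illustrative sketch of setting it under the new name (the application name, batch interval, and input directory below are assumptions, not part of this patch):

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

// Set the renamed property; values set under the deprecated
// spark.streaming.minRememberDuration key continue to be honoured.
val conf = new SparkConf()
  .setAppName("FileStreamRememberExample")
  .set("spark.streaming.fileStream.minRememberDuration", "120s")

val ssc = new StreamingContext(conf, Seconds(10))
ssc.textFileStream("/data/incoming").print()
ssc.start()
ssc.awaitTermination()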
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkConf.scala                                |  4
-rw-r--r--  streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala  |  6
2 files changed, 7 insertions(+), 3 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 4161792976..08bab4bf27 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -548,7 +548,9 @@ private[spark] object SparkConf extends Logging {
"spark.rpc.askTimeout" -> Seq(
AlternateConfig("spark.akka.askTimeout", "1.4")),
"spark.rpc.lookupTimeout" -> Seq(
- AlternateConfig("spark.akka.lookupTimeout", "1.4"))
+ AlternateConfig("spark.akka.lookupTimeout", "1.4")),
+ "spark.streaming.fileStream.minRememberDuration" -> Seq(
+ AlternateConfig("spark.streaming.minRememberDuration", "1.5"))
)
/**
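The new entry in this map means that a read of the new key falls back to any value supplied under the deprecated one, with a deprecation warning logged when the old key is used. A rough user-side illustration, assuming SparkConf's usual handling of alternate keys:

import org.apache.spark.SparkConf

// A job that still sets the old, deprecated key...
val conf = new SparkConf().set("spark.streaming.minRememberDuration", "90s")

// ...is still honoured when the new key is read, via the AlternateConfig
// entry registered above (90 seconds here, not the 60s default).
val remembered = conf.getTimeAsSeconds("spark.streaming.fileStream.minRememberDuration", "60s")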
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala
index dd4da9d9ca..c358f5b5bd 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala
@@ -86,8 +86,10 @@ class FileInputDStream[K, V, F <: NewInputFormat[K, V]](
* Files with mod times older than this "window" of remembering will be ignored. So if new
* files are visible within this window, then the file will get selected in the next batch.
*/
- private val minRememberDurationS =
- Seconds(ssc.conf.getTimeAsSeconds("spark.streaming.minRememberDuration", "60s"))
+ private val minRememberDurationS = {
+ Seconds(ssc.conf.getTimeAsSeconds("spark.streaming.fileStream.minRememberDuration",
+ ssc.conf.get("spark.streaming.minRememberDuration", "60s")))
+ }
// This is a def so that it works during checkpoint recovery:
private def clock = ssc.scheduler.clock
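The replacement expression resolves the duration in three steps: the new key if set, otherwise any value under the deprecated key, otherwise the hard-coded "60s". A standalone sketch of that fallback chain (the plain SparkConf and the "90s" value stand in for ssc.conf and a real user setting):

import org.apache.spark.SparkConf
import org.apache.spark.streaming.Seconds

// Only the deprecated key is set, as an older job configuration might do.
val conf = new SparkConf().set("spark.streaming.minRememberDuration", "90s")

// New key wins if present; else the deprecated key supplies the default;
// else "60s" applies. Here this yields Seconds(90).
val minRememberDurationS = Seconds(conf.getTimeAsSeconds(
  "spark.streaming.fileStream.minRememberDuration",
  conf.get("spark.streaming.minRememberDuration", "60s")))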