about summary refs log tree commit diff
diff options
context:
space:
mode:
author    Tathagata Das <tathagata.das1565@gmail.com>    2015-08-18 23:37:57 -0700
committer Tathagata Das <tathagata.das1565@gmail.com>    2015-08-18 23:38:13 -0700
commit   392bd19d678567751cd3844d9d166a7491c5887e (patch)
tree     fba5e7aa02cbc10ed64b022732ce4fb004e1ea80
parent   3ceee5572fc7be690d3009f4d43af9e4611c0fa1 (diff)
download spark-392bd19d678567751cd3844d9d166a7491c5887e.tar.gz
spark-392bd19d678567751cd3844d9d166a7491c5887e.tar.bz2
spark-392bd19d678567751cd3844d9d166a7491c5887e.zip
[SPARK-9967] [SPARK-10099] [STREAMING] Renamed conf spark.streaming.backpressure.{enable-->enabled} and fixed deprecated annotations
Small changes:
- Renamed conf spark.streaming.backpressure.{enable --> enabled}
- Changed Java @Deprecated annotations to Scala @deprecated annotations with more information.

Author: Tathagata Das <tathagata.das1565@gmail.com>

Closes #8299 from tdas/SPARK-9967.

(cherry picked from commit bc9a0e03235865d2ec33372f6400dec8c770778a)
Signed-off-by: Tathagata Das <tathagata.das1565@gmail.com>
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala                      | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala                    | 2
-rw-r--r--  streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala   | 4
-rw-r--r--  streaming/src/main/scala/org/apache/spark/streaming/scheduler/RateController.scala   | 8
4 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
index 829fae1d1d..c582488f16 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
@@ -354,7 +354,7 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
* Return an array that contains all of the elements in this RDD.
* @deprecated As of Spark 1.0.0, toArray() is deprecated, use {@link #collect()} instead
*/
- @Deprecated
+ @deprecated("use collect()", "1.0.0")
def toArray(): JList[T] = collect()
/**
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
index 7e9dba42be..dda4216c7e 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
@@ -76,7 +76,7 @@ class SparkHadoopUtil extends Logging {
}
}
- @Deprecated
+ @deprecated("use newConfiguration with SparkConf argument", "1.2.0")
def newConfiguration(): Configuration = newConfiguration(null)
/**
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala
index 808dcc174c..214cd80108 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala
@@ -291,7 +291,7 @@ trait JavaDStreamLike[T, This <: JavaDStreamLike[T, This, R], R <: JavaRDDLike[T
*
* @deprecated As of release 0.9.0, replaced by foreachRDD
*/
- @Deprecated
+ @deprecated("Use foreachRDD", "0.9.0")
def foreach(foreachFunc: JFunction[R, Void]) {
foreachRDD(foreachFunc)
}
@@ -302,7 +302,7 @@ trait JavaDStreamLike[T, This <: JavaDStreamLike[T, This, R], R <: JavaRDDLike[T
*
* @deprecated As of release 0.9.0, replaced by foreachRDD
*/
- @Deprecated
+ @deprecated("Use foreachRDD", "0.9.0")
def foreach(foreachFunc: JFunction2[R, Time, Void]) {
foreachRDD(foreachFunc)
}
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/RateController.scala b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/RateController.scala
index 882ca0676b..a46c0c1b25 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/RateController.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/RateController.scala
@@ -76,9 +76,9 @@ private[streaming] abstract class RateController(val streamUID: Int, rateEstimat
val elements = batchCompleted.batchInfo.streamIdToInputInfo
for {
- processingEnd <- batchCompleted.batchInfo.processingEndTime;
- workDelay <- batchCompleted.batchInfo.processingDelay;
- waitDelay <- batchCompleted.batchInfo.schedulingDelay;
+ processingEnd <- batchCompleted.batchInfo.processingEndTime
+ workDelay <- batchCompleted.batchInfo.processingDelay
+ waitDelay <- batchCompleted.batchInfo.schedulingDelay
elems <- elements.get(streamUID).map(_.numRecords)
} computeAndPublish(processingEnd, elems, workDelay, waitDelay)
}
@@ -86,5 +86,5 @@ private[streaming] abstract class RateController(val streamUID: Int, rateEstimat
object RateController {
def isBackPressureEnabled(conf: SparkConf): Boolean =
- conf.getBoolean("spark.streaming.backpressure.enable", false)
+ conf.getBoolean("spark.streaming.backpressure.enabled", false)
}