 core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala                    | 2 +-
 core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala                   | 2 +-
 streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala  | 4 ++--
 streaming/src/main/scala/org/apache/spark/streaming/scheduler/RateController.scala  | 8 ++++----
 4 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
index 829fae1d1d..c582488f16 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
@@ -354,7 +354,7 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
    * Return an array that contains all of the elements in this RDD.
    * @deprecated As of Spark 1.0.0, toArray() is deprecated, use {@link #collect()} instead
    */
-  @Deprecated
+  @deprecated("use collect()", "1.0.0")
   def toArray(): JList[T] = collect()
 
   /**
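
Swapping Java's @Deprecated for Scala's @deprecated matters because the Scala annotation carries a replacement message and a since-version, which scalac (with -deprecation) reports at each call site. A minimal sketch of the effect; OldApi is a hypothetical stand-in, not Spark code:

    object OldApi {
      @deprecated("use collect()", "1.0.0")
      def toArray(): Array[Int] = collect()

      def collect(): Array[Int] = Array(1, 2, 3)
    }

    object Caller {
      // Compiling with -deprecation warns here, quoting "use collect()" and "1.0.0".
      val xs = OldApi.toArray()
    }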
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
index 7e9dba42be..dda4216c7e 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
@@ -76,7 +76,7 @@ class SparkHadoopUtil extends Logging {
     }
   }
 
-  @Deprecated
+  @deprecated("use newConfiguration with SparkConf argument", "1.2.0")
   def newConfiguration(): Configuration = newConfiguration(null)
 
   /**
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala
index 808dcc174c..214cd80108 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala
@@ -291,7 +291,7 @@ trait JavaDStreamLike[T, This <: JavaDStreamLike[T, This, R], R <: JavaRDDLike[T
    *
    * @deprecated As of release 0.9.0, replaced by foreachRDD
    */
-  @Deprecated
+  @deprecated("Use foreachRDD", "0.9.0")
   def foreach(foreachFunc: JFunction[R, Void]) {
     foreachRDD(foreachFunc)
   }
@@ -302,7 +302,7 @@ trait JavaDStreamLike[T, This <: JavaDStreamLike[T, This, R], R <: JavaRDDLike[T
    *
    * @deprecated As of release 0.9.0, replaced by foreachRDD
    */
-  @Deprecated
+  @deprecated("Use foreachRDD", "0.9.0")
   def foreach(foreachFunc: JFunction2[R, Time, Void]) {
     foreachRDD(foreachFunc)
   }
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/RateController.scala b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/RateController.scala
index 882ca0676b..a46c0c1b25 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/RateController.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/RateController.scala
@@ -76,9 +76,9 @@ private[streaming] abstract class RateController(val streamUID: Int, rateEstimat
     val elements = batchCompleted.batchInfo.streamIdToInputInfo
 
     for {
-      processingEnd <- batchCompleted.batchInfo.processingEndTime;
-      workDelay <- batchCompleted.batchInfo.processingDelay;
-      waitDelay <- batchCompleted.batchInfo.schedulingDelay;
+      processingEnd <- batchCompleted.batchInfo.processingEndTime
+      workDelay <- batchCompleted.batchInfo.processingDelay
+      waitDelay <- batchCompleted.batchInfo.schedulingDelay
       elems <- elements.get(streamUID).map(_.numRecords)
     } computeAndPublish(processingEnd, elems, workDelay, waitDelay)
   }
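
The dropped semicolons are purely stylistic: inside a braced for comprehension, newlines already separate the generators. The comprehension itself desugars to nested foreach calls over the Options, so computeAndPublish runs only when every field is defined. A minimal sketch with stand-in values, not Spark's real BatchInfo fields:

    val processingEndTime: Option[Long] = Some(1000L)
    val processingDelay: Option[Long] = Some(42L)
    val schedulingDelay: Option[Long] = None

    for {
      processingEnd <- processingEndTime
      workDelay <- processingDelay
      waitDelay <- schedulingDelay // None, so the body below never runs
    } println(s"end=$processingEnd work=$workDelay wait=$waitDelay")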
@@ -86,5 +86,5 @@ private[streaming] abstract class RateController(val streamUID: Int, rateEstimat
 
 object RateController {
   def isBackPressureEnabled(conf: SparkConf): Boolean =
-    conf.getBoolean("spark.streaming.backpressure.enable", false)
+    conf.getBoolean("spark.streaming.backpressure.enabled", false)
 }
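
This last hunk fixes a key typo: the documented property is spark.streaming.backpressure.enabled, so reading the misspelled "...enable" always fell back to the default of false and silently disabled backpressure even for users who set the correct key. A minimal sketch of why the read must match the written key:

    import org.apache.spark.SparkConf

    val conf = new SparkConf().set("spark.streaming.backpressure.enabled", "true")

    conf.getBoolean("spark.streaming.backpressure.enable", false)  // false: wrong key
    conf.getBoolean("spark.streaming.backpressure.enabled", false) // true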