author    Patrick Wendell <pwendell@gmail.com>  2013-01-14 13:31:49 -0800
committer Patrick Wendell <pwendell@gmail.com>  2013-01-14 13:31:49 -0800
commit    38d9a3a8630a38aa0cb9e6a13256816cfa9ab5a6 (patch)
tree      94e5f66879b1e075c50dfaaf3a0fb17992cc9041 /streaming/src
parent    ae5290f4a2fbeb51f5dc6e7add38f9c012ab7311 (diff)
Remove AnyRef constraint in updateState
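
For context: the S <: AnyRef upper bound rejected value types such as Int or Long as the state type S, so a simple running count had to be wrapped in a reference type. Below is a minimal, hypothetical usage sketch of updateStateByKey with S = Int after this change; the StreamingContext constructor, the StreamingContext._ import, and socketTextStream are assumptions about this snapshot of the API and may differ.

import spark.streaming.{Seconds, StreamingContext}
import spark.streaming.StreamingContext._   // assumed import for the implicit conversion to PairDStreamFunctions

val ssc = new StreamingContext("local[2]", "RunningCount", Seconds(1))
ssc.checkpoint("checkpoint")                 // updateStateByKey requires a checkpoint directory

val words = ssc.socketTextStream("localhost", 9999)   // hypothetical input source
val pairs = words.flatMap(_.split(" ")).map(word => (word, 1))

// With the AnyRef bound removed, S = Int (a value type) satisfies [S: ClassManifest].
val counts = pairs.updateStateByKey[Int] { (values: Seq[Int], state: Option[Int]) =>
  Some(state.getOrElse(0) + values.sum)
}
counts.print()

ssc.start()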
Diffstat (limited to 'streaming/src')
-rw-r--r--  streaming/src/main/scala/spark/streaming/PairDStreamFunctions.scala  8
-rw-r--r--  streaming/src/main/scala/spark/streaming/dstream/StateDStream.scala  2
2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/streaming/src/main/scala/spark/streaming/PairDStreamFunctions.scala b/streaming/src/main/scala/spark/streaming/PairDStreamFunctions.scala
index f63279512b..fbcf061126 100644
--- a/streaming/src/main/scala/spark/streaming/PairDStreamFunctions.scala
+++ b/streaming/src/main/scala/spark/streaming/PairDStreamFunctions.scala
@@ -363,7 +363,7 @@ extends Serializable {
* corresponding state key-value pair will be eliminated.
* @tparam S State type
*/
- def updateStateByKey[S <: AnyRef : ClassManifest](
+ def updateStateByKey[S: ClassManifest](
updateFunc: (Seq[V], Option[S]) => Option[S]
): DStream[(K, S)] = {
updateStateByKey(updateFunc, defaultPartitioner())
@@ -378,7 +378,7 @@ extends Serializable {
* @param numPartitions Number of partitions of each RDD in the new DStream.
* @tparam S State type
*/
- def updateStateByKey[S <: AnyRef : ClassManifest](
+ def updateStateByKey[S: ClassManifest](
updateFunc: (Seq[V], Option[S]) => Option[S],
numPartitions: Int
): DStream[(K, S)] = {
@@ -394,7 +394,7 @@ extends Serializable {
* @param partitioner Partitioner for controlling the partitioning of each RDD in the new DStream.
* @tparam S State type
*/
- def updateStateByKey[S <: AnyRef : ClassManifest](
+ def updateStateByKey[S: ClassManifest](
updateFunc: (Seq[V], Option[S]) => Option[S],
partitioner: Partitioner
): DStream[(K, S)] = {
@@ -417,7 +417,7 @@ extends Serializable {
* @param rememberPartitioner Whether to remember the partitioner object in the generated RDDs.
* @tparam S State type
*/
- def updateStateByKey[S <: AnyRef : ClassManifest](
+ def updateStateByKey[S: ClassManifest](
updateFunc: (Iterator[(K, Seq[V], Option[S])]) => Iterator[(K, S)],
partitioner: Partitioner,
rememberPartitioner: Boolean
diff --git a/streaming/src/main/scala/spark/streaming/dstream/StateDStream.scala b/streaming/src/main/scala/spark/streaming/dstream/StateDStream.scala
index a1ec2f5454..b4506c74aa 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/StateDStream.scala
+++ b/streaming/src/main/scala/spark/streaming/dstream/StateDStream.scala
@@ -7,7 +7,7 @@ import spark.storage.StorageLevel
import spark.streaming.{Duration, Time, DStream}
private[streaming]
-class StateDStream[K: ClassManifest, V: ClassManifest, S <: AnyRef : ClassManifest](
+class StateDStream[K: ClassManifest, V: ClassManifest, S: ClassManifest](
parent: DStream[(K, V)],
updateFunc: (Iterator[(K, Seq[V], Option[S])]) => Iterator[(K, S)],
partitioner: Partitioner,