aboutsummaryrefslogtreecommitdiff
path: root/project/MimaBuild.scala
diff options
context:
space:
mode:
authorTathagata Das <tathagata.das1565@gmail.com>2014-04-21 19:04:49 -0700
committerPatrick Wendell <pwendell@gmail.com>2014-04-21 19:04:49 -0700
commit04c37b6f749dc2418cc28c89964cdc687dfcbd51 (patch)
treeba434fee57cba6fe201e83ad9c049fded5e09bc0 /project/MimaBuild.scala
parent5a5b3346c79abb659260284fed0ace51942f3193 (diff)
downloadspark-04c37b6f749dc2418cc28c89964cdc687dfcbd51.tar.gz
spark-04c37b6f749dc2418cc28c89964cdc687dfcbd51.tar.bz2
spark-04c37b6f749dc2418cc28c89964cdc687dfcbd51.zip
[SPARK-1332] Improve Spark Streaming's Network Receiver and InputDStream API [WIP]
The current Network Receiver API makes it slightly complicated to write a new receiver as one needs to create an instance of BlockGenerator as shown in SocketReceiver https://github.com/apache/spark/blob/master/streaming/src/main/scala/org/apache/spark/streaming/dstream/SocketInputDStream.scala#L51 Exposing the BlockGenerator interface has made it harder to improve the receiving process. The API of NetworkReceiver (which was not a very stable API anyway) needs to be changed if we are to ensure future stability. Additionally, the functions like streamingContext.socketStream that create input streams return DStream objects. That makes it hard to expose functionality (say, rate limits) unique to input dstreams. They should return InputDStream or NetworkInputDStream. This is still not yet implemented. This PR is blocked on the graceful shutdown PR #247 Author: Tathagata Das <tathagata.das1565@gmail.com> Closes #300 from tdas/network-receiver-api and squashes the following commits: ea27b38 [Tathagata Das] Merge remote-tracking branch 'apache-github/master' into network-receiver-api 3a4777c [Tathagata Das] Renamed NetworkInputDStream to ReceiverInputDStream, and ActorReceiver related stuff. 838dd39 [Tathagata Das] Added more events to the StreamingListener to report errors and stopped receivers. a75c7a6 [Tathagata Das] Address some PR comments and fixed other issues. 91bfa72 [Tathagata Das] Fixed bugs. 8533094 [Tathagata Das] Scala style fixes. 028bde6 [Tathagata Das] Further refactored receiver to allow restarting of a receiver. 43f5290 [Tathagata Das] Made functions that create input streams return InputDStream and NetworkInputDStream, for both Scala and Java. 2c94579 [Tathagata Das] Fixed graceful shutdown by removing interrupts on receiving thread. 
9e37a0b [Tathagata Das] Merge remote-tracking branch 'apache-github/master' into network-receiver-api 3223e95 [Tathagata Das] Refactored the code that runs the NetworkReceiver into further classes and traits to make them more testable. a36cc48 [Tathagata Das] Refactored the NetworkReceiver API for future stability.
Diffstat (limited to 'project/MimaBuild.scala')
-rw-r--r--project/MimaBuild.scala29
1 files changed, 15 insertions, 14 deletions
diff --git a/project/MimaBuild.scala b/project/MimaBuild.scala
index 9cb31d7044..d540dc0a98 100644
--- a/project/MimaBuild.scala
+++ b/project/MimaBuild.scala
@@ -38,6 +38,7 @@ object MimaBuild {
IO.read(excludeFile).split("\n")
}
+ // Exclude a single class and its corresponding object
def excludeClass(className: String) = {
Seq(
excludePackage(className),
@@ -48,7 +49,16 @@ object MimaBuild {
ProblemFilters.exclude[MissingTypesProblem](className + "$")
)
}
- def excludeSparkClass(className: String) = excludeClass("org.apache.spark." + className)
+
+ // Exclude a Spark class, i.e. one in the package org.apache.spark
+ def excludeSparkClass(className: String) = {
+ excludeClass("org.apache.spark." + className)
+ }
+
+ // Exclude a Spark package, i.e. one under the package org.apache.spark
+ def excludeSparkPackage(packageName: String) = {
+ excludePackage("org.apache.spark." + packageName)
+ }
val packagePrivateExcludes = packagePrivateList.flatMap(excludeClass)
@@ -58,10 +68,9 @@ object MimaBuild {
SparkBuild.SPARK_VERSION match {
case v if v.startsWith("1.0") =>
Seq(
- excludePackage("org.apache.spark.api.java"),
- excludePackage("org.apache.spark.streaming.api.java"),
- excludePackage("org.apache.spark.streaming.scheduler"),
- excludePackage("org.apache.spark.mllib")
+ excludeSparkPackage("api.java"),
+ excludeSparkPackage("mllib"),
+ excludeSparkPackage("streaming")
) ++
excludeSparkClass("rdd.ClassTags") ++
excludeSparkClass("util.XORShiftRandom") ++
@@ -69,14 +78,7 @@ object MimaBuild {
excludeSparkClass("mllib.optimization.SquaredGradient") ++
excludeSparkClass("mllib.regression.RidgeRegressionWithSGD") ++
excludeSparkClass("mllib.regression.LassoWithSGD") ++
- excludeSparkClass("mllib.regression.LinearRegressionWithSGD") ++
- excludeSparkClass("streaming.dstream.NetworkReceiver") ++
- excludeSparkClass("streaming.dstream.NetworkReceiver#NetworkReceiverActor") ++
- excludeSparkClass("streaming.dstream.NetworkReceiver#BlockGenerator") ++
- excludeSparkClass("streaming.dstream.NetworkReceiver#BlockGenerator#Block") ++
- excludeSparkClass("streaming.dstream.ReportError") ++
- excludeSparkClass("streaming.dstream.ReportBlock") ++
- excludeSparkClass("streaming.dstream.DStream")
+ excludeSparkClass("mllib.regression.LinearRegressionWithSGD")
case _ => Seq()
}
@@ -87,5 +89,4 @@ object MimaBuild {
previousArtifact := None,
binaryIssueFilters ++= ignoredABIProblems(sparkHome)
)
-
}