author    Matei Zaharia <matei@eecs.berkeley.edu>  2013-02-25 15:53:43 -0800
committer Matei Zaharia <matei@eecs.berkeley.edu>  2013-02-25 15:53:43 -0800
commit 4d480ec59e8cf268054ed805abcd1e84eca17b41 (patch)
tree   0222c7eb9ed3bd40c4c826413a919acbf1552f28 /streaming
parent 6494cab19dda6cb44af506bd86cc75990ce87f70 (diff)
Fixed something that was reported as a compile error in ScalaDoc.
For some reason, ScalaDoc complained that no such constructor exists for StreamingContext; it doesn't seem to be an actual Scala error, but it prevented sbt publish from working because the docs weren't built.
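The workaround is to stop relying on Scala default arguments in the delegating constructors and to pass every argument explicitly. A minimal sketch of the same pattern, using made-up class and parameter names (Context, JavaContext) rather than the real Spark signatures:

// Primary constructor declares default arguments for the optional settings.
class Context(
    master: String,
    appName: String,
    sparkHome: String = null,
    jars: Seq[String] = Nil,
    environment: Map[String, String] = Map())

// Wrapper whose auxiliary constructor delegates to Context. Passing null,
// Nil and Map() explicitly avoids relying on the default arguments, which
// is what this commit does for JavaStreamingContext.
class JavaContext(val ctx: Context) {
  def this(master: String, appName: String) =
    this(new Context(master, appName, null, Nil, Map()))
}

Callers are unaffected: new JavaContext("local[4]", "example") behaves exactly as before; the only difference is that no default-argument resolution is needed in the delegating constructor, which is what tripped up the doc build.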
Diffstat (limited to 'streaming')
-rw-r--r-- streaming/src/main/scala/spark/streaming/api/java/JavaStreamingContext.scala | 4
-rw-r--r-- streaming/src/main/scala/spark/streaming/util/MasterFailureTest.scala | 2
2 files changed, 3 insertions, 3 deletions
diff --git a/streaming/src/main/scala/spark/streaming/api/java/JavaStreamingContext.scala b/streaming/src/main/scala/spark/streaming/api/java/JavaStreamingContext.scala
index b528ebbc19..755407aecc 100644
--- a/streaming/src/main/scala/spark/streaming/api/java/JavaStreamingContext.scala
+++ b/streaming/src/main/scala/spark/streaming/api/java/JavaStreamingContext.scala
@@ -41,7 +41,7 @@ class JavaStreamingContext(val ssc: StreamingContext) {
    * @param batchDuration The time interval at which streaming data will be divided into batches
    */
   def this(master: String, appName: String, batchDuration: Duration) =
-    this(new StreamingContext(master, appName, batchDuration))
+    this(new StreamingContext(master, appName, batchDuration, null, Nil, Map()))
 
   /**
    * Creates a StreamingContext.
@@ -58,7 +58,7 @@ class JavaStreamingContext(val ssc: StreamingContext) {
     batchDuration: Duration,
     sparkHome: String,
     jars: Array[String]) =
-    this(new StreamingContext(master, appName, batchDuration, sparkHome, jars))
+    this(new StreamingContext(master, appName, batchDuration, sparkHome, jars, Map()))
 
   /**
    * Creates a StreamingContext.
diff --git a/streaming/src/main/scala/spark/streaming/util/MasterFailureTest.scala b/streaming/src/main/scala/spark/streaming/util/MasterFailureTest.scala
index bdd9f4d753..f673e5be15 100644
--- a/streaming/src/main/scala/spark/streaming/util/MasterFailureTest.scala
+++ b/streaming/src/main/scala/spark/streaming/util/MasterFailureTest.scala
@@ -159,7 +159,7 @@ object MasterFailureTest extends Logging {
     // Setup the streaming computation with the given operation
     System.clearProperty("spark.driver.port")
-    var ssc = new StreamingContext("local[4]", "MasterFailureTest", batchDuration)
+    var ssc = new StreamingContext("local[4]", "MasterFailureTest", batchDuration, null, Nil, Map())
     ssc.checkpoint(checkpointDir.toString)
     val inputStream = ssc.textFileStream(testDir.toString)
     val operatedStream = operation(inputStream)