about | summary | refs | log | tree | commit | diff
path: root/streaming
diff options
context:
space:
mode:
authorAnand Avati <avati@redhat.com>2014-08-15 08:53:52 -0700
committerPatrick Wendell <pwendell@gmail.com>2014-08-15 08:53:52 -0700
commit7589c39d39a8d0744fb689e5752ee8e0108a81eb (patch)
tree76f6a814d9fe85f0fdf36fd44c675f26a228f3f9 /streaming
parentfba8ec39ccf455a4a03504445bad9af420915b4f (diff)
downloadspark-7589c39d39a8d0744fb689e5752ee8e0108a81eb.tar.gz
spark-7589c39d39a8d0744fb689e5752ee8e0108a81eb.tar.bz2
spark-7589c39d39a8d0744fb689e5752ee8e0108a81eb.zip
[SPARK-2924] remove default args to overloaded methods
Not supported in Scala 2.11. Split them into separate methods instead. Author: Anand Avati <avati@redhat.com> Closes #1704 from avati/SPARK-1812-default-args and squashes the following commits: 3e3924a [Anand Avati] SPARK-1812: Add Mima excludes for the broken ABI 901dfc7 [Anand Avati] SPARK-1812: core - Fix overloaded methods with default arguments 07f00af [Anand Avati] SPARK-1812: streaming - Fix overloaded methods with default arguments
Diffstat (limited to 'streaming')
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala8
1 file changed, 7 insertions, 1 deletion
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala b/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
index e0677b795c..101cec1c7a 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
@@ -98,9 +98,15 @@ class StreamingContext private[streaming] (
* @param hadoopConf Optional, configuration object if necessary for reading from
* HDFS compatible filesystems
*/
- def this(path: String, hadoopConf: Configuration = new Configuration) =
+ def this(path: String, hadoopConf: Configuration) =
this(null, CheckpointReader.read(path, new SparkConf(), hadoopConf).get, null)
+ /**
+ * Recreate a StreamingContext from a checkpoint file.
+ * @param path Path to the directory that was specified as the checkpoint directory
+ */
+ def this(path: String) = this(path, new Configuration)
+
if (sc_ == null && cp_ == null) {
throw new Exception("Spark Streaming cannot be initialized with " +
"both SparkContext and checkpoint as null")