path: root/streaming/src
author    Anand Avati <avati@redhat.com>  2014-08-15 08:53:52 -0700
committer Patrick Wendell <pwendell@gmail.com>  2014-08-15 08:54:04 -0700
commit    debb3e3df601bc64c97701565d2c992855f6cce9 (patch)
tree      99f205c81ca901364ab956d02ae937b510c381dc /streaming/src
parent    3f23d2a38c3b6559902bc2ab6975ff6b0bec875e (diff)
[SPARK-2924] remove default args to overloaded methods
Not supported in Scala 2.11. Split them into separate methods instead.

Author: Anand Avati <avati@redhat.com>

Closes #1704 from avati/SPARK-1812-default-args and squashes the following commits:

3e3924a [Anand Avati] SPARK-1812: Add Mima excludes for the broken ABI
901dfc7 [Anand Avati] SPARK-1812: core - Fix overloaded methods with default arguments
07f00af [Anand Avati] SPARK-1812: streaming - Fix overloaded methods with default arguments

(cherry picked from commit 7589c39d39a8d0744fb689e5752ee8e0108a81eb)
Signed-off-by: Patrick Wendell <pwendell@gmail.com>
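For context on the restriction behind this change: Scala does not permit default arguments on more than one overloaded alternative of a method (constructors included), and Scala 2.11 enforces this where 2.10 had been lenient. The workaround applied here is to drop the default and add an explicit overload that supplies the former default value. A minimal sketch of the pattern, using a hypothetical class and method (only the StreamingContext constructors below come from this patch):

  import org.apache.hadoop.conf.Configuration

  class CheckpointLoader {
    // With more than one alternative declaring defaults, e.g.
    //   def load(path: String, conf: Configuration = new Configuration): Unit = ...
    //   def load(path: String, conf: Configuration, verbose: Boolean = false): Unit = ...
    // scalac 2.11 rejects the class with an error along the lines of:
    // "multiple overloaded alternatives of method load define default arguments".

    // The fix used in this patch: defaults removed, full overload first...
    def load(path: String, conf: Configuration): Unit =
      println(s"loading checkpoint at $path with ${conf.size} Hadoop settings")

    // ...and a convenience overload that supplies the former default value.
    def load(path: String): Unit = load(path, new Configuration)
  }

Since the convenience overload simply delegates, behavior at existing call sites is unchanged; only the binary interface shifts, which is why the commit also adds Mima excludes for the broken ABI.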
Diffstat (limited to 'streaming/src')
-rw-r--r--  streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala | 8 +++++++-
1 file changed, 7 insertions(+), 1 deletion(-)
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala b/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
index e0677b795c..101cec1c7a 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
@@ -98,9 +98,15 @@ class StreamingContext private[streaming] (
    * @param hadoopConf Optional, configuration object if necessary for reading from
    *                   HDFS compatible filesystems
    */
-  def this(path: String, hadoopConf: Configuration = new Configuration) =
+  def this(path: String, hadoopConf: Configuration) =
     this(null, CheckpointReader.read(path, new SparkConf(), hadoopConf).get, null)
 
+  /**
+   * Recreate a StreamingContext from a checkpoint file.
+   * @param path Path to the directory that was specified as the checkpoint directory
+   */
+  def this(path: String) = this(path, new Configuration)
+
   if (sc_ == null && cp_ == null) {
     throw new Exception("Spark Streaming cannot be initialized with " +
       "both SparkContext and checkpoint as null")