diff options
-rw-r--r-- | core/src/main/scala/org/apache/spark/SparkContext.scala | 2 |
-rw-r--r-- | project/MimaExcludes.scala | 3 |
2 files changed, 4 insertions, 1 deletion
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 81a4d0a4d6..c4541aa376 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1248,7 +1248,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
   }

   /** Get an RDD that has no partitions or elements. */
-  def emptyRDD[T: ClassTag]: EmptyRDD[T] = new EmptyRDD[T](this)
+  def emptyRDD[T: ClassTag]: RDD[T] = new EmptyRDD[T](this)

   // Methods for creating shared variables

diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index a3cfcd20fe..ad878c1892 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -34,6 +34,9 @@ import com.typesafe.tools.mima.core.ProblemFilters._
 object MimaExcludes {
   def excludes(version: String) = version match {
     case v if v.startsWith("2.0") =>
+      Seq(
+        ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.SparkContext.emptyRDD")
+      ) ++
       // When 1.6 is officially released, update this exclusion list.
       Seq(
         MimaBuild.excludeSparkPackage("deploy"),