author    Anant <anant.asty@gmail.com>    2014-06-20 18:54:00 -0700
committer Patrick Wendell <pwendell@gmail.com>    2014-06-20 18:57:24 -0700
commit 010c460d627c1917dc47b09e59fd41172bbf90b3 (patch)
tree   73fd45ff574c2a335198ca50d316588397eedd84 /core
parent a6786424954218fc31d8cc638a5c7d6567b55047 (diff)
[SPARK-2061] Made splits deprecated in JavaRDDLike
The JIRA for the issue can be found at: https://issues.apache.org/jira/browse/SPARK-2061

Most of Spark has moved over to consistently using `partitions` instead of `splits`. We should do likewise: add a `partitions` method to JavaRDDLike and have `splits` just call that. We should also go through all cases where other APIs (e.g. Python) call `splits` and change those to use the newer API.

Author: Anant <anant.asty@gmail.com>

Closes #1062 from anantasty/SPARK-2061 and squashes the following commits:

b83ce6b [Anant] Fixed syntax issue
21f9210 [Anant] Fixed version number in deprecation string
9315b76 [Anant] made related changes to use partitions in python api
8c62dd1 [Anant] Made splits deprecated in JavaRDDLike
Diffstat (limited to 'core')
 core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala | 5 ++++-
 core/src/test/java/org/apache/spark/JavaAPISuite.java           | 2 +-
 2 files changed, 5 insertions(+), 2 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
index 330569a8d8..f917cfd141 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
@@ -43,8 +43,11 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
def rdd: RDD[T]
- /** Set of partitions in this RDD. */
+ @deprecated("Use partitions() instead.", "1.1.0")
def splits: JList[Partition] = new java.util.ArrayList(rdd.partitions.toSeq)
+
+ /** Set of partitions in this RDD. */
+ def partitions: JList[Partition] = new java.util.ArrayList(rdd.partitions.toSeq)
/** The [[org.apache.spark.SparkContext]] that this RDD was created on. */
def context: SparkContext = rdd.context
diff --git a/core/src/test/java/org/apache/spark/JavaAPISuite.java b/core/src/test/java/org/apache/spark/JavaAPISuite.java
index 761f2d6a77..1d7a7be6cf 100644
--- a/core/src/test/java/org/apache/spark/JavaAPISuite.java
+++ b/core/src/test/java/org/apache/spark/JavaAPISuite.java
@@ -741,7 +741,7 @@ public class JavaAPISuite implements Serializable {
public void iterator() {
JavaRDD<Integer> rdd = sc.parallelize(Arrays.asList(1, 2, 3, 4, 5), 2);
TaskContext context = new TaskContext(0, 0, 0, false, new TaskMetrics());
- Assert.assertEquals(1, rdd.iterator(rdd.splits().get(0), context).next().intValue());
+ Assert.assertEquals(1, rdd.iterator(rdd.partitions().get(0), context).next().intValue());
}
@Test