author    asmith26 <asmith26@users.noreply.github.com>  2017-04-09 07:47:23 +0100
committer Sean Owen <sowen@cloudera.com>                2017-04-09 07:47:23 +0100
commit    34fc48fb5976ede00f3f6d8c4d3eec979e4f4d7f (patch)
tree      fc29588ec8560bbbe61e71bda3c708b665f67d11 /examples
parent    e1afc4dcca8ba517f48200c0ecde1152505e41ec (diff)
download  spark-34fc48fb5976ede00f3f6d8c4d3eec979e4f4d7f.tar.gz
          spark-34fc48fb5976ede00f3f6d8c4d3eec979e4f4d7f.tar.bz2
          spark-34fc48fb5976ede00f3f6d8c4d3eec979e4f4d7f.zip
[MINOR] Issue: Change "slice" vs "partition" in exception messages (and code?)
## What changes were proposed in this pull request?

Came across the term "slice" when running some Spark Scala code. A subsequent search indicated that "slices" and "partitions" refer to the same thing; indeed see:

- [This issue](https://issues.apache.org/jira/browse/SPARK-1701)
- [This pull request](https://github.com/apache/spark/pull/2305)
- [This StackOverflow answer](http://stackoverflow.com/questions/23436640/what-is-the-difference-between-an-rdd-partition-and-a-slice) and [this one](http://stackoverflow.com/questions/24269495/what-are-the-differences-between-slices-and-partitions-of-rdds)

This pull request therefore fixes the occurrences of "slice" I came across. Nonetheless, [it would appear](https://github.com/apache/spark/search?utf8=%E2%9C%93&q=slice&type=) there are still many references to "slice"/"slices", which is why I raised this pull request to address the issue (sorry if this is the wrong place; I'm not too familiar with raising Apache issues).

## How was this patch tested?

(Not tested locally; this is only a minor message change.)

Please review http://spark.apache.org/contributing.html before opening a pull request.

Author: asmith26 <asmith26@users.noreply.github.com>

Closes #17565 from asmith26/master.
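To make the terminology concrete, here is a minimal sketch that is illustrative only and not part of the patch (the object and application names are made up): `SparkContext.parallelize` still exposes its parameter as `numSlices`, but that value is simply the number of partitions of the resulting RDD.

```scala
import org.apache.spark.sql.SparkSession

object SlicesVsPartitions {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder.appName("SlicesVsPartitions").getOrCreate()

    // The API parameter is still named "numSlices"...
    val rdd = spark.sparkContext.parallelize(1 to 100, numSlices = 4)

    // ...but it just sets the number of partitions of the resulting RDD.
    println(rdd.getNumPartitions)  // 4

    spark.stop()
  }
}
```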
Diffstat (limited to 'examples')
-rw-r--r--  examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java          2
-rw-r--r--  examples/src/main/java/org/apache/spark/examples/JavaTC.java               2
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala      2
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/MultiBroadcastTest.scala 2
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/SparkALS.scala           2
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/SparkLR.scala            2
6 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java b/examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java
index cb4b265690..37bd8fffbe 100644
--- a/examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java
@@ -26,7 +26,7 @@ import java.util.List;
/**
* Computes an approximation to pi
- * Usage: JavaSparkPi [slices]
+ * Usage: JavaSparkPi [partitions]
*/
public final class JavaSparkPi {
diff --git a/examples/src/main/java/org/apache/spark/examples/JavaTC.java b/examples/src/main/java/org/apache/spark/examples/JavaTC.java
index bde30b84d6..c9ca9c9b3a 100644
--- a/examples/src/main/java/org/apache/spark/examples/JavaTC.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaTC.java
@@ -32,7 +32,7 @@ import org.apache.spark.sql.SparkSession;
/**
* Transitive closure on a graph, implemented in Java.
- * Usage: JavaTC [slices]
+ * Usage: JavaTC [partitions]
*/
public final class JavaTC {
diff --git a/examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala b/examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala
index 86eed3867c..25718f904c 100644
--- a/examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala
@@ -21,7 +21,7 @@ package org.apache.spark.examples
import org.apache.spark.sql.SparkSession
/**
- * Usage: BroadcastTest [slices] [numElem] [blockSize]
+ * Usage: BroadcastTest [partitions] [numElem] [blockSize]
*/
object BroadcastTest {
def main(args: Array[String]) {
diff --git a/examples/src/main/scala/org/apache/spark/examples/MultiBroadcastTest.scala b/examples/src/main/scala/org/apache/spark/examples/MultiBroadcastTest.scala
index 6495a86fcd..e6f33b7adf 100644
--- a/examples/src/main/scala/org/apache/spark/examples/MultiBroadcastTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/MultiBroadcastTest.scala
@@ -23,7 +23,7 @@ import org.apache.spark.sql.SparkSession
/**
- * Usage: MultiBroadcastTest [slices] [numElem]
+ * Usage: MultiBroadcastTest [partitions] [numElem]
*/
object MultiBroadcastTest {
def main(args: Array[String]) {
diff --git a/examples/src/main/scala/org/apache/spark/examples/SparkALS.scala b/examples/src/main/scala/org/apache/spark/examples/SparkALS.scala
index 8a3d08f459..a99ddd9fd3 100644
--- a/examples/src/main/scala/org/apache/spark/examples/SparkALS.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkALS.scala
@@ -100,7 +100,7 @@ object SparkALS {
ITERATIONS = iters.getOrElse("5").toInt
slices = slices_.getOrElse("2").toInt
case _ =>
- System.err.println("Usage: SparkALS [M] [U] [F] [iters] [slices]")
+ System.err.println("Usage: SparkALS [M] [U] [F] [iters] [partitions]")
System.exit(1)
}
diff --git a/examples/src/main/scala/org/apache/spark/examples/SparkLR.scala b/examples/src/main/scala/org/apache/spark/examples/SparkLR.scala
index afa8f58c96..cb2be091ff 100644
--- a/examples/src/main/scala/org/apache/spark/examples/SparkLR.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkLR.scala
@@ -28,7 +28,7 @@ import org.apache.spark.sql.SparkSession
/**
* Logistic regression based classification.
- * Usage: SparkLR [slices]
+ * Usage: SparkLR [partitions]
*
* This is an example implementation for learning how to use Spark. For more conventional use,
* please refer to org.apache.spark.ml.classification.LogisticRegression.
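As the SparkALS hunk above shows, the examples still read the command-line value into a variable named `slices` even though the usage strings now say `[partitions]`. The following is a minimal, untested sketch of that flow under the same assumptions (object and application names are illustrative, not from the patch): the argument is forwarded to `parallelize`, whose `numSlices` parameter determines the partition count of the RDD.

```scala
import org.apache.spark.sql.SparkSession

object PartitionsArgSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder.appName("PartitionsArgSketch").getOrCreate()

    // "[partitions]" on the command line; the example code still calls it "slices".
    val slices = if (args.length > 0) args(0).toInt else 2

    // parallelize's numSlices argument is what the usage strings now call "partitions".
    val rdd = spark.sparkContext.parallelize(1 to 100000, slices)
    println(s"RDD has ${rdd.getNumPartitions} partitions")

    spark.stop()
  }
}
```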