author     Sean Owen <sowen@cloudera.com>  2016-07-30 04:42:38 -0700
committer  Sean Owen <sowen@cloudera.com>  2016-07-30 04:42:38 -0700
commit     0dc4310b470c7e4355c0da67ca3373c3013cc9dd (patch)
tree       9a8ac5aefbb25188958e9ae028c7ffdc117b705a /examples/src
parent     bbc247548ac6faeca15afc05c266cee37ef13416 (diff)
[SPARK-16694][CORE] Use for/foreach rather than map for Unit expressions whose side effects are required
## What changes were proposed in this pull request?

Use foreach/for instead of map where the operation requires execution of the body for its side effects, rather than actually defining a transformation.

## How was this patch tested?

Jenkins

Author: Sean Owen <sowen@cloudera.com>

Closes #14332 from srowen/SPARK-16694.
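The same mechanical change repeats in every file below, so a minimal, self-contained sketch of the before/after idiom may help. `Params`, `parseArgs`, and `run` here are illustrative stand-ins for the examples' scopt parser and entry points, not the real API:

```scala
object ForVsMapExample {

  final case class Params(input: String = "data.txt")

  // Stand-in for scopt's OptionParser.parse, which yields Option[Params].
  def parseArgs(args: Array[String]): Option[Params] =
    args.headOption.map(Params(_)) // a genuine transformation: map is right here

  def run(params: Params): Unit =
    println(s"Running with $params")

  // Before: map is used purely for its side effect. It works, because
  // Option.map is strict, but it allocates a throwaway Option[Unit] and
  // reads as if a value were being transformed.
  def mainBefore(args: Array[String]): Unit =
    parseArgs(args).map { params =>
      run(params)
    }.getOrElse {
      sys.exit(1)
    }

  // After: a pattern match states the intent directly -- run on success,
  // exit on failure -- with no intermediate value.
  def mainAfter(args: Array[String]): Unit =
    parseArgs(args) match {
      case Some(params) => run(params)
      case _ => sys.exit(1)
    }

  def main(args: Array[String]): Unit = mainAfter(args)
}
```

Where no fallback is needed, `parseArgs(args).foreach(run)` or `for (params <- parseArgs(args)) run(params)` expresses the same side-effecting intent.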
Diffstat (limited to 'examples/src')
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/ml/DataFrameExample.scala                   9
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/ml/DecisionTreeExample.scala                9
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/ml/GBTExample.scala                         9
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionExample.scala            9
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionExample.scala          9
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/ml/RandomForestExample.scala                9
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/mllib/BinaryClassification.scala            9
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/mllib/Correlations.scala                    9
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/mllib/CosineSimilarity.scala                9
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/mllib/DecisionTreeRunner.scala              9
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/mllib/DenseKMeans.scala                     9
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/mllib/FPGrowthExample.scala                 9
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostedTreesRunner.scala      9
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/mllib/LDAExample.scala                      10
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala                9
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala                    9
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/mllib/MultivariateSummarizer.scala          9
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/mllib/PowerIterationClusteringExample.scala 9
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/mllib/SampledRDDs.scala                     9
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/mllib/SparseNaiveBayes.scala                9
20 files changed, 80 insertions(+), 101 deletions(-)
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/DataFrameExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/DataFrameExample.scala
index 38c1c1c186..e07c9a4717 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/DataFrameExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/DataFrameExample.scala
@@ -54,14 +54,13 @@ object DataFrameExample {
}
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- }.getOrElse {
- sys.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val spark = SparkSession
.builder
.appName(s"DataFrameExample with $params")
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/DecisionTreeExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/DecisionTreeExample.scala
index de4474555d..1745281c26 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/DecisionTreeExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/DecisionTreeExample.scala
@@ -124,10 +124,9 @@ object DecisionTreeExample {
}
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- }.getOrElse {
- sys.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
@@ -197,7 +196,7 @@ object DecisionTreeExample {
(training, test)
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val spark = SparkSession
.builder
.appName(s"DecisionTreeExample with $params")
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/GBTExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/GBTExample.scala
index a4274ae954..db55298d8e 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/GBTExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/GBTExample.scala
@@ -127,14 +127,13 @@ object GBTExample {
}
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- }.getOrElse {
- sys.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val spark = SparkSession
.builder
.appName(s"GBTExample with $params")
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionExample.scala
index de96fb2979..31ba180335 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionExample.scala
@@ -96,14 +96,13 @@ object LinearRegressionExample {
}
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- }.getOrElse {
- sys.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val spark = SparkSession
.builder
.appName(s"LinearRegressionExample with $params")
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionExample.scala
index c2a87e1ddf..c67b53899c 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionExample.scala
@@ -103,14 +103,13 @@ object LogisticRegressionExample {
}
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- }.getOrElse {
- sys.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val spark = SparkSession
.builder
.appName(s"LogisticRegressionExample with $params")
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/RandomForestExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/RandomForestExample.scala
index 2419dc49cd..a9e07c0705 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/RandomForestExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/RandomForestExample.scala
@@ -133,14 +133,13 @@ object RandomForestExample {
}
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- }.getOrElse {
- sys.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val spark = SparkSession
.builder
.appName(s"RandomForestExample with $params")
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/BinaryClassification.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/BinaryClassification.scala
index 2282bd2b7d..a1a5b59152 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/BinaryClassification.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/BinaryClassification.scala
@@ -95,14 +95,13 @@ object BinaryClassification {
""".stripMargin)
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- } getOrElse {
- sys.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val conf = new SparkConf().setAppName(s"BinaryClassification with $params")
val sc = new SparkContext(conf)
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/Correlations.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/Correlations.scala
index e003f35ed3..0b44c339ef 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/Correlations.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/Correlations.scala
@@ -56,14 +56,13 @@ object Correlations {
""".stripMargin)
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- } getOrElse {
- sys.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val conf = new SparkConf().setAppName(s"Correlations with $params")
val sc = new SparkContext(conf)
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/CosineSimilarity.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/CosineSimilarity.scala
index 5ff3d36242..681465d217 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/CosineSimilarity.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/CosineSimilarity.scala
@@ -68,14 +68,13 @@ object CosineSimilarity {
""".stripMargin)
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- } getOrElse {
- System.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val conf = new SparkConf().setAppName("CosineSimilarity")
val sc = new SparkContext(conf)
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/DecisionTreeRunner.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/DecisionTreeRunner.scala
index a85aa2cac9..0ad0465a02 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/DecisionTreeRunner.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/DecisionTreeRunner.scala
@@ -149,10 +149,9 @@ object DecisionTreeRunner {
}
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- }.getOrElse {
- sys.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
@@ -253,7 +252,7 @@ object DecisionTreeRunner {
(training, test, numClasses)
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val conf = new SparkConf().setAppName(s"DecisionTreeRunner with $params")
val sc = new SparkContext(conf)
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/DenseKMeans.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/DenseKMeans.scala
index 380d85d60e..b228827e58 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/DenseKMeans.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/DenseKMeans.scala
@@ -69,14 +69,13 @@ object DenseKMeans {
.action((x, c) => c.copy(input = x))
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- }.getOrElse {
- sys.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val conf = new SparkConf().setAppName(s"DenseKMeans with $params")
val sc = new SparkContext(conf)
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/FPGrowthExample.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/FPGrowthExample.scala
index a7a3eade04..6435abc127 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/FPGrowthExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/FPGrowthExample.scala
@@ -53,14 +53,13 @@ object FPGrowthExample {
.action((x, c) => c.copy(input = x))
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- }.getOrElse {
- sys.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val conf = new SparkConf().setAppName(s"FPGrowthExample with $params")
val sc = new SparkContext(conf)
val transactions = sc.textFile(params.input).map(_.split(" ")).cache()
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostedTreesRunner.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostedTreesRunner.scala
index 90e4687c1f..4020c6b6bc 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostedTreesRunner.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostedTreesRunner.scala
@@ -85,14 +85,13 @@ object GradientBoostedTreesRunner {
}
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- }.getOrElse {
- sys.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val conf = new SparkConf().setAppName(s"GradientBoostedTreesRunner with $params")
val sc = new SparkContext(conf)
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/LDAExample.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/LDAExample.scala
index 3fbf8e0333..7e50b122e6 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/LDAExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/LDAExample.scala
@@ -98,15 +98,13 @@ object LDAExample {
.action((x, c) => c.copy(input = c.input :+ x))
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- }.getOrElse {
- parser.showUsageAsError
- sys.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
- private def run(params: Params) {
+ private def run(params: Params): Unit = {
val conf = new SparkConf().setAppName(s"LDAExample with $params")
val sc = new SparkContext(conf)
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala
index a70203028c..86aec363ea 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala
@@ -82,14 +82,13 @@ object LinearRegression {
""".stripMargin)
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- } getOrElse {
- sys.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val conf = new SparkConf().setAppName(s"LinearRegression with $params")
val sc = new SparkContext(conf)
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala
index 09750e53cb..9bd6927fb7 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala
@@ -89,14 +89,13 @@ object MovieLensALS {
""".stripMargin)
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- } getOrElse {
- System.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val conf = new SparkConf().setAppName(s"MovieLensALS with $params")
if (params.kryo) {
conf.registerKryoClasses(Array(classOf[mutable.BitSet], classOf[Rating]))
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/MultivariateSummarizer.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/MultivariateSummarizer.scala
index 3c598172da..f9e47e485e 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/MultivariateSummarizer.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/MultivariateSummarizer.scala
@@ -57,14 +57,13 @@ object MultivariateSummarizer {
""".stripMargin)
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- } getOrElse {
- sys.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val conf = new SparkConf().setAppName(s"MultivariateSummarizer with $params")
val sc = new SparkContext(conf)
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/PowerIterationClusteringExample.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/PowerIterationClusteringExample.scala
index a81c9b383d..986496c0d9 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/PowerIterationClusteringExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/PowerIterationClusteringExample.scala
@@ -77,14 +77,13 @@ object PowerIterationClusteringExample {
.action((x, c) => c.copy(maxIterations = x))
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- }.getOrElse {
- sys.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val conf = new SparkConf()
.setMaster("local")
.setAppName(s"PowerIterationClustering with $params")
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/SampledRDDs.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/SampledRDDs.scala
index 0da4005977..ba3deae5d6 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/SampledRDDs.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/SampledRDDs.scala
@@ -52,14 +52,13 @@ object SampledRDDs {
""".stripMargin)
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- } getOrElse {
- sys.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val conf = new SparkConf().setAppName(s"SampledRDDs with $params")
val sc = new SparkContext(conf)
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/SparseNaiveBayes.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/SparseNaiveBayes.scala
index f81fc292a3..b76add2f9b 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/SparseNaiveBayes.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/SparseNaiveBayes.scala
@@ -60,14 +60,13 @@ object SparseNaiveBayes {
.action((x, c) => c.copy(input = x))
}
- parser.parse(args, defaultParams).map { params =>
- run(params)
- }.getOrElse {
- sys.exit(1)
+ parser.parse(args, defaultParams) match {
+ case Some(params) => run(params)
+ case _ => sys.exit(1)
}
}
- def run(params: Params) {
+ def run(params: Params): Unit = {
val conf = new SparkConf().setAppName(s"SparseNaiveBayes with $params")
val sc = new SparkContext(conf)