about summary refs log tree commit diff
path: root/examples
diff options
context:
space:
mode:
Diffstat (limited to 'examples')
-rw-r--r-- examples/src/main/scala/org/apache/spark/examples/ml/DecisionTreeExample.scala | 4
-rw-r--r-- examples/src/main/scala/org/apache/spark/examples/ml/GBTExample.scala | 4
-rw-r--r-- examples/src/main/scala/org/apache/spark/examples/ml/RandomForestExample.scala | 4
-rw-r--r-- examples/src/main/scala/org/apache/spark/examples/mllib/LDAExample.scala | 4
4 files changed, 12 insertions, 4 deletions
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/DecisionTreeExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/DecisionTreeExample.scala
index 1745281c26..f736ceed44 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/DecisionTreeExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/DecisionTreeExample.scala
@@ -18,6 +18,8 @@
// scalastyle:off println
package org.apache.spark.examples.ml
+import java.util.Locale
+
import scala.collection.mutable
import scala.language.reflectiveCalls
@@ -203,7 +205,7 @@ object DecisionTreeExample {
.getOrCreate()
params.checkpointDir.foreach(spark.sparkContext.setCheckpointDir)
- val algo = params.algo.toLowerCase
+ val algo = params.algo.toLowerCase(Locale.ROOT)
println(s"DecisionTreeExample with parameters:\n$params")
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/GBTExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/GBTExample.scala
index db55298d8e..ed598d0d7d 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/GBTExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/GBTExample.scala
@@ -18,6 +18,8 @@
// scalastyle:off println
package org.apache.spark.examples.ml
+import java.util.Locale
+
import scala.collection.mutable
import scala.language.reflectiveCalls
@@ -140,7 +142,7 @@ object GBTExample {
.getOrCreate()
params.checkpointDir.foreach(spark.sparkContext.setCheckpointDir)
- val algo = params.algo.toLowerCase
+ val algo = params.algo.toLowerCase(Locale.ROOT)
println(s"GBTExample with parameters:\n$params")
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/RandomForestExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/RandomForestExample.scala
index a9e07c0705..8fd46c37e2 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/RandomForestExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/RandomForestExample.scala
@@ -18,6 +18,8 @@
// scalastyle:off println
package org.apache.spark.examples.ml
+import java.util.Locale
+
import scala.collection.mutable
import scala.language.reflectiveCalls
@@ -146,7 +148,7 @@ object RandomForestExample {
.getOrCreate()
params.checkpointDir.foreach(spark.sparkContext.setCheckpointDir)
- val algo = params.algo.toLowerCase
+ val algo = params.algo.toLowerCase(Locale.ROOT)
println(s"RandomForestExample with parameters:\n$params")
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/LDAExample.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/LDAExample.scala
index b923e627f2..cd77ecf990 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/LDAExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/LDAExample.scala
@@ -18,6 +18,8 @@
// scalastyle:off println
package org.apache.spark.examples.mllib
+import java.util.Locale
+
import org.apache.log4j.{Level, Logger}
import scopt.OptionParser
@@ -131,7 +133,7 @@ object LDAExample {
// Run LDA.
val lda = new LDA()
- val optimizer = params.algorithm.toLowerCase match {
+ val optimizer = params.algorithm.toLowerCase(Locale.ROOT) match {
case "em" => new EMLDAOptimizer
// add (1.0 / actualCorpusSize) to MiniBatchFraction be more robust on tiny datasets.
case "online" => new OnlineLDAOptimizer().setMiniBatchFraction(0.05 + 1.0 / actualCorpusSize)