Diffstat (limited to 'examples/src')
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionWithElasticNetExample.scala   4
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionSummaryExample.scala        6
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionWithElasticNetExample.scala 4
3 files changed, 7 insertions, 7 deletions
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionWithElasticNetExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionWithElasticNetExample.scala
index c7352b3e7a..f68aef7082 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionWithElasticNetExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionWithElasticNetExample.scala
@@ -29,11 +29,11 @@ object LinearRegressionWithElasticNetExample {
def main(args: Array[String]): Unit = {
val conf = new SparkConf().setAppName("LinearRegressionWithElasticNetExample")
val sc = new SparkContext(conf)
- val sqlCtx = new SQLContext(sc)
+ val sqlContext = new SQLContext(sc)
// $example on$
// Load training data
- val training = sqlCtx.read.format("libsvm")
+ val training = sqlContext.read.format("libsvm")
.load("data/mllib/sample_linear_regression_data.txt")
val lr = new LinearRegression()
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionSummaryExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionSummaryExample.scala
index 04c60c0c1d..89c5edf1ac 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionSummaryExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionSummaryExample.scala
@@ -30,11 +30,11 @@ object LogisticRegressionSummaryExample {
def main(args: Array[String]): Unit = {
val conf = new SparkConf().setAppName("LogisticRegressionSummaryExample")
val sc = new SparkContext(conf)
- val sqlCtx = new SQLContext(sc)
- import sqlCtx.implicits._
+ val sqlContext = new SQLContext(sc)
+ import sqlContext.implicits._
// Load training data
- val training = sqlCtx.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
+ val training = sqlContext.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
val lr = new LogisticRegression()
.setMaxIter(10)
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionWithElasticNetExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionWithElasticNetExample.scala
index f632960f26..6e27571f1d 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionWithElasticNetExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionWithElasticNetExample.scala
@@ -29,11 +29,11 @@ object LogisticRegressionWithElasticNetExample {
def main(args: Array[String]): Unit = {
val conf = new SparkConf().setAppName("LogisticRegressionWithElasticNetExample")
val sc = new SparkContext(conf)
- val sqlCtx = new SQLContext(sc)
+ val sqlContext = new SQLContext(sc)
// $example on$
// Load training data
- val training = sqlCtx.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
+ val training = sqlContext.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
val lr = new LogisticRegression()
.setMaxIter(10)
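
The change above is purely a rename: each example now builds its SQLContext under the conventional name sqlContext before loading libsvm data and fitting a model. The following is a minimal, self-contained sketch of that post-change pattern, assuming the Spark 1.6-era SQLContext/spark.ml APIs used by these examples; the object name SqlContextNamingSketch and the printed output are illustrative and not part of the diff.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.ml.classification.LogisticRegression
import org.apache.spark.sql.SQLContext

object SqlContextNamingSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("SqlContextNamingSketch")
    val sc = new SparkContext(conf)
    // Consistently named `sqlContext`, as in the updated examples.
    val sqlContext = new SQLContext(sc)

    // Load a libsvm-formatted dataset as a DataFrame.
    val training = sqlContext.read.format("libsvm")
      .load("data/mllib/sample_libsvm_data.txt")

    // Elastic-net logistic regression, mirroring the examples' setup.
    val lr = new LogisticRegression()
      .setMaxIter(10)
      .setRegParam(0.3)
      .setElasticNetParam(0.8)

    val lrModel = lr.fit(training)
    println(s"Coefficients: ${lrModel.coefficients} Intercept: ${lrModel.intercept}")

    sc.stop()
  }
}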