author     wm624@hotmail.com <wm624@hotmail.com>  2016-04-27 11:56:57 -0700
committer  Joseph K. Bradley <joseph@databricks.com>  2016-04-27 11:56:57 -0700
commit     c74fd1e546440e604c546894e956c44fb6021156 (patch)
tree       6bd3d06327c0bfb9badaaacca225a8a0c2ac928d /examples/src
parent     450136ec0dab16a12e514c842f9062a6979ee9aa (diff)
[SPARK-14937][ML][DOCUMENT] spark.ml LogisticRegression sqlCtx in scala is inconsistent with java and python
## What changes were proposed in this pull request?

In the spark.ml documentation, the LogisticRegression Scala example uses sqlCtx. This is inconsistent with the Java and Python examples, which use sqlContext. In addition, a user cannot copy & paste the example into spark-shell, because sqlCtx does not exist there while sqlContext does. This patch updates the Scala examples referenced by the spark.ml documentation to use sqlContext.

## How was this patch tested?

Compiled the modified example Scala files; they pass compilation.

Author: wm624@hotmail.com <wm624@hotmail.com>

Closes #12717 from wangmiao1981/doc.
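For reference, a minimal sketch of the corrected pattern the examples now follow, assuming the Spark 1.x SQLContext API these examples target; the app name below is illustrative. Because spark-shell already defines `sc` and `sqlContext`, code written against `sqlContext` can be pasted into the shell unchanged.

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

// Standalone setup; spark-shell already provides `sc` and `sqlContext`.
val conf = new SparkConf().setAppName("SqlContextNamingSketch")  // illustrative app name
val sc = new SparkContext(conf)
val sqlContext = new SQLContext(sc)
import sqlContext.implicits._

// Load a LIBSVM file into a DataFrame, as the ml examples do.
// Using `sqlContext` (not `sqlCtx`) lets this line run in spark-shell as-is.
val training = sqlContext.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
```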
Diffstat (limited to 'examples/src')
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionWithElasticNetExample.scala    | 4
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionSummaryExample.scala         | 6
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionWithElasticNetExample.scala  | 4
3 files changed, 7 insertions, 7 deletions
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionWithElasticNetExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionWithElasticNetExample.scala
index c7352b3e7a..f68aef7082 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionWithElasticNetExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionWithElasticNetExample.scala
@@ -29,11 +29,11 @@ object LinearRegressionWithElasticNetExample {
   def main(args: Array[String]): Unit = {
     val conf = new SparkConf().setAppName("LinearRegressionWithElasticNetExample")
     val sc = new SparkContext(conf)
-    val sqlCtx = new SQLContext(sc)
+    val sqlContext = new SQLContext(sc)
 
     // $example on$
     // Load training data
-    val training = sqlCtx.read.format("libsvm")
+    val training = sqlContext.read.format("libsvm")
       .load("data/mllib/sample_linear_regression_data.txt")
 
     val lr = new LinearRegression()
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionSummaryExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionSummaryExample.scala
index 04c60c0c1d..89c5edf1ac 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionSummaryExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionSummaryExample.scala
@@ -30,11 +30,11 @@ object LogisticRegressionSummaryExample {
   def main(args: Array[String]): Unit = {
     val conf = new SparkConf().setAppName("LogisticRegressionSummaryExample")
     val sc = new SparkContext(conf)
-    val sqlCtx = new SQLContext(sc)
-    import sqlCtx.implicits._
+    val sqlContext = new SQLContext(sc)
+    import sqlContext.implicits._
 
     // Load training data
-    val training = sqlCtx.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
+    val training = sqlContext.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
 
     val lr = new LogisticRegression()
       .setMaxIter(10)
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionWithElasticNetExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionWithElasticNetExample.scala
index f632960f26..6e27571f1d 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionWithElasticNetExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionWithElasticNetExample.scala
@@ -29,11 +29,11 @@ object LogisticRegressionWithElasticNetExample {
   def main(args: Array[String]): Unit = {
     val conf = new SparkConf().setAppName("LogisticRegressionWithElasticNetExample")
     val sc = new SparkContext(conf)
-    val sqlCtx = new SQLContext(sc)
+    val sqlContext = new SQLContext(sc)
 
     // $example on$
     // Load training data
-    val training = sqlCtx.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
+    val training = sqlContext.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
 
     val lr = new LogisticRegression()
       .setMaxIter(10)