Diffstat (limited to 'python')
-rw-r--r--  python/pyspark/ml/classification.py  2
-rw-r--r--  python/pyspark/ml/regression.py      2
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/python/pyspark/ml/classification.py b/python/pyspark/ml/classification.py
index 4a2982e204..5599b8f3ec 100644
--- a/python/pyspark/ml/classification.py
+++ b/python/pyspark/ml/classification.py
@@ -49,7 +49,7 @@ class LogisticRegression(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredicti
... Row(label=0.0, weight=2.0, features=Vectors.sparse(1, [], []))]).toDF()
>>> lr = LogisticRegression(maxIter=5, regParam=0.01, weightCol="weight")
>>> model = lr.fit(df)
- >>> model.weights
+ >>> model.coefficients
DenseVector([5.5...])
>>> model.intercept
-2.68...
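
Note: a minimal sketch of exercising the renamed attribute shown in the doctest above. It assumes a Spark 2.x-style local SparkSession and pyspark.ml.linalg (the original doctest builds the DataFrame with sc.parallelize(...).toDF() instead); the attribute rename is model.weights -> model.coefficients.

    # Hedged sketch; assumes a local SparkSession is available.
    from pyspark.sql import SparkSession, Row
    from pyspark.ml.linalg import Vectors
    from pyspark.ml.classification import LogisticRegression

    spark = SparkSession.builder.master("local[2]").getOrCreate()
    df = spark.createDataFrame([
        Row(label=1.0, weight=1.0, features=Vectors.dense(1.0)),
        Row(label=0.0, weight=2.0, features=Vectors.sparse(1, [], []))])
    lr = LogisticRegression(maxIter=5, regParam=0.01, weightCol="weight")
    model = lr.fit(df)
    print(model.coefficients)  # renamed from model.weights
    print(model.intercept)
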
diff --git a/python/pyspark/ml/regression.py b/python/pyspark/ml/regression.py
index 944e648ec8..a0bb8ceed8 100644
--- a/python/pyspark/ml/regression.py
+++ b/python/pyspark/ml/regression.py
@@ -40,7 +40,7 @@ class LinearRegression(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPrediction
Linear regression.
The learning objective is to minimize the squared error, with regularization.
- The specific squared error loss function used is: L = 1/2n ||A weights - y||^2^
+ The specific squared error loss function used is: L = 1/2n ||A coefficients - y||^2^
This support multiple types of regularization:
- none (a.k.a. ordinary least squares)
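
Note: an illustrative numpy sketch (names and data are made up here, not from the patch) of the unregularized part of the objective named in the docstring, L = 1/(2n) ||A coefficients - y||^2:

    # Illustrative only: plain least-squares term of the objective,
    # L = 1/(2n) * ||A @ coefficients - y||^2, regularization omitted.
    import numpy as np

    A = np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]])  # n x p feature matrix
    y = np.array([1.0, 2.0, 3.0])                        # n targets
    coefficients = np.array([0.5, 0.1])                  # p model coefficients

    n = A.shape[0]
    residual = A @ coefficients - y
    L = np.dot(residual, residual) / (2.0 * n)
    print(L)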