-rw-r--r--  mllib/src/main/scala/org/apache/spark/ml/classification/LogisticRegression.scala  18
-rw-r--r--  mllib/src/main/scala/org/apache/spark/ml/regression/LinearRegression.scala  15
2 files changed, 33 insertions(+), 0 deletions(-)
diff --git a/mllib/src/main/scala/org/apache/spark/ml/classification/LogisticRegression.scala b/mllib/src/main/scala/org/apache/spark/ml/classification/LogisticRegression.scala
index 418bbdc9a0..d320d64dd9 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/classification/LogisticRegression.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/classification/LogisticRegression.scala
@@ -755,23 +755,35 @@ class BinaryLogisticRegressionSummary private[classification] (
* Returns the receiver operating characteristic (ROC) curve,
* which is a DataFrame having two fields (FPR, TPR)
* with (0.0, 0.0) prepended and (1.0, 1.0) appended to it.
+ *
+ * Note: This ignores instance weights (setting all to 1.0) from [[LogisticRegression.weightCol]].
+ * This will change in later Spark versions.
* @see http://en.wikipedia.org/wiki/Receiver_operating_characteristic
*/
@transient lazy val roc: DataFrame = binaryMetrics.roc().toDF("FPR", "TPR")
/**
* Computes the area under the receiver operating characteristic (ROC) curve.
+ *
+ * Note: This ignores instance weights (setting all to 1.0) from [[LogisticRegression.weightCol]].
+ * This will change in later Spark versions.
*/
lazy val areaUnderROC: Double = binaryMetrics.areaUnderROC()
/**
* Returns the precision-recall curve, which is a DataFrame containing
* two fields (recall, precision) with (0.0, 1.0) prepended to it.
+ *
+ * Note: This ignores instance weights (setting all to 1.0) from [[LogisticRegression.weightCol]].
+ * This will change in later Spark versions.
*/
@transient lazy val pr: DataFrame = binaryMetrics.pr().toDF("recall", "precision")
/**
* Returns a DataFrame with two fields (threshold, F-Measure) representing the curve with beta = 1.0.
+ *
+ * Note: This ignores instance weights (setting all to 1.0) from [[LogisticRegression.weightCol]].
+ * This will change in later Spark versions.
*/
@transient lazy val fMeasureByThreshold: DataFrame = {
binaryMetrics.fMeasureByThreshold().toDF("threshold", "F-Measure")
@@ -781,6 +793,9 @@ class BinaryLogisticRegressionSummary private[classification] (
* Returns a DataFrame with two fields (threshold, precision) representing the curve.
* Every possible probability obtained in transforming the dataset is used
* as a threshold in calculating the precision.
+ *
+ * Note: This ignores instance weights (setting all to 1.0) from [[LogisticRegression.weightCol]].
+ * This will change in later Spark versions.
*/
@transient lazy val precisionByThreshold: DataFrame = {
binaryMetrics.precisionByThreshold().toDF("threshold", "precision")
@@ -790,6 +805,9 @@ class BinaryLogisticRegressionSummary private[classification] (
* Returns a DataFrame with two fields (threshold, recall) representing the curve.
* Every possible probability obtained in transforming the dataset is used
* as a threshold in calculating the recall.
+ *
+ * Note: This ignores instance weights (setting all to 1.0) from [[LogisticRegression.weightCol]].
+ * This will change in later Spark versions.
*/
@transient lazy val recallByThreshold: DataFrame = {
binaryMetrics.recallByThreshold().toDF("threshold", "recall")
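
For context, a minimal sketch of how these binary summary metrics are typically read off a fitted model. The `training` DataFrame and the "weight" column name are assumptions for illustration; the cast to BinaryLogisticRegressionSummary follows the pattern shown in the Spark ML guide of this era:

    import org.apache.spark.ml.classification.{BinaryLogisticRegressionSummary, LogisticRegression}

    // Even when weightCol is set, the summary metrics below currently
    // treat every instance weight as 1.0, per the notes added above.
    val lr = new LogisticRegression()
      .setMaxIter(10)
      .setWeightCol("weight")                  // hypothetical weight column
    val model = lr.fit(training)               // `training`: assumed DataFrame(label, features, weight)

    // The training summary of a binary model carries the binary metrics,
    // so the documented cast is safe here.
    val binarySummary = model.summary.asInstanceOf[BinaryLogisticRegressionSummary]

    binarySummary.roc.show()                   // DataFrame(FPR, TPR), unweighted
    println(s"areaUnderROC = ${binarySummary.areaUnderROC}")
    binarySummary.pr.show()                    // DataFrame(recall, precision)
    binarySummary.fMeasureByThreshold.show()   // DataFrame(threshold, F-Measure)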
diff --git a/mllib/src/main/scala/org/apache/spark/ml/regression/LinearRegression.scala b/mllib/src/main/scala/org/apache/spark/ml/regression/LinearRegression.scala
index 70ccec766c..1db91666f2 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/regression/LinearRegression.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/regression/LinearRegression.scala
@@ -540,6 +540,9 @@ class LinearRegressionSummary private[regression] (
* Returns the explained variance regression score.
* explainedVariance = 1 - variance(y - \hat{y}) / variance(y)
* Reference: [[http://en.wikipedia.org/wiki/Explained_variation]]
+ *
+ * Note: This ignores instance weights (setting all to 1.0) from [[LinearRegression.weightCol]].
+ * This will change in later Spark versions.
*/
@Since("1.5.0")
val explainedVariance: Double = metrics.explainedVariance
@@ -547,6 +550,9 @@ class LinearRegressionSummary private[regression] (
/**
* Returns the mean absolute error, which is a risk function corresponding to the
* expected value of the absolute error loss or l1-norm loss.
+ *
+ * Note: This ignores instance weights (setting all to 1.0) from [[LinearRegression.weightCol]].
+ * This will change in later Spark versions.
*/
@Since("1.5.0")
val meanAbsoluteError: Double = metrics.meanAbsoluteError
@@ -554,6 +560,9 @@ class LinearRegressionSummary private[regression] (
/**
* Returns the mean squared error, which is a risk function corresponding to the
* expected value of the squared error loss or quadratic loss.
+ *
+ * Note: This ignores instance weights (setting all to 1.0) from [[LinearRegression.weightCol]].
+ * This will change in later Spark versions.
*/
@Since("1.5.0")
val meanSquaredError: Double = metrics.meanSquaredError
@@ -561,6 +570,9 @@ class LinearRegressionSummary private[regression] (
/**
* Returns the root mean squared error, which is defined as the square root of
* the mean squared error.
+ *
+ * Note: This ignores instance weights (setting all to 1.0) from [[LinearRegression.weightCol]].
+ * This will change in later Spark versions.
*/
@Since("1.5.0")
val rootMeanSquaredError: Double = metrics.rootMeanSquaredError
@@ -568,6 +580,9 @@ class LinearRegressionSummary private[regression] (
/**
* Returns R^2^, the coefficient of determination.
* Reference: [[http://en.wikipedia.org/wiki/Coefficient_of_determination]]
+ *
+ * Note: This ignores instance weights (setting all to 1.0) from [[LinearRegression.weightCol]].
+ * This will change in later Spark versions.
*/
@Since("1.5.0")
val r2: Double = metrics.r2
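
Likewise, a minimal sketch for the regression side; `training` and the weight column are again assumptions, and `summary` is the documented accessor on a model obtained from fit():

    import org.apache.spark.ml.regression.LinearRegression

    val linReg = new LinearRegression()
      .setWeightCol("weight")                  // hypothetical; ignored by the summary metrics for now
    val linModel = linReg.fit(training)        // `training`: assumed DataFrame(label, features, weight)

    val summary = linModel.summary
    println(s"explainedVariance = ${summary.explainedVariance}")
    println(s"MAE  = ${summary.meanAbsoluteError}")     // unweighted
    println(s"MSE  = ${summary.meanSquaredError}")
    println(s"RMSE = ${summary.rootMeanSquaredError}")
    println(s"R^2  = ${summary.r2}")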