author     Yanbo Liang <ybliang8@gmail.com>      2015-05-07 11:18:32 -0700
committer  Xiangrui Meng <meng@databricks.com>   2015-05-07 11:18:32 -0700
commit     1712a7c7057bf6dd5da8aea1d7fbecdf96ea4b32 (patch)
tree       a1130a8afd9ca2c9683434791a68a9e2a3379a09 /python
parent     068c3158ac0c66e20d90a45e6a2a0b93108e08d5 (diff)
[SPARK-6093] [MLLIB] Add RegressionMetrics in PySpark/MLlib
https://issues.apache.org/jira/browse/SPARK-6093

Author: Yanbo Liang <ybliang8@gmail.com>

Closes #5941 from yanboliang/spark-6093 and squashes the following commits:

6934af3 [Yanbo Liang] change to @property
aac3bc5 [Yanbo Liang] Add RegressionMetrics in PySpark/MLlib
Diffstat (limited to 'python')
-rw-r--r--  python/pyspark/mllib/evaluation.py  | 78
1 file changed, 76 insertions(+), 2 deletions(-)
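For orientation before the diff (not part of the patch itself): a minimal usage sketch assembled from the doctests added in this change, showing the new RegressionMetrics evaluator and the areaUnderROC / areaUnderPR accessors that this change turns into properties. The SparkContext setup and app name are assumed boilerplate, not something the patch adds.

from pyspark import SparkContext
from pyspark.mllib.evaluation import BinaryClassificationMetrics, RegressionMetrics

sc = SparkContext(appName="metrics-example")  # assumed setup, not part of the patch

# New in this patch: RegressionMetrics over (prediction, observation) pairs
predictionAndObservations = sc.parallelize([
    (2.5, 3.0), (0.0, -0.5), (2.0, 2.0), (8.0, 7.0)])
regression = RegressionMetrics(predictionAndObservations)
print(regression.meanAbsoluteError)     # 0.5
print(regression.rootMeanSquaredError)  # ~0.61

# Also in this patch: areaUnderROC / areaUnderPR become properties (no parentheses)
scoreAndLabels = sc.parallelize([
    (0.1, 0.0), (0.1, 1.0), (0.4, 0.0), (0.6, 0.0), (0.6, 1.0), (0.6, 1.0), (0.8, 1.0)], 2)
binary = BinaryClassificationMetrics(scoreAndLabels)
print(binary.areaUnderROC)  # ~0.70
print(binary.areaUnderPR)   # ~0.83
binary.unpersist()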
diff --git a/python/pyspark/mllib/evaluation.py b/python/pyspark/mllib/evaluation.py
index 16cb49cc0c..3e11df09da 100644
--- a/python/pyspark/mllib/evaluation.py
+++ b/python/pyspark/mllib/evaluation.py
@@ -27,9 +27,9 @@ class BinaryClassificationMetrics(JavaModelWrapper):
     >>> scoreAndLabels = sc.parallelize([
     ...     (0.1, 0.0), (0.1, 1.0), (0.4, 0.0), (0.6, 0.0), (0.6, 1.0), (0.6, 1.0), (0.8, 1.0)], 2)
     >>> metrics = BinaryClassificationMetrics(scoreAndLabels)
-    >>> metrics.areaUnderROC()
+    >>> metrics.areaUnderROC
     0.70...
-    >>> metrics.areaUnderPR()
+    >>> metrics.areaUnderPR
     0.83...
     >>> metrics.unpersist()
     """
@@ -47,6 +47,7 @@ class BinaryClassificationMetrics(JavaModelWrapper):
         java_model = java_class(df._jdf)
         super(BinaryClassificationMetrics, self).__init__(java_model)

+    @property
     def areaUnderROC(self):
         """
         Computes the area under the receiver operating characteristic
@@ -54,6 +55,7 @@ class BinaryClassificationMetrics(JavaModelWrapper):
         """
         return self.call("areaUnderROC")

+    @property
     def areaUnderPR(self):
         """
         Computes the area under the precision-recall curve.
@@ -67,6 +69,78 @@ class BinaryClassificationMetrics(JavaModelWrapper):
         self.call("unpersist")


+class RegressionMetrics(JavaModelWrapper):
+    """
+    Evaluator for regression.
+
+    >>> predictionAndObservations = sc.parallelize([
+    ...     (2.5, 3.0), (0.0, -0.5), (2.0, 2.0), (8.0, 7.0)])
+    >>> metrics = RegressionMetrics(predictionAndObservations)
+    >>> metrics.explainedVariance
+    0.95...
+    >>> metrics.meanAbsoluteError
+    0.5...
+    >>> metrics.meanSquaredError
+    0.37...
+    >>> metrics.rootMeanSquaredError
+    0.61...
+    >>> metrics.r2
+    0.94...
+    """
+
+    def __init__(self, predictionAndObservations):
+        """
+        :param predictionAndObservations: an RDD of (prediction, observation) pairs.
+        """
+        sc = predictionAndObservations.ctx
+        sql_ctx = SQLContext(sc)
+        df = sql_ctx.createDataFrame(predictionAndObservations, schema=StructType([
+            StructField("prediction", DoubleType(), nullable=False),
+            StructField("observation", DoubleType(), nullable=False)]))
+        java_class = sc._jvm.org.apache.spark.mllib.evaluation.RegressionMetrics
+        java_model = java_class(df._jdf)
+        super(RegressionMetrics, self).__init__(java_model)
+
+    @property
+    def explainedVariance(self):
+        """
+        Returns the explained variance regression score.
+        explainedVariance = 1 - variance(y - \hat{y}) / variance(y)
+        """
+        return self.call("explainedVariance")
+
+    @property
+    def meanAbsoluteError(self):
+        """
+        Returns the mean absolute error, which is a risk function corresponding to the
+        expected value of the absolute error loss or l1-norm loss.
+        """
+        return self.call("meanAbsoluteError")
+
+    @property
+    def meanSquaredError(self):
+        """
+        Returns the mean squared error, which is a risk function corresponding to the
+        expected value of the squared error loss or quadratic loss.
+        """
+        return self.call("meanSquaredError")
+
+    @property
+    def rootMeanSquaredError(self):
+        """
+        Returns the root mean squared error, which is defined as the square root of
+        the mean squared error.
+        """
+        return self.call("rootMeanSquaredError")
+
+    @property
+    def r2(self):
+        """
+        Returns R^2^, the coefficient of determination.
+        """
+        return self.call("r2")
+
+
 def _test():
     import doctest
     from pyspark import SparkContext
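The elided doctest values above (0.5..., 0.37..., 0.61..., 0.94..., 0.95...) can be reproduced by hand from the four (prediction, observation) pairs. The sketch below is a standalone sanity check, not part of the patch; it assumes the population (divide-by-n) variances implied by the explainedVariance formula in the docstring, and runs without Spark.

import math

predictions = [2.5, 0.0, 2.0, 8.0]
observations = [3.0, -0.5, 2.0, 7.0]
n = len(observations)
residuals = [o - p for p, o in zip(predictions, observations)]

mae = sum(abs(r) for r in residuals) / n            # 0.5
mse = sum(r * r for r in residuals) / n             # 0.375
rmse = math.sqrt(mse)                               # ~0.612

mean_obs = sum(observations) / n
ss_tot = sum((o - mean_obs) ** 2 for o in observations)
r2 = 1.0 - sum(r * r for r in residuals) / ss_tot   # ~0.949

# explainedVariance = 1 - variance(y - \hat{y}) / variance(y), per the docstring
mean_res = sum(residuals) / n
var_res = sum((r - mean_res) ** 2 for r in residuals) / n
var_obs = ss_tot / n
explained_variance = 1.0 - var_res / var_obs        # ~0.957

print(mae, mse, rmse, r2, explained_variance)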