author    Yanbo Liang <ybliang8@gmail.com>    2015-09-11 08:50:35 -0700
committer Xiangrui Meng <meng@databricks.com> 2015-09-11 08:50:35 -0700
commit b656e6134fc5cd27e1fe6b6ab30fd7633cab0b14 (patch)
tree   10d2d556a148adab585979cc387109588c6fda43 /python/pyspark/ml/param
parent c268ca4ddde2f5213b2e3985dcaaac5900aea71c (diff)
[SPARK-10026] [ML] [PySpark] Implement some common Params for regression in PySpark
LinearRegression and LogisticRegression are missing some Params on the Python side, and some Params are not shared classes, which means they have to be re-implemented for each class. The Params in question are listed here:
```scala
HasElasticNetParam
HasFitIntercept
HasStandardization
HasThresholds
```
Here we implement them as shared params on the Python side and make the LinearRegression/LogisticRegression parameters on par with their Scala counterparts.

Author: Yanbo Liang <ybliang8@gmail.com>

Closes #8508 from yanboliang/spark-10026.
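As a rough illustration of how the newly shared params are used from PySpark, a minimal sketch follows; the `training` DataFrame and the SparkContext/SQLContext setup are assumed and not part of this patch:

```python
from pyspark.ml.classification import LogisticRegression

# Assumes `training` is an existing DataFrame of (label, features) rows.
lr = LogisticRegression(maxIter=10, regParam=0.1)
lr.setElasticNetParam(0.5)      # 0.0 = pure L2 penalty, 1.0 = pure L1 penalty
lr.setFitIntercept(True)        # fit an intercept term
lr.setStandardization(True)     # standardize features before fitting
print(lr.getElasticNetParam())  # 0.5
model = lr.fit(training)
```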
Diffstat (limited to 'python/pyspark/ml/param')
-rw-r--r--  python/pyspark/ml/param/_shared_params_code_gen.py |  11
-rw-r--r--  python/pyspark/ml/param/shared.py                  | 111
2 files changed, 121 insertions(+), 1 deletion(-)
diff --git a/python/pyspark/ml/param/_shared_params_code_gen.py b/python/pyspark/ml/param/_shared_params_code_gen.py
index 926375e448..5b39e5dd4e 100644
--- a/python/pyspark/ml/param/_shared_params_code_gen.py
+++ b/python/pyspark/ml/param/_shared_params_code_gen.py
@@ -124,7 +124,16 @@ if __name__ == "__main__":
("stepSize", "Step size to be used for each iteration of optimization.", None),
("handleInvalid", "how to handle invalid entries. Options are skip (which will filter " +
"out rows with bad values), or error (which will throw an errror). More options may be " +
- "added later.", None)]
+ "added later.", None),
+ ("elasticNetParam", "the ElasticNet mixing parameter, in range [0, 1]. For alpha = 0, " +
+ "the penalty is an L2 penalty. For alpha = 1, it is an L1 penalty.", "0.0"),
+ ("fitIntercept", "whether to fit an intercept term.", "True"),
+ ("standardization", "whether to standardize the training features before fitting the " +
+ "model.", "True"),
+ ("thresholds", "Thresholds in multi-class classification to adjust the probability of " +
+ "predicting each class. Array must have length equal to the number of classes, with " +
+ "values >= 0. The class with largest value p/t is predicted, where p is the original " +
+ "probability of that class and t is the class' threshold.", None)]
code = []
for name, doc, defaultValueStr in shared:
param_code = _gen_param_header(name, doc, defaultValueStr)
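Each tuple in the `shared` list is expanded by the code generator into a `Has<Name>` mixin in shared.py. The sketch below is a simplified approximation of that expansion, based on the generated output visible in the shared.py diff further down; it is not the actual implementation of `_gen_param_header`, whose internals this diff does not show.

```python
# Simplified, illustrative template for one shared-param mixin (assumption: the real
# generator uses a similar fill-in-the-blanks approach; its exact code is not in this diff).
template = '''class Has{Name}(Params):
    """
    Mixin for param {name}: {doc}.
    """

    {name} = Param(Params._dummy(), "{name}", "{doc}")

    def __init__(self):
        super(Has{Name}, self).__init__()
        self.{name} = Param(self, "{name}", "{doc}")
        {setDefault}

    def set{Name}(self, value):
        self._paramMap[self.{name}] = value
        return self

    def get{Name}(self):
        return self.getOrDefault(self.{name})
'''


def gen_shared_param(name, doc, defaultValueStr):
    """Render one shared-param mixin; defaultValueStr may be None (no default set)."""
    set_default = ""
    if defaultValueStr is not None:
        set_default = "self._setDefault(%s=%s)" % (name, defaultValueStr)
    return template.format(Name=name[0].upper() + name[1:], name=name,
                           doc=doc, setDefault=set_default)


print(gen_shared_param("fitIntercept", "whether to fit an intercept term.", "True"))
```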
diff --git a/python/pyspark/ml/param/shared.py b/python/pyspark/ml/param/shared.py
index 682170aee8..af12181286 100644
--- a/python/pyspark/ml/param/shared.py
+++ b/python/pyspark/ml/param/shared.py
@@ -459,6 +459,117 @@ class HasHandleInvalid(Params):
return self.getOrDefault(self.handleInvalid)
+class HasElasticNetParam(Params):
+ """
+ Mixin for param elasticNetParam: the ElasticNet mixing parameter, in range [0, 1]. For alpha = 0, the penalty is an L2 penalty. For alpha = 1, it is an L1 penalty..
+ """
+
+ # a placeholder to make it appear in the generated doc
+ elasticNetParam = Param(Params._dummy(), "elasticNetParam", "the ElasticNet mixing parameter, in range [0, 1]. For alpha = 0, the penalty is an L2 penalty. For alpha = 1, it is an L1 penalty.")
+
+ def __init__(self):
+ super(HasElasticNetParam, self).__init__()
+ #: param for the ElasticNet mixing parameter, in range [0, 1]. For alpha = 0, the penalty is an L2 penalty. For alpha = 1, it is an L1 penalty.
+ self.elasticNetParam = Param(self, "elasticNetParam", "the ElasticNet mixing parameter, in range [0, 1]. For alpha = 0, the penalty is an L2 penalty. For alpha = 1, it is an L1 penalty.")
+ self._setDefault(elasticNetParam=0.0)
+
+ def setElasticNetParam(self, value):
+ """
+ Sets the value of :py:attr:`elasticNetParam`.
+ """
+ self._paramMap[self.elasticNetParam] = value
+ return self
+
+ def getElasticNetParam(self):
+ """
+ Gets the value of elasticNetParam or its default value.
+ """
+ return self.getOrDefault(self.elasticNetParam)
+
+
+class HasFitIntercept(Params):
+ """
+ Mixin for param fitIntercept: whether to fit an intercept term..
+ """
+
+ # a placeholder to make it appear in the generated doc
+ fitIntercept = Param(Params._dummy(), "fitIntercept", "whether to fit an intercept term.")
+
+ def __init__(self):
+ super(HasFitIntercept, self).__init__()
+ #: param for whether to fit an intercept term.
+ self.fitIntercept = Param(self, "fitIntercept", "whether to fit an intercept term.")
+ self._setDefault(fitIntercept=True)
+
+ def setFitIntercept(self, value):
+ """
+ Sets the value of :py:attr:`fitIntercept`.
+ """
+ self._paramMap[self.fitIntercept] = value
+ return self
+
+ def getFitIntercept(self):
+ """
+ Gets the value of fitIntercept or its default value.
+ """
+ return self.getOrDefault(self.fitIntercept)
+
+
+class HasStandardization(Params):
+ """
+ Mixin for param standardization: whether to standardize the training features before fitting the model..
+ """
+
+ # a placeholder to make it appear in the generated doc
+ standardization = Param(Params._dummy(), "standardization", "whether to standardize the training features before fitting the model.")
+
+ def __init__(self):
+ super(HasStandardization, self).__init__()
+ #: param for whether to standardize the training features before fitting the model.
+ self.standardization = Param(self, "standardization", "whether to standardize the training features before fitting the model.")
+ self._setDefault(standardization=True)
+
+ def setStandardization(self, value):
+ """
+ Sets the value of :py:attr:`standardization`.
+ """
+ self._paramMap[self.standardization] = value
+ return self
+
+ def getStandardization(self):
+ """
+ Gets the value of standardization or its default value.
+ """
+ return self.getOrDefault(self.standardization)
+
+
+class HasThresholds(Params):
+ """
+ Mixin for param thresholds: Thresholds in multi-class classification to adjust the probability of predicting each class. Array must have length equal to the number of classes, with values >= 0. The class with largest value p/t is predicted, where p is the original probability of that class and t is the class' threshold..
+ """
+
+ # a placeholder to make it appear in the generated doc
+ thresholds = Param(Params._dummy(), "thresholds", "Thresholds in multi-class classification to adjust the probability of predicting each class. Array must have length equal to the number of classes, with values >= 0. The class with largest value p/t is predicted, where p is the original probability of that class and t is the class' threshold.")
+
+ def __init__(self):
+ super(HasThresholds, self).__init__()
+ #: param for Thresholds in multi-class classification to adjust the probability of predicting each class. Array must have length equal to the number of classes, with values >= 0. The class with largest value p/t is predicted, where p is the original probability of that class and t is the class' threshold.
+ self.thresholds = Param(self, "thresholds", "Thresholds in multi-class classification to adjust the probability of predicting each class. Array must have length equal to the number of classes, with values >= 0. The class with largest value p/t is predicted, where p is the original probability of that class and t is the class' threshold.")
+
+ def setThresholds(self, value):
+ """
+ Sets the value of :py:attr:`thresholds`.
+ """
+ self._paramMap[self.thresholds] = value
+ return self
+
+ def getThresholds(self):
+ """
+ Gets the value of thresholds or its default value.
+ """
+ return self.getOrDefault(self.thresholds)
+
+
class DecisionTreeParams(Params):
"""
Mixin for Decision Tree parameters.
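For context on the thresholds rule quoted in the HasThresholds docstring above ("the class with largest value p/t is predicted"), here is a small illustrative computation in plain Python; it is not part of this patch and the numbers are made up:

```python
# Illustration only: how per-class thresholds adjust the predicted class.
# p[i] is the model's probability for class i, t[i] is that class's threshold (>= 0);
# the predicted class is the one maximizing p[i] / t[i].
probabilities = [0.45, 0.55]   # original class probabilities
thresholds = [0.3, 0.7]        # per-class thresholds
scores = [p / t for p, t in zip(probabilities, thresholds)]
predicted = scores.index(max(scores))
print(predicted)  # 0 -- class 0 wins (0.45/0.3 = 1.5 > 0.55/0.7 ~ 0.79) despite its lower raw probability
```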