Diffstat (limited to 'python/pyspark/ml/param/shared.py')
-rw-r--r--  python/pyspark/ml/param/shared.py | 81
1 file changed, 1 insertion, 80 deletions
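The same mechanical edit repeats for every shared-param mixin in this file: each Param used to be declared twice, once at class level (a placeholder so it appears in the generated doc) and once per instance in __init__ with self as the parent. The diff drops the per-instance re-declaration and keeps only the class-level one. Below is a minimal, self-contained sketch of why that duplication becomes removable; Param and Params here are simplified stand-ins, and the per-instance copy in Params.__init__ is an assumed mechanism for illustration -- the real rebinding logic lives elsewhere in pyspark.ml.param and is not shown in this diff.

    class Param(object):
        """Simplified stand-in for pyspark.ml.param.Param."""
        def __init__(self, parent, name, doc, expectedType=None):
            self.parent = parent
            self.name = name
            self.doc = doc
            self.expectedType = expectedType

    class Params(object):
        """Simplified stand-in base class (assumed mechanism, not Spark's
        actual implementation)."""

        @staticmethod
        def _dummy():
            return None  # placeholder parent for class-level declarations

        def __init__(self):
            self._paramMap = {}
            # Rebind every class-level Param to this instance, so that e.g.
            # self.maxIter.parent is self without per-mixin boilerplate.
            for name in dir(type(self)):
                attr = getattr(type(self), name, None)
                if isinstance(attr, Param):
                    setattr(self, name,
                            Param(self, attr.name, attr.doc, attr.expectedType))

    class HasMaxIter(Params):
        """After this diff: the Param is declared once, at class level only."""
        maxIter = Param(Params._dummy(), "maxIter",
                        "max number of iterations (>= 0).", int)

    m = HasMaxIter()
    assert m.maxIter.parent is m               # per-instance copy
    assert HasMaxIter.maxIter.parent is None   # class-level placeholder untouched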
diff --git a/python/pyspark/ml/param/shared.py b/python/pyspark/ml/param/shared.py
index 23f9431484..db4a8a54d4 100644
--- a/python/pyspark/ml/param/shared.py
+++ b/python/pyspark/ml/param/shared.py
@@ -25,13 +25,10 @@ class HasMaxIter(Params):
Mixin for param maxIter: max number of iterations (>= 0).
"""
- # a placeholder to make it appear in the generated doc
maxIter = Param(Params._dummy(), "maxIter", "max number of iterations (>= 0).", int)
def __init__(self):
super(HasMaxIter, self).__init__()
- #: param for max number of iterations (>= 0).
- self.maxIter = Param(self, "maxIter", "max number of iterations (>= 0).", int)
def setMaxIter(self, value):
"""
@@ -52,13 +49,10 @@ class HasRegParam(Params):
Mixin for param regParam: regularization parameter (>= 0).
"""
- # a placeholder to make it appear in the generated doc
regParam = Param(Params._dummy(), "regParam", "regularization parameter (>= 0).", float)
def __init__(self):
super(HasRegParam, self).__init__()
- #: param for regularization parameter (>= 0).
- self.regParam = Param(self, "regParam", "regularization parameter (>= 0).", float)
def setRegParam(self, value):
"""
@@ -79,13 +73,10 @@ class HasFeaturesCol(Params):
Mixin for param featuresCol: features column name.
"""
- # a placeholder to make it appear in the generated doc
featuresCol = Param(Params._dummy(), "featuresCol", "features column name.", str)
def __init__(self):
super(HasFeaturesCol, self).__init__()
- #: param for features column name.
- self.featuresCol = Param(self, "featuresCol", "features column name.", str)
self._setDefault(featuresCol='features')
def setFeaturesCol(self, value):
@@ -107,13 +98,10 @@ class HasLabelCol(Params):
Mixin for param labelCol: label column name.
"""
- # a placeholder to make it appear in the generated doc
labelCol = Param(Params._dummy(), "labelCol", "label column name.", str)
def __init__(self):
super(HasLabelCol, self).__init__()
- #: param for label column name.
- self.labelCol = Param(self, "labelCol", "label column name.", str)
self._setDefault(labelCol='label')
def setLabelCol(self, value):
@@ -135,13 +123,10 @@ class HasPredictionCol(Params):
Mixin for param predictionCol: prediction column name.
"""
- # a placeholder to make it appear in the generated doc
predictionCol = Param(Params._dummy(), "predictionCol", "prediction column name.", str)
def __init__(self):
super(HasPredictionCol, self).__init__()
- #: param for prediction column name.
- self.predictionCol = Param(self, "predictionCol", "prediction column name.", str)
self._setDefault(predictionCol='prediction')
def setPredictionCol(self, value):
@@ -163,13 +148,10 @@ class HasProbabilityCol(Params):
Mixin for param probabilityCol: Column name for predicted class conditional probabilities. Note: Not all models output well-calibrated probability estimates! These probabilities should be treated as confidences, not precise probabilities.
"""
- # a placeholder to make it appear in the generated doc
probabilityCol = Param(Params._dummy(), "probabilityCol", "Column name for predicted class conditional probabilities. Note: Not all models output well-calibrated probability estimates! These probabilities should be treated as confidences, not precise probabilities.", str)
def __init__(self):
super(HasProbabilityCol, self).__init__()
- #: param for Column name for predicted class conditional probabilities. Note: Not all models output well-calibrated probability estimates! These probabilities should be treated as confidences, not precise probabilities.
- self.probabilityCol = Param(self, "probabilityCol", "Column name for predicted class conditional probabilities. Note: Not all models output well-calibrated probability estimates! These probabilities should be treated as confidences, not precise probabilities.", str)
self._setDefault(probabilityCol='probability')
def setProbabilityCol(self, value):
@@ -191,13 +173,10 @@ class HasRawPredictionCol(Params):
Mixin for param rawPredictionCol: raw prediction (a.k.a. confidence) column name.
"""
- # a placeholder to make it appear in the generated doc
rawPredictionCol = Param(Params._dummy(), "rawPredictionCol", "raw prediction (a.k.a. confidence) column name.", str)
def __init__(self):
super(HasRawPredictionCol, self).__init__()
- #: param for raw prediction (a.k.a. confidence) column name.
- self.rawPredictionCol = Param(self, "rawPredictionCol", "raw prediction (a.k.a. confidence) column name.", str)
self._setDefault(rawPredictionCol='rawPrediction')
def setRawPredictionCol(self, value):
@@ -219,13 +198,10 @@ class HasInputCol(Params):
Mixin for param inputCol: input column name.
"""
- # a placeholder to make it appear in the generated doc
inputCol = Param(Params._dummy(), "inputCol", "input column name.", str)
def __init__(self):
super(HasInputCol, self).__init__()
- #: param for input column name.
- self.inputCol = Param(self, "inputCol", "input column name.", str)
def setInputCol(self, value):
"""
@@ -246,13 +222,10 @@ class HasInputCols(Params):
Mixin for param inputCols: input column names.
"""
- # a placeholder to make it appear in the generated doc
inputCols = Param(Params._dummy(), "inputCols", "input column names.", None)
def __init__(self):
super(HasInputCols, self).__init__()
- #: param for input column names.
- self.inputCols = Param(self, "inputCols", "input column names.", None)
def setInputCols(self, value):
"""
@@ -273,13 +246,10 @@ class HasOutputCol(Params):
Mixin for param outputCol: output column name.
"""
- # a placeholder to make it appear in the generated doc
outputCol = Param(Params._dummy(), "outputCol", "output column name.", str)
def __init__(self):
super(HasOutputCol, self).__init__()
- #: param for output column name.
- self.outputCol = Param(self, "outputCol", "output column name.", str)
self._setDefault(outputCol=self.uid + '__output')
def setOutputCol(self, value):
@@ -301,13 +271,10 @@ class HasNumFeatures(Params):
Mixin for param numFeatures: number of features.
"""
- # a placeholder to make it appear in the generated doc
numFeatures = Param(Params._dummy(), "numFeatures", "number of features.", int)
def __init__(self):
super(HasNumFeatures, self).__init__()
- #: param for number of features.
- self.numFeatures = Param(self, "numFeatures", "number of features.", int)
def setNumFeatures(self, value):
"""
@@ -328,13 +295,10 @@ class HasCheckpointInterval(Params):
Mixin for param checkpointInterval: set checkpoint interval (>= 1) or disable checkpoint (-1). E.g. 10 means that the cache will get checkpointed every 10 iterations.
"""
- # a placeholder to make it appear in the generated doc
checkpointInterval = Param(Params._dummy(), "checkpointInterval", "set checkpoint interval (>= 1) or disable checkpoint (-1). E.g. 10 means that the cache will get checkpointed every 10 iterations.", int)
def __init__(self):
super(HasCheckpointInterval, self).__init__()
- #: param for set checkpoint interval (>= 1) or disable checkpoint (-1). E.g. 10 means that the cache will get checkpointed every 10 iterations.
- self.checkpointInterval = Param(self, "checkpointInterval", "set checkpoint interval (>= 1) or disable checkpoint (-1). E.g. 10 means that the cache will get checkpointed every 10 iterations.", int)
def setCheckpointInterval(self, value):
"""
@@ -355,13 +319,10 @@ class HasSeed(Params):
Mixin for param seed: random seed.
"""
- # a placeholder to make it appear in the generated doc
seed = Param(Params._dummy(), "seed", "random seed.", int)
def __init__(self):
super(HasSeed, self).__init__()
- #: param for random seed.
- self.seed = Param(self, "seed", "random seed.", int)
self._setDefault(seed=hash(type(self).__name__))
def setSeed(self, value):
@@ -383,13 +344,10 @@ class HasTol(Params):
Mixin for param tol: the convergence tolerance for iterative algorithms.
"""
- # a placeholder to make it appear in the generated doc
tol = Param(Params._dummy(), "tol", "the convergence tolerance for iterative algorithms.", float)
def __init__(self):
super(HasTol, self).__init__()
- #: param for the convergence tolerance for iterative algorithms.
- self.tol = Param(self, "tol", "the convergence tolerance for iterative algorithms.", float)
def setTol(self, value):
"""
@@ -410,13 +368,10 @@ class HasStepSize(Params):
Mixin for param stepSize: Step size to be used for each iteration of optimization.
"""
- # a placeholder to make it appear in the generated doc
stepSize = Param(Params._dummy(), "stepSize", "Step size to be used for each iteration of optimization.", float)
def __init__(self):
super(HasStepSize, self).__init__()
- #: param for Step size to be used for each iteration of optimization.
- self.stepSize = Param(self, "stepSize", "Step size to be used for each iteration of optimization.", float)
def setStepSize(self, value):
"""
@@ -437,13 +392,10 @@ class HasHandleInvalid(Params):
Mixin for param handleInvalid: how to handle invalid entries. Options are skip (which will filter out rows with bad values), or error (which will throw an errror). More options may be added later.
"""
- # a placeholder to make it appear in the generated doc
handleInvalid = Param(Params._dummy(), "handleInvalid", "how to handle invalid entries. Options are skip (which will filter out rows with bad values), or error (which will throw an errror). More options may be added later.", str)
def __init__(self):
super(HasHandleInvalid, self).__init__()
- #: param for how to handle invalid entries. Options are skip (which will filter out rows with bad values), or error (which will throw an errror). More options may be added later.
- self.handleInvalid = Param(self, "handleInvalid", "how to handle invalid entries. Options are skip (which will filter out rows with bad values), or error (which will throw an errror). More options may be added later.", str)
def setHandleInvalid(self, value):
"""
@@ -464,13 +416,10 @@ class HasElasticNetParam(Params):
Mixin for param elasticNetParam: the ElasticNet mixing parameter, in range [0, 1]. For alpha = 0, the penalty is an L2 penalty. For alpha = 1, it is an L1 penalty.
"""
- # a placeholder to make it appear in the generated doc
elasticNetParam = Param(Params._dummy(), "elasticNetParam", "the ElasticNet mixing parameter, in range [0, 1]. For alpha = 0, the penalty is an L2 penalty. For alpha = 1, it is an L1 penalty.", float)
def __init__(self):
super(HasElasticNetParam, self).__init__()
- #: param for the ElasticNet mixing parameter, in range [0, 1]. For alpha = 0, the penalty is an L2 penalty. For alpha = 1, it is an L1 penalty.
- self.elasticNetParam = Param(self, "elasticNetParam", "the ElasticNet mixing parameter, in range [0, 1]. For alpha = 0, the penalty is an L2 penalty. For alpha = 1, it is an L1 penalty.", float)
self._setDefault(elasticNetParam=0.0)
def setElasticNetParam(self, value):
@@ -492,13 +441,10 @@ class HasFitIntercept(Params):
Mixin for param fitIntercept: whether to fit an intercept term.
"""
- # a placeholder to make it appear in the generated doc
fitIntercept = Param(Params._dummy(), "fitIntercept", "whether to fit an intercept term.", bool)
def __init__(self):
super(HasFitIntercept, self).__init__()
- #: param for whether to fit an intercept term.
- self.fitIntercept = Param(self, "fitIntercept", "whether to fit an intercept term.", bool)
self._setDefault(fitIntercept=True)
def setFitIntercept(self, value):
@@ -520,13 +466,10 @@ class HasStandardization(Params):
Mixin for param standardization: whether to standardize the training features before fitting the model.
"""
- # a placeholder to make it appear in the generated doc
standardization = Param(Params._dummy(), "standardization", "whether to standardize the training features before fitting the model.", bool)
def __init__(self):
super(HasStandardization, self).__init__()
- #: param for whether to standardize the training features before fitting the model.
- self.standardization = Param(self, "standardization", "whether to standardize the training features before fitting the model.", bool)
self._setDefault(standardization=True)
def setStandardization(self, value):
@@ -548,13 +491,10 @@ class HasThresholds(Params):
Mixin for param thresholds: Thresholds in multi-class classification to adjust the probability of predicting each class. Array must have length equal to the number of classes, with values >= 0. The class with largest value p/t is predicted, where p is the original probability of that class and t is the class' threshold.
"""
- # a placeholder to make it appear in the generated doc
thresholds = Param(Params._dummy(), "thresholds", "Thresholds in multi-class classification to adjust the probability of predicting each class. Array must have length equal to the number of classes, with values >= 0. The class with largest value p/t is predicted, where p is the original probability of that class and t is the class' threshold.", None)
def __init__(self):
super(HasThresholds, self).__init__()
- #: param for Thresholds in multi-class classification to adjust the probability of predicting each class. Array must have length equal to the number of classes, with values >= 0. The class with largest value p/t is predicted, where p is the original probability of that class and t is the class' threshold.
- self.thresholds = Param(self, "thresholds", "Thresholds in multi-class classification to adjust the probability of predicting each class. Array must have length equal to the number of classes, with values >= 0. The class with largest value p/t is predicted, where p is the original probability of that class and t is the class' threshold.", None)
def setThresholds(self, value):
"""
@@ -575,13 +515,10 @@ class HasWeightCol(Params):
Mixin for param weightCol: weight column name. If this is not set or empty, we treat all instance weights as 1.0.
"""
- # a placeholder to make it appear in the generated doc
weightCol = Param(Params._dummy(), "weightCol", "weight column name. If this is not set or empty, we treat all instance weights as 1.0.", str)
def __init__(self):
super(HasWeightCol, self).__init__()
- #: param for weight column name. If this is not set or empty, we treat all instance weights as 1.0.
- self.weightCol = Param(self, "weightCol", "weight column name. If this is not set or empty, we treat all instance weights as 1.0.", str)
def setWeightCol(self, value):
"""
@@ -602,13 +539,10 @@ class HasSolver(Params):
Mixin for param solver: the solver algorithm for optimization. If this is not set or empty, default value is 'auto'.
"""
- # a placeholder to make it appear in the generated doc
solver = Param(Params._dummy(), "solver", "the solver algorithm for optimization. If this is not set or empty, default value is 'auto'.", str)
def __init__(self):
super(HasSolver, self).__init__()
- #: param for the solver algorithm for optimization. If this is not set or empty, default value is 'auto'.
- self.solver = Param(self, "solver", "the solver algorithm for optimization. If this is not set or empty, default value is 'auto'.", str)
self._setDefault(solver='auto')
def setSolver(self, value):
@@ -630,7 +564,6 @@ class DecisionTreeParams(Params):
Mixin for Decision Tree parameters.
"""
- # a placeholder to make it appear in the generated doc
maxDepth = Param(Params._dummy(), "maxDepth", "Maximum depth of the tree. (>= 0) E.g., depth 0 means 1 leaf node; depth 1 means 1 internal node + 2 leaf nodes.")
maxBins = Param(Params._dummy(), "maxBins", "Max number of bins for discretizing continuous features. Must be >=2 and >= number of categories for any categorical feature.")
minInstancesPerNode = Param(Params._dummy(), "minInstancesPerNode", "Minimum number of instances each child must have after split. If a split causes the left or right child to have fewer than minInstancesPerNode, the split will be discarded as invalid. Should be >= 1.")
@@ -641,19 +574,7 @@ class DecisionTreeParams(Params):
def __init__(self):
super(DecisionTreeParams, self).__init__()
- #: param for Maximum depth of the tree. (>= 0) E.g., depth 0 means 1 leaf node; depth 1 means 1 internal node + 2 leaf nodes.
- self.maxDepth = Param(self, "maxDepth", "Maximum depth of the tree. (>= 0) E.g., depth 0 means 1 leaf node; depth 1 means 1 internal node + 2 leaf nodes.")
- #: param for Max number of bins for discretizing continuous features. Must be >=2 and >= number of categories for any categorical feature.
- self.maxBins = Param(self, "maxBins", "Max number of bins for discretizing continuous features. Must be >=2 and >= number of categories for any categorical feature.")
- #: param for Minimum number of instances each child must have after split. If a split causes the left or right child to have fewer than minInstancesPerNode, the split will be discarded as invalid. Should be >= 1.
- self.minInstancesPerNode = Param(self, "minInstancesPerNode", "Minimum number of instances each child must have after split. If a split causes the left or right child to have fewer than minInstancesPerNode, the split will be discarded as invalid. Should be >= 1.")
- #: param for Minimum information gain for a split to be considered at a tree node.
- self.minInfoGain = Param(self, "minInfoGain", "Minimum information gain for a split to be considered at a tree node.")
- #: param for Maximum memory in MB allocated to histogram aggregation.
- self.maxMemoryInMB = Param(self, "maxMemoryInMB", "Maximum memory in MB allocated to histogram aggregation.")
- #: param for If false, the algorithm will pass trees to executors to match instances with nodes. If true, the algorithm will cache node IDs for each instance. Caching can speed up training of deeper trees. Users can set how often should the cache be checkpointed or disable it by setting checkpointInterval.
- self.cacheNodeIds = Param(self, "cacheNodeIds", "If false, the algorithm will pass trees to executors to match instances with nodes. If true, the algorithm will cache node IDs for each instance. Caching can speed up training of deeper trees. Users can set how often should the cache be checkpointed or disable it by setting checkpointInterval.")
-
+
def setMaxDepth(self, value):
"""
Sets the value of :py:attr:`maxDepth`.
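The setter bodies themselves are untouched by this diff (the hunk is truncated above). For reference, a sketch of the typical shape of such a shared-param setter in this era of the API, matching the simplified _paramMap store from the sketch near the top; this is an assumed shape, not the body shown in this diff:

    def setMaxDepth(self, value):
        """
        Sets the value of :py:attr:`maxDepth`.
        """
        self._paramMap[self.maxDepth] = value  # keyed by the Param object itself
        return self  # returned so setter calls can be chained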