From 4c28a2bad8a6d64ee69213eede440837636fe58b Mon Sep 17 00:00:00 2001
From: Matei Zaharia
Date: Fri, 10 Jan 2014 00:12:43 -0800
Subject: Update some Python MLlib parameters to use camelCase, and tweak docs

We've used camel case in other Spark methods, so it felt reasonable to keep
using it here and make the code match Scala/Java as much as possible. Note
that parameter names matter in Python because callers can pass optional
parameters by name.
---
 python/pyspark/mllib/classification.py | 14 +++++++-------
 python/pyspark/mllib/regression.py     | 28 ++++++++++++++--------------
 2 files changed, 21 insertions(+), 21 deletions(-)

diff --git a/python/pyspark/mllib/classification.py b/python/pyspark/mllib/classification.py
index 03ff5a572e..19b90dfd6e 100644
--- a/python/pyspark/mllib/classification.py
+++ b/python/pyspark/mllib/classification.py
@@ -44,13 +44,13 @@ class LogisticRegressionModel(LinearModel):
 class LogisticRegressionWithSGD(object):
     @classmethod
     def train(cls, data, iterations=100, step=1.0,
-              mini_batch_fraction=1.0, initial_weights=None):
+              miniBatchFraction=1.0, initialWeights=None):
         """Train a logistic regression model on the given data."""
         sc = data.context
         return _regression_train_wrapper(sc, lambda d, i:
                 sc._jvm.PythonMLLibAPI().trainLogisticRegressionModelWithSGD(d._jrdd,
-                        iterations, step, mini_batch_fraction, i),
-                LogisticRegressionModel, data, initial_weights)
+                        iterations, step, miniBatchFraction, i),
+                LogisticRegressionModel, data, initialWeights)

 class SVMModel(LinearModel):
     """A support vector machine.
@@ -67,14 +67,14 @@ class SVMModel(LinearModel):

 class SVMWithSGD(object):
     @classmethod
-    def train(cls, data, iterations=100, step=1.0, reg_param=1.0,
-              mini_batch_fraction=1.0, initial_weights=None):
+    def train(cls, data, iterations=100, step=1.0, regParam=1.0,
+              miniBatchFraction=1.0, initialWeights=None):
         """Train a support vector machine on the given data."""
         sc = data.context
         return _regression_train_wrapper(sc, lambda d, i:
                 sc._jvm.PythonMLLibAPI().trainSVMModelWithSGD(d._jrdd,
-                        iterations, step, reg_param, mini_batch_fraction, i),
-                SVMModel, data, initial_weights)
+                        iterations, step, regParam, miniBatchFraction, i),
+                SVMModel, data, initialWeights)

 class NaiveBayesModel(object):
     """
diff --git a/python/pyspark/mllib/regression.py b/python/pyspark/mllib/regression.py
index e90b72893f..7656db07f6 100644
--- a/python/pyspark/mllib/regression.py
+++ b/python/pyspark/mllib/regression.py
@@ -47,57 +47,57 @@ class LinearRegressionModel(LinearRegressionModelBase):
     """A linear regression model derived from a least-squares fit.

     >>> data = array([0.0, 0.0, 1.0, 1.0, 3.0, 2.0, 2.0, 3.0]).reshape(4,2)
-    >>> lrm = LinearRegressionWithSGD.train(sc.parallelize(data), initial_weights=array([1.0]))
+    >>> lrm = LinearRegressionWithSGD.train(sc.parallelize(data), initialWeights=array([1.0]))
     """

 class LinearRegressionWithSGD(object):
     @classmethod
     def train(cls, data, iterations=100, step=1.0,
-              mini_batch_fraction=1.0, initial_weights=None):
+              miniBatchFraction=1.0, initialWeights=None):
         """Train a linear regression model on the given data."""
         sc = data.context
         return _regression_train_wrapper(sc, lambda d, i:
                 sc._jvm.PythonMLLibAPI().trainLinearRegressionModelWithSGD(
-                        d._jrdd, iterations, step, mini_batch_fraction, i),
-                LinearRegressionModel, data, initial_weights)
+                        d._jrdd, iterations, step, miniBatchFraction, i),
+                LinearRegressionModel, data, initialWeights)

 class LassoModel(LinearRegressionModelBase):
     """A linear regression model derived from a least-squares fit with an
     l_1 penalty term.

     >>> data = array([0.0, 0.0, 1.0, 1.0, 3.0, 2.0, 2.0, 3.0]).reshape(4,2)
-    >>> lrm = LassoWithSGD.train(sc.parallelize(data), initial_weights=array([1.0]))
+    >>> lrm = LassoWithSGD.train(sc.parallelize(data), initialWeights=array([1.0]))
     """

 class LassoWithSGD(object):
     @classmethod
-    def train(cls, data, iterations=100, step=1.0, reg_param=1.0,
-              mini_batch_fraction=1.0, initial_weights=None):
+    def train(cls, data, iterations=100, step=1.0, regParam=1.0,
+              miniBatchFraction=1.0, initialWeights=None):
         """Train a Lasso regression model on the given data."""
         sc = data.context
         return _regression_train_wrapper(sc, lambda d, i:
                 sc._jvm.PythonMLLibAPI().trainLassoModelWithSGD(d._jrdd,
-                        iterations, step, reg_param, mini_batch_fraction, i),
-                LassoModel, data, initial_weights)
+                        iterations, step, regParam, miniBatchFraction, i),
+                LassoModel, data, initialWeights)

 class RidgeRegressionModel(LinearRegressionModelBase):
     """A linear regression model derived from a least-squares fit with an
     l_2 penalty term.

     >>> data = array([0.0, 0.0, 1.0, 1.0, 3.0, 2.0, 2.0, 3.0]).reshape(4,2)
-    >>> lrm = RidgeRegressionWithSGD.train(sc.parallelize(data), initial_weights=array([1.0]))
+    >>> lrm = RidgeRegressionWithSGD.train(sc.parallelize(data), initialWeights=array([1.0]))
     """

 class RidgeRegressionWithSGD(object):
     @classmethod
-    def train(cls, data, iterations=100, step=1.0, reg_param=1.0,
-              mini_batch_fraction=1.0, initial_weights=None):
+    def train(cls, data, iterations=100, step=1.0, regParam=1.0,
+              miniBatchFraction=1.0, initialWeights=None):
         """Train a ridge regression model on the given data."""
         sc = data.context
         return _regression_train_wrapper(sc, lambda d, i:
                 sc._jvm.PythonMLLibAPI().trainRidgeModelWithSGD(d._jrdd,
-                        iterations, step, reg_param, mini_batch_fraction, i),
-                RidgeRegressionModel, data, initial_weights)
+                        iterations, step, regParam, miniBatchFraction, i),
+                RidgeRegressionModel, data, initialWeights)

 def _test():
     import doctest
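Usage note (illustrative, not part of the patch): the renamed parameters are user-facing
because train() exposes them as keyword arguments, so the old snake_case spellings stop
working after this change. A minimal sketch in the doctest style of the files above,
assuming a running SparkContext named sc, NumPy, and the 0.9-era input format used by
the doctests (an RDD of dense arrays with the label in the first column):

    >>> from numpy import array
    >>> from pyspark.mllib.classification import LogisticRegressionWithSGD
    >>> points = array([0.0, 0.0, 0.0, 1.0, 1.0, 2.0, 1.0, 3.0]).reshape(4, 2)
    >>> # Pre-patch keyword names (mini_batch_fraction=..., initial_weights=...)
    >>> # now raise TypeError; the camelCase names below match the Scala/Java API.
    >>> lrm = LogisticRegressionWithSGD.train(sc.parallelize(points), iterations=50,
    ...                                       miniBatchFraction=1.0,
    ...                                       initialWeights=array([1.0]))
    >>> label = lrm.predict(array([2.5]))  # 0/1 prediction for a new feature vector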