diff options
author | Yanbo Liang <ybliang8@gmail.com> | 2016-02-22 23:37:09 -0800 |
---|---|---|
committer | Xiangrui Meng <meng@databricks.com> | 2016-02-22 23:37:09 -0800 |
commit | 72427c3e115daf06f7ad8aa50115a8e0da2c6d62 (patch) | |
tree | 4f193b6e3d4ffcd30b08149aa2faed5fe08bf1ac | |
parent | 4fd1993692d45a0da0289b8c7669cc1dc3fe0f2b (diff) | |
download | spark-72427c3e115daf06f7ad8aa50115a8e0da2c6d62.tar.gz spark-72427c3e115daf06f7ad8aa50115a8e0da2c6d62.tar.bz2 spark-72427c3e115daf06f7ad8aa50115a8e0da2c6d62.zip |
[SPARK-13429][MLLIB] Unify Logistic Regression convergence tolerance of ML & MLlib
## What changes were proposed in this pull request?
In order to provide better and more consistent results, let's change the default value of MLlib ```LogisticRegressionWithLBFGS convergenceTol``` from ```1E-4``` to ```1E-6``` which will be equal to ML ```LogisticRegression```.
cc dbtsai
## How was this patch tested?
unit tests
Author: Yanbo Liang <ybliang8@gmail.com>
Closes #11299 from yanboliang/spark-13429.
3 files changed, 12 insertions, 12 deletions
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/optimization/LBFGS.scala b/mllib/src/main/scala/org/apache/spark/mllib/optimization/LBFGS.scala index a5bd77e6be..11179a21c8 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/optimization/LBFGS.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/optimization/LBFGS.scala @@ -41,7 +41,7 @@ class LBFGS(private var gradient: Gradient, private var updater: Updater) extends Optimizer with Logging { private var numCorrections = 10 - private var convergenceTol = 1E-4 + private var convergenceTol = 1E-6 private var maxNumIterations = 100 private var regParam = 0.0 @@ -59,7 +59,7 @@ class LBFGS(private var gradient: Gradient, private var updater: Updater) } /** - * Set the convergence tolerance of iterations for L-BFGS. Default 1E-4. + * Set the convergence tolerance of iterations for L-BFGS. Default 1E-6. * Smaller value will lead to higher accuracy with the cost of more iterations. * This value must be nonnegative. Lower convergence values are less tolerant * and therefore generally cause more iterations to be run. 
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala index d140545e37..cea0adc55c 100644 --- a/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala +++ b/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala @@ -667,9 +667,9 @@ class LogisticRegressionSuite extends SparkFunSuite with MLlibTestSparkContext w test("binary logistic regression with intercept with L1 regularization") { val trainer1 = new LogisticRegressionWithLBFGS().setIntercept(true).setFeatureScaling(true) - trainer1.optimizer.setUpdater(new L1Updater).setRegParam(0.12).setConvergenceTol(1E-6) + trainer1.optimizer.setUpdater(new L1Updater).setRegParam(0.12) val trainer2 = new LogisticRegressionWithLBFGS().setIntercept(true).setFeatureScaling(false) - trainer2.optimizer.setUpdater(new L1Updater).setRegParam(0.12).setConvergenceTol(1E-6) + trainer2.optimizer.setUpdater(new L1Updater).setRegParam(0.12) val model1 = trainer1.run(binaryDataset) val model2 = trainer2.run(binaryDataset) @@ -726,9 +726,9 @@ class LogisticRegressionSuite extends SparkFunSuite with MLlibTestSparkContext w test("binary logistic regression without intercept with L1 regularization") { val trainer1 = new LogisticRegressionWithLBFGS().setIntercept(false).setFeatureScaling(true) - trainer1.optimizer.setUpdater(new L1Updater).setRegParam(0.12).setConvergenceTol(1E-6) + trainer1.optimizer.setUpdater(new L1Updater).setRegParam(0.12) val trainer2 = new LogisticRegressionWithLBFGS().setIntercept(false).setFeatureScaling(false) - trainer2.optimizer.setUpdater(new L1Updater).setRegParam(0.12).setConvergenceTol(1E-6) + trainer2.optimizer.setUpdater(new L1Updater).setRegParam(0.12) val model1 = trainer1.run(binaryDataset) val model2 = trainer2.run(binaryDataset) @@ -786,9 +786,9 @@ class LogisticRegressionSuite extends SparkFunSuite 
with MLlibTestSparkContext w test("binary logistic regression with intercept with L2 regularization") { val trainer1 = new LogisticRegressionWithLBFGS().setIntercept(true).setFeatureScaling(true) - trainer1.optimizer.setUpdater(new SquaredL2Updater).setRegParam(1.37).setConvergenceTol(1E-6) + trainer1.optimizer.setUpdater(new SquaredL2Updater).setRegParam(1.37) val trainer2 = new LogisticRegressionWithLBFGS().setIntercept(true).setFeatureScaling(false) - trainer2.optimizer.setUpdater(new SquaredL2Updater).setRegParam(1.37).setConvergenceTol(1E-6) + trainer2.optimizer.setUpdater(new SquaredL2Updater).setRegParam(1.37) val model1 = trainer1.run(binaryDataset) val model2 = trainer2.run(binaryDataset) @@ -845,9 +845,9 @@ class LogisticRegressionSuite extends SparkFunSuite with MLlibTestSparkContext w test("binary logistic regression without intercept with L2 regularization") { val trainer1 = new LogisticRegressionWithLBFGS().setIntercept(false).setFeatureScaling(true) - trainer1.optimizer.setUpdater(new SquaredL2Updater).setRegParam(1.37).setConvergenceTol(1E-6) + trainer1.optimizer.setUpdater(new SquaredL2Updater).setRegParam(1.37) val trainer2 = new LogisticRegressionWithLBFGS().setIntercept(false).setFeatureScaling(false) - trainer2.optimizer.setUpdater(new SquaredL2Updater).setRegParam(1.37).setConvergenceTol(1E-6) + trainer2.optimizer.setUpdater(new SquaredL2Updater).setRegParam(1.37) val model1 = trainer1.run(binaryDataset) val model2 = trainer2.run(binaryDataset) diff --git a/python/pyspark/mllib/classification.py b/python/pyspark/mllib/classification.py index b24592c379..b4d54ef61b 100644 --- a/python/pyspark/mllib/classification.py +++ b/python/pyspark/mllib/classification.py @@ -327,7 +327,7 @@ class LogisticRegressionWithLBFGS(object): @classmethod @since('1.2.0') def train(cls, data, iterations=100, initialWeights=None, regParam=0.01, regType="l2", - intercept=False, corrections=10, tolerance=1e-4, validateData=True, numClasses=2): + intercept=False, 
corrections=10, tolerance=1e-6, validateData=True, numClasses=2): """ Train a logistic regression model on the given data. @@ -359,7 +359,7 @@ class LogisticRegressionWithLBFGS(object): (default: 10) :param tolerance: The convergence tolerance of iterations for L-BFGS. - (default: 1e-4) + (default: 1e-6) :param validateData: Boolean parameter which indicates if the algorithm should validate data before training. |