diff options
author | Feynman Liang <fliang@databricks.com> | 2015-08-25 13:21:05 -0700 |
---|---|---|
committer | Joseph K. Bradley <joseph@databricks.com> | 2015-08-25 13:21:05 -0700 |
commit | c0e9ff1588b4d9313cc6ec6e00e5c7663eb67910 (patch) | |
tree | 82ff44fed289f5c2b723a1e7dc16ca3f53d178a7 | |
parent | 71a138cd0e0a14e8426f97877e3b52a562bbd02c (diff) | |
download | spark-c0e9ff1588b4d9313cc6ec6e00e5c7663eb67910.tar.gz spark-c0e9ff1588b4d9313cc6ec6e00e5c7663eb67910.tar.bz2 spark-c0e9ff1588b4d9313cc6ec6e00e5c7663eb67910.zip |
[SPARK-9800] Adds docs for GradientDescent$.runMiniBatchSGD alias
* Adds doc for alias of runMiniBatchSGD documenting default value for convergenceTol
* Cleans up a note in code
Author: Feynman Liang <fliang@databricks.com>
Closes #8425 from feynmanliang/SPARK-9800.
-rw-r--r-- | mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala | 5 |
1 file changed, 4 insertions, 1 deletion
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala b/mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala index 8f0d1e4aa0..3b663b5def 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala @@ -235,7 +235,7 @@ object GradientDescent extends Logging { if (miniBatchSize > 0) { /** - * NOTE(Xinghao): lossSum is computed using the weights from the previous iteration + * lossSum is computed using the weights from the previous iteration * and regVal is the regularization value computed in the previous iteration as well. */ stochasticLossHistory.append(lossSum / miniBatchSize + regVal) @@ -264,6 +264,9 @@ object GradientDescent extends Logging { } + /** + * Alias of [[runMiniBatchSGD]] with convergenceTol set to default value of 0.001. + */ def runMiniBatchSGD( data: RDD[(Double, Vector)], gradient: Gradient, |