author     Feynman Liang <fliang@databricks.com>  2015-08-25 13:21:05 -0700
committer  Joseph K. Bradley <joseph@databricks.com>  2015-08-25 13:21:16 -0700
commit     95e44b4df81b09803be2fde8c4e2566be0c8fdbc (patch)
tree       47b4933d7d1e6b750fcd852ec6b2bef4b11d732a
parent     5a32ed75c939dc42886ea940aba2b14b89e9f40e (diff)
[SPARK-9800] Adds docs for GradientDescent$.runMiniBatchSGD alias
* Adds doc for alias of runMiniBatchSGD documenting default value for convergenceTol
* Cleans up a note in code

Author: Feynman Liang <fliang@databricks.com>

Closes #8425 from feynmanliang/SPARK-9800.

(cherry picked from commit c0e9ff1588b4d9313cc6ec6e00e5c7663eb67910)
Signed-off-by: Joseph K. Bradley <joseph@databricks.com>
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala  5
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala b/mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala
index 8f0d1e4aa0..3b663b5def 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala
@@ -235,7 +235,7 @@ object GradientDescent extends Logging {
       if (miniBatchSize > 0) {
         /**
-         * NOTE(Xinghao): lossSum is computed using the weights from the previous iteration
+         * lossSum is computed using the weights from the previous iteration
          * and regVal is the regularization value computed in the previous iteration as well.
          */
         stochasticLossHistory.append(lossSum / miniBatchSize + regVal)
@@ -264,6 +264,9 @@ object GradientDescent extends Logging {
   }
+  /**
+   * Alias of [[runMiniBatchSGD]] with convergenceTol set to default value of 0.001.
+   */
   def runMiniBatchSGD(
       data: RDD[(Double, Vector)],
       gradient: Gradient,
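
For reference, the alias documented by this commit is the overload of runMiniBatchSGD that omits the trailing convergenceTol parameter and forwards to the full method with the default of 0.001. Below is a minimal, self-contained sketch of how a caller might invoke the alias; the toy dataset, local master URL, and the choice of LeastSquaresGradient/SimpleUpdater are illustrative assumptions, not part of this commit, and runMiniBatchSGD is a developer API rather than the usual user-facing entry point:

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.mllib.linalg.Vectors
    import org.apache.spark.mllib.optimization.{GradientDescent, LeastSquaresGradient, SimpleUpdater}

    object RunMiniBatchSGDAliasSketch {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(
          new SparkConf().setAppName("sgd-alias-sketch").setMaster("local[*]"))

        // Toy (label, features) pairs for a least-squares problem.
        val data = sc.parallelize(Seq(
          (1.0, Vectors.dense(1.0, 0.0)),
          (2.0, Vectors.dense(2.0, 1.0)),
          (3.0, Vectors.dense(3.0, 2.0))
        ))

        // Calling the alias: convergenceTol is omitted and defaults to 0.001,
        // which is exactly what the new Scaladoc in this commit documents.
        val (weights, lossHistory) = GradientDescent.runMiniBatchSGD(
          data,
          new LeastSquaresGradient(),
          new SimpleUpdater(),
          1.0,                      // stepSize
          100,                      // numIterations
          0.0,                      // regParam
          1.0,                      // miniBatchFraction
          Vectors.dense(0.0, 0.0)   // initialWeights
        )

        println(s"weights = $weights, final loss = ${lossHistory.last}")
        sc.stop()
      }
    }

Each entry of lossHistory (the stochasticLossHistory updated in the first hunk) records the regularized loss computed with the previous iteration's weights, so lossHistory.last reflects the state one step before the returned weights.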