aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala5
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala b/mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala
index 8f0d1e4aa0..3b663b5def 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala
@@ -235,7 +235,7 @@ object GradientDescent extends Logging {
if (miniBatchSize > 0) {
/**
- * NOTE(Xinghao): lossSum is computed using the weights from the previous iteration
+ * lossSum is computed using the weights from the previous iteration
* and regVal is the regularization value computed in the previous iteration as well.
*/
stochasticLossHistory.append(lossSum / miniBatchSize + regVal)
@@ -264,6 +264,9 @@ object GradientDescent extends Logging {
}
+ /**
+ * Alias of [[runMiniBatchSGD]] with convergenceTol set to default value of 0.001.
+ */
def runMiniBatchSGD(
data: RDD[(Double, Vector)],
gradient: Gradient,