aboutsummaryrefslogtreecommitdiff
path: root/mllib/src
diff options
context:
space:
mode:
authorwm624@hotmail.com <wm624@hotmail.com>2017-01-27 16:03:53 -0800
committerJoseph K. Bradley <joseph@databricks.com>2017-01-27 16:03:53 -0800
commitbb1a1fe05e293c480c88123d4c83a6b8c25f6e2e (patch)
tree3052e47ad7cae1badb4257fb75eea3a1dbbdab68 /mllib/src
parent21aa8c32ba7a29aafc000ecce2e6c802ced6a009 (diff)
downloadspark-bb1a1fe05e293c480c88123d4c83a6b8c25f6e2e.tar.gz
spark-bb1a1fe05e293c480c88123d4c83a6b8c25f6e2e.tar.bz2
spark-bb1a1fe05e293c480c88123d4c83a6b8c25f6e2e.zip
[SPARK-19336][ML][PYSPARK] LinearSVC Python API
## What changes were proposed in this pull request?

Add Python API for the newly added LinearSVC algorithm.

## How was this patch tested?

Add new doc string test.

Author: wm624@hotmail.com <wm624@hotmail.com>

Closes #16694 from wangmiao1981/ser.
Diffstat (limited to 'mllib/src')
-rw-r--r--mllib/src/main/scala/org/apache/spark/ml/classification/LinearSVC.scala4
1 file changed, 2 insertions, 2 deletions
diff --git a/mllib/src/main/scala/org/apache/spark/ml/classification/LinearSVC.scala b/mllib/src/main/scala/org/apache/spark/ml/classification/LinearSVC.scala
index c4e93bf5e8..3b14c4b004 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/classification/LinearSVC.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/classification/LinearSVC.scala
@@ -114,7 +114,7 @@ class LinearSVC @Since("2.2.0") (
setDefault(standardization -> true)
/**
- * Sets the value of param [[weightCol]].
+ * Set the value of param [[weightCol]].
* If this is not set or empty, we treat all instance weights as 1.0.
* Default is not set, so all instances have weight one.
*
@@ -421,7 +421,7 @@ private class LinearSVCCostFun(
/**
* LinearSVCAggregator computes the gradient and loss for hinge loss function, as used
- * in binary classification for instances in sparse or dense vector in a online fashion.
+ * in binary classification for instances in sparse or dense vector in an online fashion.
*
* Two LinearSVCAggregator can be merged together to have a summary of loss and gradient of
* the corresponding joint dataset.