aboutsummaryrefslogtreecommitdiff
path: root/mllib
diff options
context:
space:
mode:
authorsueann <sueann@databricks.com>2017-02-10 11:50:23 -0800
committerJoseph K. Bradley <joseph@databricks.com>2017-02-10 11:50:23 -0800
commit3a43ae7c0bbce8eda98f50a97a0138f860197a98 (patch)
tree530f0ffbf18da458b9921805bc6a7a6a1578a38d /mllib
parentde8a03e68202647555e30fffba551f65bc77608d (diff)
downloadspark-3a43ae7c0bbce8eda98f50a97a0138f860197a98.tar.gz
spark-3a43ae7c0bbce8eda98f50a97a0138f860197a98.tar.bz2
spark-3a43ae7c0bbce8eda98f50a97a0138f860197a98.zip
[SPARK-18613][ML] make spark.mllib LDA dependencies in spark.ml LDA private
## What changes were proposed in this pull request? spark.ml.*LDAModel classes were exposing spark.mllib LDA models via protected methods. Made them package (clustering) private. ## How was this patch tested? ``` build/sbt doc # "mllib.clustering" no longer appears in the docs for *LDA* classes build/sbt compile # compiles build/sbt > mllib/testOnly # tests pass ``` Author: sueann <sueann@databricks.com> Closes #16860 from sueann/SPARK-18613.
Diffstat (limited to 'mllib')
-rw-r--r--mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala12
1 files changed, 6 insertions, 6 deletions
diff --git a/mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala b/mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala
index 03f4ac5b28..bbcef3502d 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala
@@ -418,11 +418,11 @@ abstract class LDAModel private[ml] (
* If this model was produced by EM, then this local representation may be built lazily.
*/
@Since("1.6.0")
- protected def oldLocalModel: OldLocalLDAModel
+ private[clustering] def oldLocalModel: OldLocalLDAModel
/** Returns underlying spark.mllib model, which may be local or distributed */
@Since("1.6.0")
- protected def getModel: OldLDAModel
+ private[clustering] def getModel: OldLDAModel
private[ml] def getEffectiveDocConcentration: Array[Double] = getModel.docConcentration.toArray
@@ -563,7 +563,7 @@ abstract class LDAModel private[ml] (
class LocalLDAModel private[ml] (
uid: String,
vocabSize: Int,
- @Since("1.6.0") override protected val oldLocalModel: OldLocalLDAModel,
+ @Since("1.6.0") override private[clustering] val oldLocalModel: OldLocalLDAModel,
sparkSession: SparkSession)
extends LDAModel(uid, vocabSize, sparkSession) {
@@ -573,7 +573,7 @@ class LocalLDAModel private[ml] (
copyValues(copied, extra).setParent(parent).asInstanceOf[LocalLDAModel]
}
- override protected def getModel: OldLDAModel = oldLocalModel
+ override private[clustering] def getModel: OldLDAModel = oldLocalModel
@Since("1.6.0")
override def isDistributed: Boolean = false
@@ -656,14 +656,14 @@ class DistributedLDAModel private[ml] (
private var oldLocalModelOption: Option[OldLocalLDAModel])
extends LDAModel(uid, vocabSize, sparkSession) {
- override protected def oldLocalModel: OldLocalLDAModel = {
+ override private[clustering] def oldLocalModel: OldLocalLDAModel = {
if (oldLocalModelOption.isEmpty) {
oldLocalModelOption = Some(oldDistributedModel.toLocal)
}
oldLocalModelOption.get
}
- override protected def getModel: OldLDAModel = oldDistributedModel
+ override private[clustering] def getModel: OldLDAModel = oldDistributedModel
/**
* Convert this distributed model to a local representation. This discards info about the