about | summary | refs | log | tree | commit | diff
path: root/mllib
diff options
context:
space:
mode:
author    Xiangrui Meng <meng@databricks.com>  2016-04-30 00:41:28 -0700
committer Xiangrui Meng <meng@databricks.com>  2016-04-30 00:41:28 -0700
commit    7fbe1bb24d6c5657da133135419fb29a609e32c7 (patch)
tree      d0ab912b509d139df78a90268112753dda3ad8e8 /mllib
parent    5886b6217b7ac783ec605e38f5d960048d448976 (diff)
downloadspark-7fbe1bb24d6c5657da133135419fb29a609e32c7.tar.gz
spark-7fbe1bb24d6c5657da133135419fb29a609e32c7.tar.bz2
spark-7fbe1bb24d6c5657da133135419fb29a609e32c7.zip
[SPARK-14412][.2][ML] rename *RDDStorageLevel to *StorageLevel in ml.ALS
## What changes were proposed in this pull request? As discussed in #12660, this PR renames * intermediateRDDStorageLevel -> intermediateStorageLevel * finalRDDStorageLevel -> finalStorageLevel The argument name in `ALS.train` will be addressed in SPARK-15027. ## How was this patch tested? Existing unit tests. Author: Xiangrui Meng <meng@databricks.com> Closes #12803 from mengxr/SPARK-14412.
Diffstat (limited to 'mllib')
-rw-r--r--  mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala       30
-rw-r--r--  mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala  14
2 files changed, 22 insertions(+), 22 deletions(-)
diff --git a/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala b/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala
index 55cea800d9..541923048a 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala
@@ -154,37 +154,37 @@ private[recommendation] trait ALSParams extends ALSModelParams with HasMaxIter w
def getNonnegative: Boolean = $(nonnegative)
/**
- * Param for StorageLevel for intermediate RDDs. Pass in a string representation of
+ * Param for StorageLevel for intermediate datasets. Pass in a string representation of
* [[StorageLevel]]. Cannot be "NONE".
* Default: "MEMORY_AND_DISK".
*
* @group expertParam
*/
- val intermediateRDDStorageLevel = new Param[String](this, "intermediateRDDStorageLevel",
- "StorageLevel for intermediate RDDs. Cannot be 'NONE'. Default: 'MEMORY_AND_DISK'.",
+ val intermediateStorageLevel = new Param[String](this, "intermediateStorageLevel",
+ "StorageLevel for intermediate datasets. Cannot be 'NONE'. Default: 'MEMORY_AND_DISK'.",
(s: String) => Try(StorageLevel.fromString(s)).isSuccess && s != "NONE")
/** @group expertGetParam */
- def getIntermediateRDDStorageLevel: String = $(intermediateRDDStorageLevel)
+ def getIntermediateStorageLevel: String = $(intermediateStorageLevel)
/**
- * Param for StorageLevel for ALS model factor RDDs. Pass in a string representation of
+ * Param for StorageLevel for ALS model factors. Pass in a string representation of
* [[StorageLevel]].
* Default: "MEMORY_AND_DISK".
*
* @group expertParam
*/
- val finalRDDStorageLevel = new Param[String](this, "finalRDDStorageLevel",
- "StorageLevel for ALS model factor RDDs. Default: 'MEMORY_AND_DISK'.",
+ val finalStorageLevel = new Param[String](this, "finalStorageLevel",
+ "StorageLevel for ALS model factors. Default: 'MEMORY_AND_DISK'.",
(s: String) => Try(StorageLevel.fromString(s)).isSuccess)
/** @group expertGetParam */
- def getFinalRDDStorageLevel: String = $(finalRDDStorageLevel)
+ def getFinalStorageLevel: String = $(finalStorageLevel)
setDefault(rank -> 10, maxIter -> 10, regParam -> 0.1, numUserBlocks -> 10, numItemBlocks -> 10,
implicitPrefs -> false, alpha -> 1.0, userCol -> "user", itemCol -> "item",
ratingCol -> "rating", nonnegative -> false, checkpointInterval -> 10,
- intermediateRDDStorageLevel -> "MEMORY_AND_DISK", finalRDDStorageLevel -> "MEMORY_AND_DISK")
+ intermediateStorageLevel -> "MEMORY_AND_DISK", finalStorageLevel -> "MEMORY_AND_DISK")
/**
* Validates and transforms the input schema.
@@ -406,14 +406,14 @@ class ALS(@Since("1.4.0") override val uid: String) extends Estimator[ALSModel]
/** @group expertSetParam */
@Since("2.0.0")
- def setIntermediateRDDStorageLevel(value: String): this.type = {
- set(intermediateRDDStorageLevel, value)
+ def setIntermediateStorageLevel(value: String): this.type = {
+ set(intermediateStorageLevel, value)
}
/** @group expertSetParam */
@Since("2.0.0")
- def setFinalRDDStorageLevel(value: String): this.type = {
- set(finalRDDStorageLevel, value)
+ def setFinalStorageLevel(value: String): this.type = {
+ set(finalStorageLevel, value)
}
/**
@@ -446,8 +446,8 @@ class ALS(@Since("1.4.0") override val uid: String) extends Estimator[ALSModel]
numUserBlocks = $(numUserBlocks), numItemBlocks = $(numItemBlocks),
maxIter = $(maxIter), regParam = $(regParam), implicitPrefs = $(implicitPrefs),
alpha = $(alpha), nonnegative = $(nonnegative),
- intermediateRDDStorageLevel = StorageLevel.fromString($(intermediateRDDStorageLevel)),
- finalRDDStorageLevel = StorageLevel.fromString($(finalRDDStorageLevel)),
+ intermediateRDDStorageLevel = StorageLevel.fromString($(intermediateStorageLevel)),
+ finalRDDStorageLevel = StorageLevel.fromString($(finalStorageLevel)),
checkpointInterval = $(checkpointInterval), seed = $(seed))
val userDF = userFactors.toDF("id", "features")
val itemDF = itemFactors.toDF("id", "features")
diff --git a/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala
index 2e5c6a4f20..4c4eb72cd1 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala
@@ -525,13 +525,13 @@ class ALSStorageSuite
test("invalid storage params") {
intercept[IllegalArgumentException] {
- new ALS().setIntermediateRDDStorageLevel("foo")
+ new ALS().setIntermediateStorageLevel("foo")
}
intercept[IllegalArgumentException] {
- new ALS().setIntermediateRDDStorageLevel("NONE")
+ new ALS().setIntermediateStorageLevel("NONE")
}
intercept[IllegalArgumentException] {
- new ALS().setFinalRDDStorageLevel("foo")
+ new ALS().setFinalStorageLevel("foo")
}
}
@@ -563,8 +563,8 @@ class ALSStorageSuite
val nonDefaultListener = new IntermediateRDDStorageListener
sc.addSparkListener(nonDefaultListener)
val nonDefaultModel = als
- .setFinalRDDStorageLevel("MEMORY_ONLY")
- .setIntermediateRDDStorageLevel("DISK_ONLY")
+ .setFinalStorageLevel("MEMORY_ONLY")
+ .setIntermediateStorageLevel("DISK_ONLY")
.fit(data)
// check final factor RDD non-default storage levels
val levels = sc.getPersistentRDDs.collect {
@@ -617,7 +617,7 @@ object ALSSuite {
"alpha" -> 0.9,
"nonnegative" -> true,
"checkpointInterval" -> 20,
- "intermediateRDDStorageLevel" -> "MEMORY_ONLY",
- "finalRDDStorageLevel" -> "MEMORY_AND_DISK_SER"
+ "intermediateStorageLevel" -> "MEMORY_ONLY",
+ "finalStorageLevel" -> "MEMORY_AND_DISK_SER"
)
}