aboutsummaryrefslogtreecommitdiff
path: root/python
diff options
context:
space:
mode:
authorXiangrui Meng <meng@databricks.com>2016-04-30 00:41:28 -0700
committerXiangrui Meng <meng@databricks.com>2016-04-30 00:41:28 -0700
commit7fbe1bb24d6c5657da133135419fb29a609e32c7 (patch)
treed0ab912b509d139df78a90268112753dda3ad8e8 /python
parent5886b6217b7ac783ec605e38f5d960048d448976 (diff)
downloadspark-7fbe1bb24d6c5657da133135419fb29a609e32c7.tar.gz
spark-7fbe1bb24d6c5657da133135419fb29a609e32c7.tar.bz2
spark-7fbe1bb24d6c5657da133135419fb29a609e32c7.zip
[SPARK-14412][.2][ML] rename *RDDStorageLevel to *StorageLevel in ml.ALS
## What changes were proposed in this pull request?

As discussed in #12660, this PR renames:

* intermediateRDDStorageLevel -> intermediateStorageLevel
* finalRDDStorageLevel -> finalStorageLevel

The argument name in `ALS.train` will be addressed in SPARK-15027.

## How was this patch tested?

Existing unit tests.

Author: Xiangrui Meng <meng@databricks.com>

Closes #12803 from mengxr/SPARK-14412.
Diffstat (limited to 'python')
-rw-r--r--python/pyspark/ml/recommendation.py60
-rw-r--r--python/pyspark/ml/tests.py20
2 files changed, 40 insertions, 40 deletions
diff --git a/python/pyspark/ml/recommendation.py b/python/pyspark/ml/recommendation.py
index 97ac6ea83d..08770d9981 100644
--- a/python/pyspark/ml/recommendation.py
+++ b/python/pyspark/ml/recommendation.py
@@ -119,35 +119,35 @@ class ALS(JavaEstimator, HasCheckpointInterval, HasMaxIter, HasPredictionCol, Ha
nonnegative = Param(Params._dummy(), "nonnegative",
"whether to use nonnegative constraint for least squares",
typeConverter=TypeConverters.toBoolean)
- intermediateRDDStorageLevel = Param(Params._dummy(), "intermediateRDDStorageLevel",
- "StorageLevel for intermediate RDDs. Cannot be 'NONE'. " +
- "Default: 'MEMORY_AND_DISK'.",
- typeConverter=TypeConverters.toString)
- finalRDDStorageLevel = Param(Params._dummy(), "finalRDDStorageLevel",
- "StorageLevel for ALS model factor RDDs. " +
- "Default: 'MEMORY_AND_DISK'.",
- typeConverter=TypeConverters.toString)
+ intermediateStorageLevel = Param(Params._dummy(), "intermediateStorageLevel",
+ "StorageLevel for intermediate datasets. Cannot be 'NONE'. " +
+ "Default: 'MEMORY_AND_DISK'.",
+ typeConverter=TypeConverters.toString)
+ finalStorageLevel = Param(Params._dummy(), "finalStorageLevel",
+ "StorageLevel for ALS model factors. " +
+ "Default: 'MEMORY_AND_DISK'.",
+ typeConverter=TypeConverters.toString)
@keyword_only
def __init__(self, rank=10, maxIter=10, regParam=0.1, numUserBlocks=10, numItemBlocks=10,
implicitPrefs=False, alpha=1.0, userCol="user", itemCol="item", seed=None,
ratingCol="rating", nonnegative=False, checkpointInterval=10,
- intermediateRDDStorageLevel="MEMORY_AND_DISK",
- finalRDDStorageLevel="MEMORY_AND_DISK"):
+ intermediateStorageLevel="MEMORY_AND_DISK",
+ finalStorageLevel="MEMORY_AND_DISK"):
"""
__init__(self, rank=10, maxIter=10, regParam=0.1, numUserBlocks=10, numItemBlocks=10, \
implicitPrefs=false, alpha=1.0, userCol="user", itemCol="item", seed=None, \
ratingCol="rating", nonnegative=false, checkpointInterval=10, \
- intermediateRDDStorageLevel="MEMORY_AND_DISK", \
- finalRDDStorageLevel="MEMORY_AND_DISK")
+ intermediateStorageLevel="MEMORY_AND_DISK", \
+ finalStorageLevel="MEMORY_AND_DISK")
"""
super(ALS, self).__init__()
self._java_obj = self._new_java_obj("org.apache.spark.ml.recommendation.ALS", self.uid)
self._setDefault(rank=10, maxIter=10, regParam=0.1, numUserBlocks=10, numItemBlocks=10,
implicitPrefs=False, alpha=1.0, userCol="user", itemCol="item", seed=None,
ratingCol="rating", nonnegative=False, checkpointInterval=10,
- intermediateRDDStorageLevel="MEMORY_AND_DISK",
- finalRDDStorageLevel="MEMORY_AND_DISK")
+ intermediateStorageLevel="MEMORY_AND_DISK",
+ finalStorageLevel="MEMORY_AND_DISK")
kwargs = self.__init__._input_kwargs
self.setParams(**kwargs)
@@ -156,14 +156,14 @@ class ALS(JavaEstimator, HasCheckpointInterval, HasMaxIter, HasPredictionCol, Ha
def setParams(self, rank=10, maxIter=10, regParam=0.1, numUserBlocks=10, numItemBlocks=10,
implicitPrefs=False, alpha=1.0, userCol="user", itemCol="item", seed=None,
ratingCol="rating", nonnegative=False, checkpointInterval=10,
- intermediateRDDStorageLevel="MEMORY_AND_DISK",
- finalRDDStorageLevel="MEMORY_AND_DISK"):
+ intermediateStorageLevel="MEMORY_AND_DISK",
+ finalStorageLevel="MEMORY_AND_DISK"):
"""
setParams(self, rank=10, maxIter=10, regParam=0.1, numUserBlocks=10, numItemBlocks=10, \
implicitPrefs=False, alpha=1.0, userCol="user", itemCol="item", seed=None, \
ratingCol="rating", nonnegative=False, checkpointInterval=10, \
- intermediateRDDStorageLevel="MEMORY_AND_DISK", \
- finalRDDStorageLevel="MEMORY_AND_DISK")
+ intermediateStorageLevel="MEMORY_AND_DISK", \
+ finalStorageLevel="MEMORY_AND_DISK")
Sets params for ALS.
"""
kwargs = self.setParams._input_kwargs
@@ -316,34 +316,34 @@ class ALS(JavaEstimator, HasCheckpointInterval, HasMaxIter, HasPredictionCol, Ha
return self.getOrDefault(self.nonnegative)
@since("2.0.0")
- def setIntermediateRDDStorageLevel(self, value):
+ def setIntermediateStorageLevel(self, value):
"""
- Sets the value of :py:attr:`intermediateRDDStorageLevel`.
+ Sets the value of :py:attr:`intermediateStorageLevel`.
"""
- self._set(intermediateRDDStorageLevel=value)
+ self._set(intermediateStorageLevel=value)
return self
@since("2.0.0")
- def getIntermediateRDDStorageLevel(self):
+ def getIntermediateStorageLevel(self):
"""
- Gets the value of intermediateRDDStorageLevel or its default value.
+ Gets the value of intermediateStorageLevel or its default value.
"""
- return self.getOrDefault(self.intermediateRDDStorageLevel)
+ return self.getOrDefault(self.intermediateStorageLevel)
@since("2.0.0")
- def setFinalRDDStorageLevel(self, value):
+ def setFinalStorageLevel(self, value):
"""
- Sets the value of :py:attr:`finalRDDStorageLevel`.
+ Sets the value of :py:attr:`finalStorageLevel`.
"""
- self._set(finalRDDStorageLevel=value)
+ self._set(finalStorageLevel=value)
return self
@since("2.0.0")
- def getFinalRDDStorageLevel(self):
+ def getFinalStorageLevel(self):
"""
- Gets the value of finalRDDStorageLevel or its default value.
+ Gets the value of finalStorageLevel or its default value.
"""
- return self.getOrDefault(self.finalRDDStorageLevel)
+ return self.getOrDefault(self.finalStorageLevel)
class ALSModel(JavaModel, JavaMLWritable, JavaMLReadable):
diff --git a/python/pyspark/ml/tests.py b/python/pyspark/ml/tests.py
index 7722d57e9e..d5dd6d43c2 100644
--- a/python/pyspark/ml/tests.py
+++ b/python/pyspark/ml/tests.py
@@ -1012,18 +1012,18 @@ class ALSTest(PySparkTestCase):
als = ALS().setMaxIter(1).setRank(1)
# test default params
als.fit(df)
- self.assertEqual(als.getIntermediateRDDStorageLevel(), "MEMORY_AND_DISK")
- self.assertEqual(als._java_obj.getIntermediateRDDStorageLevel(), "MEMORY_AND_DISK")
- self.assertEqual(als.getFinalRDDStorageLevel(), "MEMORY_AND_DISK")
- self.assertEqual(als._java_obj.getFinalRDDStorageLevel(), "MEMORY_AND_DISK")
+ self.assertEqual(als.getIntermediateStorageLevel(), "MEMORY_AND_DISK")
+ self.assertEqual(als._java_obj.getIntermediateStorageLevel(), "MEMORY_AND_DISK")
+ self.assertEqual(als.getFinalStorageLevel(), "MEMORY_AND_DISK")
+ self.assertEqual(als._java_obj.getFinalStorageLevel(), "MEMORY_AND_DISK")
# test non-default params
- als.setIntermediateRDDStorageLevel("MEMORY_ONLY_2")
- als.setFinalRDDStorageLevel("DISK_ONLY")
+ als.setIntermediateStorageLevel("MEMORY_ONLY_2")
+ als.setFinalStorageLevel("DISK_ONLY")
als.fit(df)
- self.assertEqual(als.getIntermediateRDDStorageLevel(), "MEMORY_ONLY_2")
- self.assertEqual(als._java_obj.getIntermediateRDDStorageLevel(), "MEMORY_ONLY_2")
- self.assertEqual(als.getFinalRDDStorageLevel(), "DISK_ONLY")
- self.assertEqual(als._java_obj.getFinalRDDStorageLevel(), "DISK_ONLY")
+ self.assertEqual(als.getIntermediateStorageLevel(), "MEMORY_ONLY_2")
+ self.assertEqual(als._java_obj.getIntermediateStorageLevel(), "MEMORY_ONLY_2")
+ self.assertEqual(als.getFinalStorageLevel(), "DISK_ONLY")
+ self.assertEqual(als._java_obj.getFinalStorageLevel(), "DISK_ONLY")
if __name__ == "__main__":