author    Xiangrui Meng <meng@databricks.com>    2015-08-03 13:59:35 -0700
committer Xiangrui Meng <meng@databricks.com>    2015-08-03 13:59:35 -0700
commit    e4765a46833baff1dd7465c4cf50e947de7e8f21 (patch)
tree      56956773833adb17e5a0052713d5e5fc88c8ec2d /python/pyspark/ml/feature.py
parent    8ca287ebbd58985a568341b08040d0efa9d3641a (diff)
[SPARK-9544] [MLLIB] add Python API for RFormula
Add Python API for RFormula, similar to other feature transformers in Python. This is just a thin wrapper over the Scala implementation.

ericl MechCoder

Author: Xiangrui Meng <meng@databricks.com>

Closes #7879 from mengxr/SPARK-9544 and squashes the following commits:

3d5ff03 [Xiangrui Meng] add a doctest for . and -
5e969a5 [Xiangrui Meng] fix pydoc
1cd41f8 [Xiangrui Meng] organize imports
3c18b10 [Xiangrui Meng] add Python API for RFormula
Diffstat (limited to 'python/pyspark/ml/feature.py')
-rw-r--r--  python/pyspark/ml/feature.py  85
1 file changed, 84 insertions(+), 1 deletion(-)
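
For context, below is a minimal usage sketch of the new wrapper, adapted from the doctest added in this patch. It assumes a Spark 1.5-era setup where a SparkContext and SQLContext are created by hand; the app name and variable names are illustrative and not part of the change.

    from pyspark import SparkContext
    from pyspark.sql import SQLContext
    from pyspark.ml.feature import RFormula

    sc = SparkContext(appName="RFormulaExample")  # illustrative app name
    sqlContext = SQLContext(sc)

    df = sqlContext.createDataFrame([
        (1.0, 1.0, "a"),
        (0.0, 2.0, "b"),
        (0.0, 0.0, "a"),
    ], ["y", "x", "s"])

    # "y ~ x + s" builds a 'features' vector from x and the encoded string
    # column s, and copies y into the 'label' column.
    rf = RFormula(formula="y ~ x + s")
    rf.fit(df).transform(df).show()

    # Params can be overridden per fit; "y ~ . - s" uses every column except s.
    rf.fit(df, {rf.formula: "y ~ . - s"}).transform(df).show()

    sc.stop()
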
diff --git a/python/pyspark/ml/feature.py b/python/pyspark/ml/feature.py
index 015e7a9d49..3f04c41ac5 100644
--- a/python/pyspark/ml/feature.py
+++ b/python/pyspark/ml/feature.py
@@ -24,7 +24,7 @@ from pyspark.mllib.common import inherit_doc
__all__ = ['Binarizer', 'HashingTF', 'IDF', 'IDFModel', 'NGram', 'Normalizer', 'OneHotEncoder',
'PolynomialExpansion', 'RegexTokenizer', 'StandardScaler', 'StandardScalerModel',
'StringIndexer', 'StringIndexerModel', 'Tokenizer', 'VectorAssembler', 'VectorIndexer',
- 'Word2Vec', 'Word2VecModel', 'PCA', 'PCAModel']
+ 'Word2Vec', 'Word2VecModel', 'PCA', 'PCAModel', 'RFormula', 'RFormulaModel']
@inherit_doc
@@ -1110,6 +1110,89 @@ class PCAModel(JavaModel):
"""
+@inherit_doc
+class RFormula(JavaEstimator, HasFeaturesCol, HasLabelCol):
+ """
+ .. note:: Experimental
+
+ Implements the transforms required for fitting a dataset against an
+ R model formula. Currently we support a limited subset of the R
+ operators, including '~', '+', '-', and '.'. Also see the R formula
+ docs:
+ http://stat.ethz.ch/R-manual/R-patched/library/stats/html/formula.html
+
+ >>> df = sqlContext.createDataFrame([
+ ... (1.0, 1.0, "a"),
+ ... (0.0, 2.0, "b"),
+ ... (0.0, 0.0, "a")
+ ... ], ["y", "x", "s"])
+ >>> rf = RFormula(formula="y ~ x + s")
+ >>> rf.fit(df).transform(df).show()
+ +---+---+---+---------+-----+
+ | y| x| s| features|label|
+ +---+---+---+---------+-----+
+ |1.0|1.0| a|[1.0,1.0]| 1.0|
+ |0.0|2.0| b|[2.0,0.0]| 0.0|
+ |0.0|0.0| a|[0.0,1.0]| 0.0|
+ +---+---+---+---------+-----+
+ ...
+ >>> rf.fit(df, {rf.formula: "y ~ . - s"}).transform(df).show()
+ +---+---+---+--------+-----+
+ | y| x| s|features|label|
+ +---+---+---+--------+-----+
+ |1.0|1.0| a| [1.0]| 1.0|
+ |0.0|2.0| b| [2.0]| 0.0|
+ |0.0|0.0| a| [0.0]| 0.0|
+ +---+---+---+--------+-----+
+ ...
+ """
+
+ # a placeholder to make it appear in the generated doc
+ formula = Param(Params._dummy(), "formula", "R model formula")
+
+ @keyword_only
+ def __init__(self, formula=None, featuresCol="features", labelCol="label"):
+ """
+ __init__(self, formula=None, featuresCol="features", labelCol="label")
+ """
+ super(RFormula, self).__init__()
+ self._java_obj = self._new_java_obj("org.apache.spark.ml.feature.RFormula", self.uid)
+ self.formula = Param(self, "formula", "R model formula")
+ kwargs = self.__init__._input_kwargs
+ self.setParams(**kwargs)
+
+ @keyword_only
+ def setParams(self, formula=None, featuresCol="features", labelCol="label"):
+ """
+ setParams(self, formula=None, featuresCol="features", labelCol="label")
+ Sets params for RFormula.
+ """
+ kwargs = self.setParams._input_kwargs
+ return self._set(**kwargs)
+
+ def setFormula(self, value):
+ """
+ Sets the value of :py:attr:`formula`.
+ """
+ self._paramMap[self.formula] = value
+ return self
+
+ def getFormula(self):
+ """
+ Gets the value of :py:attr:`formula`.
+ """
+ return self.getOrDefault(self.formula)
+
+ def _create_model(self, java_model):
+ return RFormulaModel(java_model)
+
+
+class RFormulaModel(JavaModel):
+ """
+ Model fitted by :py:class:`RFormula`.
+ """
+
+
if __name__ == "__main__":
import doctest
from pyspark.context import SparkContext