Diffstat (limited to 'python/pyspark/ml')
 python/pyspark/ml/classification.py |  3 ++-
 python/pyspark/ml/feature.py        |  3 ++-
 python/pyspark/ml/pipeline.py       |  3 ++-
 python/pyspark/ml/util.py           | 14 --------------
 python/pyspark/ml/wrapper.py        |  2 +-
 5 files changed, 7 insertions(+), 18 deletions(-)
diff --git a/python/pyspark/ml/classification.py b/python/pyspark/ml/classification.py
index b6de7493d7..4ff7463498 100644
--- a/python/pyspark/ml/classification.py
+++ b/python/pyspark/ml/classification.py
@@ -15,10 +15,11 @@
# limitations under the License.
#
-from pyspark.ml.util import inherit_doc, keyword_only
+from pyspark.ml.util import keyword_only
from pyspark.ml.wrapper import JavaEstimator, JavaModel
from pyspark.ml.param.shared import HasFeaturesCol, HasLabelCol, HasPredictionCol, HasMaxIter,\
HasRegParam
+from pyspark.mllib.common import inherit_doc
__all__ = ['LogisticRegression', 'LogisticRegressionModel']
diff --git a/python/pyspark/ml/feature.py b/python/pyspark/ml/feature.py
index f1ddbb478d..433b4fb5d2 100644
--- a/python/pyspark/ml/feature.py
+++ b/python/pyspark/ml/feature.py
@@ -16,8 +16,9 @@
#
from pyspark.ml.param.shared import HasInputCol, HasOutputCol, HasNumFeatures
-from pyspark.ml.util import inherit_doc, keyword_only
+from pyspark.ml.util import keyword_only
from pyspark.ml.wrapper import JavaTransformer
+from pyspark.mllib.common import inherit_doc
__all__ = ['Tokenizer', 'HashingTF']
diff --git a/python/pyspark/ml/pipeline.py b/python/pyspark/ml/pipeline.py
index 18d8a58f35..5233c5801e 100644
--- a/python/pyspark/ml/pipeline.py
+++ b/python/pyspark/ml/pipeline.py
@@ -18,7 +18,8 @@
from abc import ABCMeta, abstractmethod
from pyspark.ml.param import Param, Params
-from pyspark.ml.util import inherit_doc, keyword_only
+from pyspark.ml.util import keyword_only
+from pyspark.mllib.common import inherit_doc
__all__ = ['Estimator', 'Transformer', 'Pipeline', 'PipelineModel']
diff --git a/python/pyspark/ml/util.py b/python/pyspark/ml/util.py
index 81d3f0882b..6f7f39c40e 100644
--- a/python/pyspark/ml/util.py
+++ b/python/pyspark/ml/util.py
@@ -19,20 +19,6 @@ from functools import wraps
import uuid
-def inherit_doc(cls):
- for name, func in vars(cls).items():
- # only inherit docstring for public functions
- if name.startswith("_"):
- continue
- if not func.__doc__:
- for parent in cls.__bases__:
- parent_func = getattr(parent, name, None)
- if parent_func and getattr(parent_func, "__doc__", None):
- func.__doc__ = parent_func.__doc__
- break
- return cls
-
-
def keyword_only(func):
"""
A decorator that forces keyword arguments in the wrapped method
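(Note: the keyword_only decorator that remains in util.py is only partially visible in this hunk; its docstring above describes a method wrapper that rejects positional arguments. Below is a minimal sketch of that pattern, assuming the wrapper also records the keyword arguments on the instance; the attribute name _input_kwargs and the exact error message are assumptions, not taken from this diff.)

from functools import wraps

def keyword_only(func):
    """Force the wrapped method to be called with keyword arguments only."""
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        if len(args) > 0:
            raise TypeError("Method %s forces keyword arguments." % func.__name__)
        # assumption: the captured kwargs are stashed so that
        # setParams-style helpers can read them back later
        self._input_kwargs = kwargs
        return func(self, **kwargs)
    return wrapper
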
diff --git a/python/pyspark/ml/wrapper.py b/python/pyspark/ml/wrapper.py
index 9e12ddc3d9..4bae96f678 100644
--- a/python/pyspark/ml/wrapper.py
+++ b/python/pyspark/ml/wrapper.py
@@ -21,7 +21,7 @@ from pyspark import SparkContext
from pyspark.sql import DataFrame
from pyspark.ml.param import Params
from pyspark.ml.pipeline import Estimator, Transformer
-from pyspark.ml.util import inherit_doc
+from pyspark.mllib.common import inherit_doc
def _jvm():
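
(Note: for reference, the inherit_doc helper relocated by this change is a class decorator that copies a parent class's docstring onto any public method of the subclass that lacks one. The sketch below mirrors the body deleted from util.py above and shows the intended effect; the toy Transformer/Tokenizer classes are illustrative stand-ins, and it assumes the pyspark.mllib.common version behaves the same way.)

def inherit_doc(cls):
    """Copy docstrings from parent classes onto undocumented public methods."""
    for name, func in vars(cls).items():
        # only inherit docstring for public functions
        if name.startswith("_"):
            continue
        if not func.__doc__:
            for parent in cls.__bases__:
                parent_func = getattr(parent, name, None)
                if parent_func and getattr(parent_func, "__doc__", None):
                    func.__doc__ = parent_func.__doc__
                    break
    return cls


class Transformer(object):
    def transform(self, dataset):
        """Transforms the input dataset."""
        raise NotImplementedError


@inherit_doc
class Tokenizer(Transformer):
    def transform(self, dataset):  # no docstring of its own
        return dataset


print(Tokenizer.transform.__doc__)  # prints the docstring inherited from Transformer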