aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJoseph K. Bradley <joseph@databricks.com>2015-09-11 08:55:35 -0700
committerXiangrui Meng <meng@databricks.com>2015-09-11 08:55:35 -0700
commit2e3a280754a28dc36a71b9ff988e34cbf457f6c3 (patch)
tree9e2ab19bf0265f82fa85fb4b6daadcc858fbadb0
parent960d2d0ac6b5a22242a922f87f745f7d1f736181 (diff)
downloadspark-2e3a280754a28dc36a71b9ff988e34cbf457f6c3.tar.gz
spark-2e3a280754a28dc36a71b9ff988e34cbf457f6c3.tar.bz2
spark-2e3a280754a28dc36a71b9ff988e34cbf457f6c3.zip
[MINOR] [MLLIB] [ML] [DOC] Minor doc fixes for StringIndexer and MetadataUtils
Changes: * Make Scala doc for StringIndexerInverse clearer. Also remove Scala doc from transformSchema, so that the doc is inherited. * MetadataUtils.scala: “Helper utilities for tree-based algorithms” —> not just trees anymore CC: holdenk mengxr Author: Joseph K. Bradley <joseph@databricks.com> Closes #8679 from jkbradley/doc-fixes-1.5.
-rw-r--r--mllib/src/main/scala/org/apache/spark/ml/feature/StringIndexer.scala31
-rw-r--r--mllib/src/main/scala/org/apache/spark/ml/util/MetadataUtils.scala2
-rw-r--r--python/pyspark/ml/feature.py16
3 files changed, 20 insertions, 29 deletions
diff --git a/mllib/src/main/scala/org/apache/spark/ml/feature/StringIndexer.scala b/mllib/src/main/scala/org/apache/spark/ml/feature/StringIndexer.scala
index b6482ffe0b..3a4ab9a857 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/feature/StringIndexer.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/feature/StringIndexer.scala
@@ -181,10 +181,10 @@ class StringIndexerModel (
/**
* :: Experimental ::
- * A [[Transformer]] that maps a column of string indices back to a new column of corresponding
- * string values using either the ML attributes of the input column, or if provided using the labels
- * supplied by the user.
- * All original columns are kept during transformation.
+ * A [[Transformer]] that maps a column of indices back to a new column of corresponding
+ * string values.
+ * The index-string mapping is either from the ML attributes of the input column,
+ * or from user-supplied labels (which take precedence over ML attributes).
*
* @see [[StringIndexer]] for converting strings into indices
*/
@@ -202,32 +202,23 @@ class IndexToString private[ml] (
/** @group setParam */
def setOutputCol(value: String): this.type = set(outputCol, value)
- /**
- * Optional labels to be provided by the user, if not supplied column
- * metadata is read for labels. The default value is an empty array,
- * but the empty array is ignored and column metadata used instead.
- * @group setParam
- */
+ /** @group setParam */
def setLabels(value: Array[String]): this.type = set(labels, value)
/**
- * Param for array of labels.
- * Optional labels to be provided by the user.
- * Default: Empty array, in which case column metadata is used for labels.
+ * Optional param for array of labels specifying index-string mapping.
+ *
+ * Default: Empty array, in which case [[inputCol]] metadata is used for labels.
* @group param
*/
final val labels: StringArrayParam = new StringArrayParam(this, "labels",
- "array of labels, if not provided metadata from inputCol is used instead.")
+ "Optional array of labels specifying index-string mapping." +
+ " If not provided or if empty, then metadata from inputCol is used instead.")
setDefault(labels, Array.empty[String])
- /**
- * Optional labels to be provided by the user, if not supplied column
- * metadata is read for labels.
- * @group getParam
- */
+ /** @group getParam */
final def getLabels: Array[String] = $(labels)
- /** Transform the schema for the inverse transformation */
override def transformSchema(schema: StructType): StructType = {
val inputColName = $(inputCol)
val inputDataType = schema(inputColName).dataType
diff --git a/mllib/src/main/scala/org/apache/spark/ml/util/MetadataUtils.scala b/mllib/src/main/scala/org/apache/spark/ml/util/MetadataUtils.scala
index fcb517b5f7..96a38a3bde 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/util/MetadataUtils.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/util/MetadataUtils.scala
@@ -25,7 +25,7 @@ import org.apache.spark.sql.types.StructField
/**
- * Helper utilities for tree-based algorithms
+ * Helper utilities for algorithms using ML metadata
*/
private[spark] object MetadataUtils {
diff --git a/python/pyspark/ml/feature.py b/python/pyspark/ml/feature.py
index 71dc636b83..97cbee73a0 100644
--- a/python/pyspark/ml/feature.py
+++ b/python/pyspark/ml/feature.py
@@ -985,17 +985,17 @@ class IndexToString(JavaTransformer, HasInputCol, HasOutputCol):
"""
.. note:: Experimental
- A :py:class:`Transformer` that maps a column of string indices back to a new column of
- corresponding string values using either the ML attributes of the input column, or if
- provided using the labels supplied by the user.
- All original columns are kept during transformation.
+ A :py:class:`Transformer` that maps a column of indices back to a new column of
+ corresponding string values.
+ The index-string mapping is either from the ML attributes of the input column,
+ or from user-supplied labels (which take precedence over ML attributes).
See L{StringIndexer} for converting strings into indices.
"""
# a placeholder to make the labels show up in generated doc
labels = Param(Params._dummy(), "labels",
- "Optional array of labels to be provided by the user, if not supplied or " +
- "empty, column metadata is read for labels")
+ "Optional array of labels specifying index-string mapping." +
+ " If not provided or if empty, then metadata from inputCol is used instead.")
@keyword_only
def __init__(self, inputCol=None, outputCol=None, labels=None):
@@ -1006,8 +1006,8 @@ class IndexToString(JavaTransformer, HasInputCol, HasOutputCol):
self._java_obj = self._new_java_obj("org.apache.spark.ml.feature.IndexToString",
self.uid)
self.labels = Param(self, "labels",
- "Optional array of labels to be provided by the user, if not " +
- "supplied or empty, column metadata is read for labels")
+ "Optional array of labels specifying index-string mapping. If not" +
+ " provided or if empty, then metadata from inputCol is used instead.")
kwargs = self.__init__._input_kwargs
self.setParams(**kwargs)