about summary refs log tree commit diff
path: root/mllib
diff options
context:
space:
mode:
Diffstat (limited to 'mllib')
-rw-r--r-- mllib/src/main/scala/org/apache/spark/ml/feature/ElementwiseProduct.scala | 13
-rw-r--r-- mllib/src/test/scala/org/apache/spark/ml/feature/ElementwiseProductSuite.scala | 35
2 files changed, 45 insertions(+), 3 deletions(-)
diff --git a/mllib/src/main/scala/org/apache/spark/ml/feature/ElementwiseProduct.scala b/mllib/src/main/scala/org/apache/spark/ml/feature/ElementwiseProduct.scala
index 1e758cb775..2c7ffdb7ba 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/feature/ElementwiseProduct.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/feature/ElementwiseProduct.scala
@@ -17,10 +17,10 @@
package org.apache.spark.ml.feature
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, Since}
import org.apache.spark.ml.UnaryTransformer
import org.apache.spark.ml.param.Param
-import org.apache.spark.ml.util.Identifiable
+import org.apache.spark.ml.util.{DefaultParamsReadable, DefaultParamsWritable, Identifiable}
import org.apache.spark.mllib.feature
import org.apache.spark.mllib.linalg.{Vector, VectorUDT}
import org.apache.spark.sql.types.DataType
@@ -33,7 +33,7 @@ import org.apache.spark.sql.types.DataType
*/
@Experimental
class ElementwiseProduct(override val uid: String)
- extends UnaryTransformer[Vector, Vector, ElementwiseProduct] {
+ extends UnaryTransformer[Vector, Vector, ElementwiseProduct] with DefaultParamsWritable {
def this() = this(Identifiable.randomUID("elemProd"))
@@ -57,3 +57,10 @@ class ElementwiseProduct(override val uid: String)
override protected def outputDataType: DataType = new VectorUDT()
}
+
+@Since("2.0.0")
+object ElementwiseProduct extends DefaultParamsReadable[ElementwiseProduct] {
+
+ @Since("2.0.0")
+ override def load(path: String): ElementwiseProduct = super.load(path)
+}
diff --git a/mllib/src/test/scala/org/apache/spark/ml/feature/ElementwiseProductSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/feature/ElementwiseProductSuite.scala
new file mode 100644
index 0000000000..fc1c05de23
--- /dev/null
+++ b/mllib/src/test/scala/org/apache/spark/ml/feature/ElementwiseProductSuite.scala
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.ml.feature
+
+import org.apache.spark.SparkFunSuite
+import org.apache.spark.ml.util.DefaultReadWriteTest
+import org.apache.spark.mllib.linalg.Vectors
+import org.apache.spark.mllib.util.MLlibTestSparkContext
+
+class ElementwiseProductSuite
+ extends SparkFunSuite with MLlibTestSparkContext with DefaultReadWriteTest {
+
+ test("read/write") {
+ val ep = new ElementwiseProduct()
+ .setInputCol("myInputCol")
+ .setOutputCol("myOutputCol")
+ .setScalingVec(Vectors.dense(0.1, 0.2))
+ testDefaultReadWrite(ep)
+ }
+}