/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.ml.feature

import org.apache.spark.SparkFunSuite
import org.apache.spark.ml.param.ParamsSuite
import org.apache.spark.ml.util.DefaultReadWriteTest
import org.apache.spark.mllib.feature.{IDFModel => OldIDFModel}
import org.apache.spark.mllib.linalg.{DenseVector, SparseVector, Vector, Vectors}
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.mllib.util.TestingUtils._
import org.apache.spark.sql.Row

class IDFSuite extends SparkFunSuite with MLlibTestSparkContext with DefaultReadWriteTest {
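
  /**
   * Scales each vector in `dataSet` by the given IDF vector: dense vectors are
   * multiplied element-wise, while sparse vectors scale only their stored
   * (index, value) pairs. Used to build the expected output of IDFModel.
   */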
  def scaleDataWithIDF(dataSet: Array[Vector], model: Vector): Array[Vector] = {
    dataSet.map {
      case data: DenseVector =>
        val res = data.toArray.zip(model.toArray).map { case (x, y) => x * y }
        Vectors.dense(res)
      case data: SparseVector =>
        val res = data.indices.zip(data.values).map { case (id, value) =>
          (id, value * model(id))
        }
        Vectors.sparse(data.size, res)
    }
  }
test("params") {
ParamsSuite.checkParams(new IDF)
val model = new IDFModel("idf", new OldIDFModel(Vectors.dense(1.0)))
ParamsSuite.checkParams(model)
}
test("compute IDF with default parameter") {
val numOfFeatures = 4
val data = Array(
Vectors.sparse(numOfFeatures, Array(1, 3), Array(1.0, 2.0)),
Vectors.dense(0.0, 1.0, 2.0, 3.0),
Vectors.sparse(numOfFeatures, Array(1), Array(1.0))
)
val numOfData = data.size
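    // Document frequencies for the four features are (0, 3, 1, 2); the default
    // IDF formula is log((m + 1) / (df + 1)), where m is the number of documents.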
    val idf = Vectors.dense(Array(0, 3, 1, 2).map { x =>
      math.log((numOfData + 1.0) / (x + 1.0))
    })
    val expected = scaleDataWithIDF(data, idf)

    val df = sqlContext.createDataFrame(data.zip(expected)).toDF("features", "expected")

    val idfModel = new IDF()
      .setInputCol("features")
      .setOutputCol("idfValue")
      .fit(df)

    idfModel.transform(df).select("idfValue", "expected").collect().foreach {
      case Row(x: Vector, y: Vector) =>
        assert(x ~== y absTol 1e-5, "Transformed vector is different from expected vector.")
    }
  }
test("compute IDF with setter") {
val numOfFeatures = 4
val data = Array(
Vectors.sparse(numOfFeatures, Array(1, 3), Array(1.0, 2.0)),
Vectors.dense(0.0, 1.0, 2.0, 3.0),
Vectors.sparse(numOfFeatures, Array(1), Array(1.0))
)
val numOfData = data.size
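    // With minDocFreq = 1, a feature must appear in at least one document to
    // receive a nonzero IDF, so the feature with df = 0 is zeroed out.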
    val idf = Vectors.dense(Array(0, 3, 1, 2).map { x =>
      if (x > 0) math.log((numOfData + 1.0) / (x + 1.0)) else 0
    })
    val expected = scaleDataWithIDF(data, idf)

    val df = sqlContext.createDataFrame(data.zip(expected)).toDF("features", "expected")

    val idfModel = new IDF()
      .setInputCol("features")
      .setOutputCol("idfValue")
      .setMinDocFreq(1)
      .fit(df)

    idfModel.transform(df).select("idfValue", "expected").collect().foreach {
      case Row(x: Vector, y: Vector) =>
        assert(x ~== y absTol 1e-5, "Transformed vector is different from expected vector.")
    }
  }
test("IDF read/write") {
val t = new IDF()
.setInputCol("myInputCol")
.setOutputCol("myOutputCol")
.setMinDocFreq(5)
testDefaultReadWrite(t)
}
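
  // Persistence of the fitted model must also round-trip the learned idf vector.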
test("IDFModel read/write") {
val instance = new IDFModel("myIDFModel", new OldIDFModel(Vectors.dense(1.0, 2.0)))
.setInputCol("myInputCol")
.setOutputCol("myOutputCol")
val newInstance = testDefaultReadWrite(instance)
assert(newInstance.idf === instance.idf)
}
}