Diffstat (limited to 'core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala')
-rw-r--r--  core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala  79
1 file changed, 79 insertions(+), 0 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala
new file mode 100644
index 0000000000..e519e3a548
--- /dev/null
+++ b/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.partial
+
+import java.util.{HashMap => JHashMap}
+import java.util.{Map => JMap}
+
+import scala.collection.Map
+import scala.collection.mutable.HashMap
+import scala.collection.JavaConversions.mapAsScalaMap
+
+import cern.jet.stat.Probability
+
+import it.unimi.dsi.fastutil.objects.{Object2LongOpenHashMap => OLMap}
+
+/**
+ * An ApproximateEvaluator for counts by key. Returns a map of key to confidence interval.
+ */
+private[spark] class GroupedCountEvaluator[T](totalOutputs: Int, confidence: Double)
+ extends ApproximateEvaluator[OLMap[T], Map[T, BoundedDouble]] {
+
+ var outputsMerged = 0
+ var sums = new OLMap[T] // Sum of counts for each key
+
+ override def merge(outputId: Int, taskResult: OLMap[T]) {
+ outputsMerged += 1
+ val iter = taskResult.object2LongEntrySet.fastIterator()
+ while (iter.hasNext) {
+ val entry = iter.next()
+ sums.put(entry.getKey, sums.getLong(entry.getKey) + entry.getLongValue)
+ }
+ }
+
+ override def currentResult(): Map[T, BoundedDouble] = {
+ if (outputsMerged == totalOutputs) {
+ val result = new JHashMap[T, BoundedDouble](sums.size)
+ val iter = sums.object2LongEntrySet.fastIterator()
+ while (iter.hasNext) {
+ val entry = iter.next()
+ val sum = entry.getLongValue()
+ result(entry.getKey) = new BoundedDouble(sum, 1.0, sum, sum)
+ }
+ result
+ } else if (outputsMerged == 0) {
+ new HashMap[T, BoundedDouble]
+ } else {
+ val p = outputsMerged.toDouble / totalOutputs
+ val confFactor = Probability.normalInverse(1 - (1 - confidence) / 2)
+ val result = new JHashMap[T, BoundedDouble](sums.size)
+ val iter = sums.object2LongEntrySet.fastIterator()
+ while (iter.hasNext) {
+ val entry = iter.next()
+ val sum = entry.getLongValue
+ val mean = (sum + 1 - p) / p
+ val variance = (sum + 1) * (1 - p) / (p * p)
+ val stdev = math.sqrt(variance)
+ val low = mean - confFactor * stdev
+ val high = mean + confFactor * stdev
+ result(entry.getKey) = new BoundedDouble(mean, confidence, low, high)
+ }
+ result
+ }
+ }
+}
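
For reference, the arithmetic in the partial-results branch above can be exercised on its own. The sketch below is not part of the commit: it reproduces the mean/variance/interval computation for a single key, assuming a fraction p of the output partitions has been merged. The 95% z value (1.96) is hard-coded in place of colt's Probability.normalInverse, and the object and method names are illustrative only.

// Standalone sketch of the normal-approximation extrapolation used above.
// Given a partial count observed after merging a fraction p of the outputs,
// return (estimate, low, high) for the requested confidence level.
object PartialCountSketch {
  def estimate(partialSum: Long, p: Double, z: Double = 1.96): (Double, Double, Double) = {
    val mean = (partialSum + 1 - p) / p                  // extrapolated total count
    val variance = (partialSum + 1) * (1 - p) / (p * p)  // variance of the extrapolation
    val stdev = math.sqrt(variance)
    (mean, mean - z * stdev, mean + z * stdev)
  }

  def main(args: Array[String]): Unit = {
    // e.g. a key seen 400 times after 40% of the partitions have reported
    val (est, low, high) = estimate(400L, 0.4)
    println(f"estimate=$est%.1f, 95%% interval=[$low%.1f, $high%.1f]")
  }
}

In Spark this evaluator backs the approximate counting APIs (e.g. RDD.countByValueApprox), which return a PartialResult whose values are the BoundedDouble intervals computed here.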