path: root/core
author    Josh Rosen <joshrosen@databricks.com>  2015-07-20 22:38:05 -0700
committer Josh Rosen <joshrosen@databricks.com>  2015-07-20 22:38:05 -0700
commit    c032b0bf92130dc4facb003f0deaeb1228aefded (patch)
tree      2a48b364f0afb53e1ff10dac554b54d54abfa11b /core
parent    4d97be95300f729391c17b4c162e3c7fba09b8bf (diff)
[SPARK-8797] [SPARK-9146] [SPARK-9145] [SPARK-9147] Support NaN ordering and equality comparisons in Spark SQL
This patch addresses an issue where queries that sorted float or double columns containing NaN values could fail with "Comparison method violates its general contract!" errors from TimSort. The root of this problem is that `NaN > anything`, `NaN == anything`, and `NaN < anything` all return `false`. Per the design specified in SPARK-9079, we have decided that `NaN = NaN` should return true and that NaN should appear last when sorting in ascending order (i.e. it is larger than any other numeric value).

In addition to implementing these semantics, this patch also adds canonicalization of NaN values in UnsafeRow, which is necessary in order to be able to do binary equality comparisons on equal NaNs that might have different bit representations (see SPARK-9147).

Author: Josh Rosen <joshrosen@databricks.com>

Closes #7194 from JoshRosen/nan and squashes the following commits:

983d4fc [Josh Rosen] Merge remote-tracking branch 'origin/master' into nan
88bd73c [Josh Rosen] Fix Row.equals()
a702e2e [Josh Rosen] normalization -> canonicalization
a7267cf [Josh Rosen] Normalize NaNs in UnsafeRow
fe629ae [Josh Rosen] Merge remote-tracking branch 'origin/master' into nan
fbb2a29 [Josh Rosen] Fix NaN comparisons in BinaryComparison expressions
c1fd4fe [Josh Rosen] Fold NaN test into existing test framework
b31eb19 [Josh Rosen] Uncomment failing tests
7fe67af [Josh Rosen] Support NaN == NaN (SPARK-9145)
58bad2c [Josh Rosen] Revert "Compare rows' string representations to work around NaN incomparability."
fc6b4d2 [Josh Rosen] Update CodeGenerator
3998ef2 [Josh Rosen] Remove unused code
a2ba2e7 [Josh Rosen] Fix prefix comparision for NaNs
a30d371 [Josh Rosen] Compare rows' string representations to work around NaN incomparability.
6f03f85 [Josh Rosen] Fix bug in Double / Float ordering
42a1ad5 [Josh Rosen] Stop filtering NaNs in UnsafeExternalSortSuite
bfca524 [Josh Rosen] Change ordering so that NaN is maximum value.
8d7be61 [Josh Rosen] Update randomized test to use ScalaTest's assume()
b20837b [Josh Rosen] Add failing test for new NaN comparision ordering
5b88b2b [Josh Rosen] Fix compilation of CodeGenerationSuite
d907b5b [Josh Rosen] Merge remote-tracking branch 'origin/master' into nan
630ebc5 [Josh Rosen] Specify an ordering for NaN values.
9bf195a [Josh Rosen] Re-enable NaNs in CodeGenerationSuite to produce more regression tests
13fc06a [Josh Rosen] Add regression test for NaN sorting issue
f9efbb5 [Josh Rosen] Fix ORDER BY NULL
e7dc4fb [Josh Rosen] Add very generic test for ordering
7d5c13e [Josh Rosen] Add regression test for SPARK-8782 (ORDER BY NULL)
b55875a [Josh Rosen] Generate doubles and floats over entire possible range.
5acdd5c [Josh Rosen] Infinity and NaN are interesting.
ab76cbd [Josh Rosen] Move code to Catalyst package.
d2b4a4a [Josh Rosen] Add random data generator test utilities to Spark SQL.
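The need for canonicalization comes from the fact that IEEE 754 allows many distinct NaN bit patterns, so a byte-for-byte row comparison would treat two NaN values as unequal unless the writer first collapses them to a single canonical encoding. The following self-contained Scala sketch (illustrative only; the object name is made up and this is not code from the patch) demonstrates the problem and the standard-library canonicalization it relies on:

object NaNCanonicalizationSketch {
  def main(args: Array[String]): Unit = {
    // Two different NaN bit patterns (exponent all ones, non-zero mantissa).
    val nan1 = java.lang.Double.longBitsToDouble(0x7ff0000000000001L)
    val nan2 = java.lang.Double.longBitsToDouble(0x7fffffffffffffffL)
    assert(nan1.isNaN && nan2.isNaN)
    // Raw bits differ, so a binary (byte-level) equality check would say "not equal".
    assert(java.lang.Double.doubleToRawLongBits(nan1) !=
           java.lang.Double.doubleToRawLongBits(nan2))
    // doubleToLongBits collapses every NaN to the canonical pattern, which is the
    // kind of normalization the patch applies to NaN values before writing them.
    assert(java.lang.Double.doubleToLongBits(nan1) ==
           java.lang.Double.doubleToLongBits(nan2))
  }
}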
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java        |  5
-rw-r--r--  core/src/main/scala/org/apache/spark/util/Utils.scala                                          | 28
-rw-r--r--  core/src/test/scala/org/apache/spark/util/UtilsSuite.scala                                     | 31
-rw-r--r--  core/src/test/scala/org/apache/spark/util/collection/unsafe/sort/PrefixComparatorsSuite.scala  | 25
4 files changed, 87 insertions(+), 2 deletions(-)
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
index 438742565c..bf1bc5dffb 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
@@ -23,6 +23,7 @@ import com.google.common.primitives.UnsignedBytes;
import org.apache.spark.annotation.Private;
import org.apache.spark.unsafe.types.UTF8String;
+import org.apache.spark.util.Utils;
@Private
public class PrefixComparators {
@@ -82,7 +83,7 @@ public class PrefixComparators {
public int compare(long aPrefix, long bPrefix) {
float a = Float.intBitsToFloat((int) aPrefix);
float b = Float.intBitsToFloat((int) bPrefix);
- return (a < b) ? -1 : (a > b) ? 1 : 0;
+ return Utils.nanSafeCompareFloats(a, b);
}
public long computePrefix(float value) {
@@ -97,7 +98,7 @@ public class PrefixComparators {
public int compare(long aPrefix, long bPrefix) {
double a = Double.longBitsToDouble(aPrefix);
double b = Double.longBitsToDouble(bPrefix);
- return (a < b) ? -1 : (a > b) ? 1 : 0;
+ return Utils.nanSafeCompareDoubles(a, b);
}
public long computePrefix(double value) {
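The replaced ternary expressions are what triggered the TimSort failures described above: with `(a < b) ? -1 : (a > b) ? 1 : 0`, every comparison involving NaN falls through to 0, so NaN appears "equal" to values that are not equal to each other. A minimal Scala sketch (hypothetical names, not part of this diff) of the resulting contract violation:

object OldComparatorContractSketch {
  // Mirrors the old ternary comparison that the patch removes.
  def oldCompare(a: Double, b: Double): Int =
    if (a < b) -1 else if (a > b) 1 else 0

  def main(args: Array[String]): Unit = {
    assert(oldCompare(Double.NaN, 1.0) == 0) // NaN "equals" 1.0
    assert(oldCompare(Double.NaN, 2.0) == 0) // NaN "equals" 2.0
    assert(oldCompare(1.0, 2.0) == -1)       // ...but 1.0 < 2.0, so the induced
                                             // equality is inconsistent and TimSort
                                             // may reject the ordering at runtime.
  }
}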
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index e6374f17d8..c5816949cd 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1586,6 +1586,34 @@ private[spark] object Utils extends Logging {
hashAbs
}
+ /**
+ * NaN-safe version of [[java.lang.Double.compare()]] which allows NaN values to be compared
+ * according to semantics where NaN == NaN and NaN > any non-NaN double.
+ */
+ def nanSafeCompareDoubles(x: Double, y: Double): Int = {
+ val xIsNan: Boolean = java.lang.Double.isNaN(x)
+ val yIsNan: Boolean = java.lang.Double.isNaN(y)
+ if ((xIsNan && yIsNan) || (x == y)) 0
+ else if (xIsNan) 1
+ else if (yIsNan) -1
+ else if (x > y) 1
+ else -1
+ }
+
+ /**
+ * NaN-safe version of [[java.lang.Float.compare()]] which allows NaN values to be compared
+ * according to semantics where NaN == NaN and NaN > any non-NaN float.
+ */
+ def nanSafeCompareFloats(x: Float, y: Float): Int = {
+ val xIsNan: Boolean = java.lang.Float.isNaN(x)
+ val yIsNan: Boolean = java.lang.Float.isNaN(y)
+ if ((xIsNan && yIsNan) || (x == y)) 0
+ else if (xIsNan) 1
+ else if (yIsNan) -1
+ else if (x > y) 1
+ else -1
+ }
+
/** Returns the system properties map that is thread-safe to iterator over. It gets the
* properties which have been set explicitly, as well as those for which only a default value
* has been defined. */
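As a quick usage sketch of the new helpers (illustrative only; because Utils is private[spark], this would have to live somewhere under the org.apache.spark package to compile), the expected results mirror the UtilsSuite assertions added below:

package org.apache.spark.util

object NanSafeCompareUsageSketch {
  def main(args: Array[String]): Unit = {
    // NaN is equal to NaN and greater than everything else, including +Infinity.
    assert(Utils.nanSafeCompareDoubles(Double.NaN, Double.NaN) == 0)
    assert(Utils.nanSafeCompareDoubles(Double.NaN, Double.PositiveInfinity) == 1)
    assert(Utils.nanSafeCompareDoubles(Double.PositiveInfinity, Double.NaN) == -1)
    // Non-NaN values keep their usual ordering.
    assert(Utils.nanSafeCompareFloats(1.0f, 2.0f) == -1)
  }
}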
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index c7638507c8..8f7e402d5f 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -18,6 +18,7 @@
package org.apache.spark.util
import java.io.{File, ByteArrayOutputStream, ByteArrayInputStream, FileOutputStream}
+import java.lang.{Double => JDouble, Float => JFloat}
import java.net.{BindException, ServerSocket, URI}
import java.nio.{ByteBuffer, ByteOrder}
import java.text.DecimalFormatSymbols
@@ -689,4 +690,34 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
// scalastyle:on println
assert(buffer.toString === "t circular test circular\n")
}
+
+ test("nanSafeCompareDoubles") {
+ def shouldMatchDefaultOrder(a: Double, b: Double): Unit = {
+ assert(Utils.nanSafeCompareDoubles(a, b) === JDouble.compare(a, b))
+ assert(Utils.nanSafeCompareDoubles(b, a) === JDouble.compare(b, a))
+ }
+ shouldMatchDefaultOrder(0d, 0d)
+ shouldMatchDefaultOrder(0d, 1d)
+ shouldMatchDefaultOrder(Double.MinValue, Double.MaxValue)
+ assert(Utils.nanSafeCompareDoubles(Double.NaN, Double.NaN) === 0)
+ assert(Utils.nanSafeCompareDoubles(Double.NaN, Double.PositiveInfinity) === 1)
+ assert(Utils.nanSafeCompareDoubles(Double.NaN, Double.NegativeInfinity) === 1)
+ assert(Utils.nanSafeCompareDoubles(Double.PositiveInfinity, Double.NaN) === -1)
+ assert(Utils.nanSafeCompareDoubles(Double.NegativeInfinity, Double.NaN) === -1)
+ }
+
+ test("nanSafeCompareFloats") {
+ def shouldMatchDefaultOrder(a: Float, b: Float): Unit = {
+ assert(Utils.nanSafeCompareFloats(a, b) === JFloat.compare(a, b))
+ assert(Utils.nanSafeCompareFloats(b, a) === JFloat.compare(b, a))
+ }
+ shouldMatchDefaultOrder(0f, 0f)
+ shouldMatchDefaultOrder(1f, 1f)
+ shouldMatchDefaultOrder(Float.MinValue, Float.MaxValue)
+ assert(Utils.nanSafeCompareFloats(Float.NaN, Float.NaN) === 0)
+ assert(Utils.nanSafeCompareFloats(Float.NaN, Float.PositiveInfinity) === 1)
+ assert(Utils.nanSafeCompareFloats(Float.NaN, Float.NegativeInfinity) === 1)
+ assert(Utils.nanSafeCompareFloats(Float.PositiveInfinity, Float.NaN) === -1)
+ assert(Utils.nanSafeCompareFloats(Float.NegativeInfinity, Float.NaN) === -1)
+ }
}
diff --git a/core/src/test/scala/org/apache/spark/util/collection/unsafe/sort/PrefixComparatorsSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/unsafe/sort/PrefixComparatorsSuite.scala
index dd505dfa7d..dc03e374b5 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/unsafe/sort/PrefixComparatorsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/unsafe/sort/PrefixComparatorsSuite.scala
@@ -47,4 +47,29 @@ class PrefixComparatorsSuite extends SparkFunSuite with PropertyChecks {
forAll (regressionTests) { (s1: String, s2: String) => testPrefixComparison(s1, s2) }
forAll { (s1: String, s2: String) => testPrefixComparison(s1, s2) }
}
+
+ test("float prefix comparator handles NaN properly") {
+ val nan1: Float = java.lang.Float.intBitsToFloat(0x7f800001)
+ val nan2: Float = java.lang.Float.intBitsToFloat(0x7fffffff)
+ assert(nan1.isNaN)
+ assert(nan2.isNaN)
+ val nan1Prefix = PrefixComparators.FLOAT.computePrefix(nan1)
+ val nan2Prefix = PrefixComparators.FLOAT.computePrefix(nan2)
+ assert(nan1Prefix === nan2Prefix)
+ val floatMaxPrefix = PrefixComparators.FLOAT.computePrefix(Float.MaxValue)
+ assert(PrefixComparators.FLOAT.compare(nan1Prefix, floatMaxPrefix) === 1)
+ }
+
+ test("double prefix comparator handles NaNs properly") {
+ val nan1: Double = java.lang.Double.longBitsToDouble(0x7ff0000000000001L)
+ val nan2: Double = java.lang.Double.longBitsToDouble(0x7fffffffffffffffL)
+ assert(nan1.isNaN)
+ assert(nan2.isNaN)
+ val nan1Prefix = PrefixComparators.DOUBLE.computePrefix(nan1)
+ val nan2Prefix = PrefixComparators.DOUBLE.computePrefix(nan2)
+ assert(nan1Prefix === nan2Prefix)
+ val doubleMaxPrefix = PrefixComparators.DOUBLE.computePrefix(Double.MaxValue)
+ assert(PrefixComparators.DOUBLE.compare(nan1Prefix, doubleMaxPrefix) === 1)
+ }
+
}