author	Pete Robbins <robbinspg@gmail.com>	2016-05-10 09:53:56 -0700
committer	Davies Liu <davies.liu@gmail.com>	2016-05-10 09:53:56 -0700
commit	2dfb9cd1f7e7f0438ce571aae7e3a7b77d4082b7 (patch)
tree	02c1a052775f84af6b7f842a1144bfe48eef3f51 /sql
parent	8a12580d25b1ce5abc45c600483fad69f90ca333 (diff)
[SPARK-15154] [SQL] Change key types to Long in tests
## What changes were proposed in this pull request?

As reported in the JIRA, the two tests changed here use a key of type Integer where the Spark SQL code assumes the type is Long. This PR changes the tests to use the correct key types.

## How was this patch tested?

Test builds run on both big-endian and little-endian platforms.

Author: Pete Robbins <robbinspg@gmail.com>

Closes #13009 from robbinspg/HashedRelationSuiteFix.
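For context, here is a minimal standalone sketch of the pattern the fixed tests adopt (the identifiers are taken from the patch below; the `mm` memory manager and the `LongHashedRelation` construction from the suite are omitted). The key column bound at ordinal 0 is declared as `LongType`, the rows are built with a genuine Long key via `Int.int2long`, and the key is read back with `getLong` rather than `getInt`. Reading an Int-sized key slot with `getLong` happens to give the expected value on little-endian platforms, which is presumably why the mismatch only surfaced on big-endian test builds.

```scala
// Sketch only: mirrors the patched test code, not a complete test case.
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{BoundReference, UnsafeProjection}
import org.apache.spark.sql.types.{IntegerType, LongType}

// The key column (ordinal 0) must be LongType: the hashed-relation code
// assumes a Long key and reads it back with getLong.
val unsafeProj = UnsafeProjection.create(
  Seq(BoundReference(0, LongType, nullable = false),
      BoundReference(1, IntegerType, nullable = true)))
val key = Seq(BoundReference(0, LongType, nullable = false))
// Int.int2long widens the loop index so the key is written as an 8-byte Long.
val rows = (0 until 100).map(i => unsafeProj(InternalRow(Int.int2long(i), i + 1)).copy())
// Read back: rows(i).getLong(0) == i and rows(i).getInt(1) == i + 1
```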
Diffstat (limited to 'sql')
-rw-r--r--	sql/core/src/test/scala/org/apache/spark/sql/execution/joins/HashedRelationSuite.scala	28
1 file changed, 14 insertions(+), 14 deletions(-)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/HashedRelationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/HashedRelationSuite.scala
index 9826a64fe2..b7b08dc4b1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/HashedRelationSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/HashedRelationSuite.scala
@@ -25,7 +25,7 @@ import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.test.SharedSQLContext
-import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
+import org.apache.spark.sql.types.{IntegerType, LongType, StringType, StructField, StructType}
import org.apache.spark.unsafe.map.BytesToBytesMap
import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.util.collection.CompactBuffer
@@ -112,14 +112,14 @@ class HashedRelationSuite extends SparkFunSuite with SharedSQLContext {
test("LongToUnsafeRowMap") {
val unsafeProj = UnsafeProjection.create(
- Seq(BoundReference(0, IntegerType, false), BoundReference(1, IntegerType, true)))
- val rows = (0 until 100).map(i => unsafeProj(InternalRow(i, i + 1)).copy())
- val key = Seq(BoundReference(0, IntegerType, false))
+ Seq(BoundReference(0, LongType, false), BoundReference(1, IntegerType, true)))
+ val rows = (0 until 100).map(i => unsafeProj(InternalRow(Int.int2long(i), i + 1)).copy())
+ val key = Seq(BoundReference(0, LongType, false))
val longRelation = LongHashedRelation(rows.iterator, key, 10, mm)
assert(longRelation.keyIsUnique)
(0 until 100).foreach { i =>
val row = longRelation.getValue(i)
- assert(row.getInt(0) === i)
+ assert(row.getLong(0) === i)
assert(row.getInt(1) === i + 1)
}
@@ -128,9 +128,9 @@ class HashedRelationSuite extends SparkFunSuite with SharedSQLContext {
(0 until 100).foreach { i =>
val rows = longRelation2.get(i).toArray
assert(rows.length === 2)
- assert(rows(0).getInt(0) === i)
+ assert(rows(0).getLong(0) === i)
assert(rows(0).getInt(1) === i + 1)
- assert(rows(1).getInt(0) === i)
+ assert(rows(1).getLong(0) === i)
assert(rows(1).getInt(1) === i + 1)
}
@@ -145,9 +145,9 @@ class HashedRelationSuite extends SparkFunSuite with SharedSQLContext {
(0 until 100).foreach { i =>
val rows = relation.get(i).toArray
assert(rows.length === 2)
- assert(rows(0).getInt(0) === i)
+ assert(rows(0).getLong(0) === i)
assert(rows(0).getInt(1) === i + 1)
- assert(rows(1).getInt(0) === i)
+ assert(rows(1).getLong(0) === i)
assert(rows(1).getInt(1) === i + 1)
}
}
@@ -155,20 +155,20 @@ class HashedRelationSuite extends SparkFunSuite with SharedSQLContext {
test("Spark-14521") {
val ser = new KryoSerializer(
(new SparkConf).set("spark.kryo.referenceTracking", "false")).newInstance()
- val key = Seq(BoundReference(0, IntegerType, false))
+ val key = Seq(BoundReference(0, LongType, false))
// Testing Kryo serialization of HashedRelation
val unsafeProj = UnsafeProjection.create(
- Seq(BoundReference(0, IntegerType, false), BoundReference(1, IntegerType, true)))
- val rows = (0 until 100).map(i => unsafeProj(InternalRow(i, i + 1)).copy())
+ Seq(BoundReference(0, LongType, false), BoundReference(1, IntegerType, true)))
+ val rows = (0 until 100).map(i => unsafeProj(InternalRow(Int.int2long(i), i + 1)).copy())
val longRelation = LongHashedRelation(rows.iterator ++ rows.iterator, key, 100, mm)
val longRelation2 = ser.deserialize[LongHashedRelation](ser.serialize(longRelation))
(0 until 100).foreach { i =>
val rows = longRelation2.get(i).toArray
assert(rows.length === 2)
- assert(rows(0).getInt(0) === i)
+ assert(rows(0).getLong(0) === i)
assert(rows(0).getInt(1) === i + 1)
- assert(rows(1).getInt(0) === i)
+ assert(rows(1).getLong(0) === i)
assert(rows(1).getInt(1) === i + 1)
}