author    | witgo <witgo@qq.com>          | 2014-07-08 00:31:42 -0700
committer | Reynold Xin <rxin@apache.org> | 2014-07-08 00:31:42 -0700
commit    | 3cd5029be709307415f911236472a685e406e763 (patch)
tree      | 451f83c229c0fd422617117233e534077335f905 /sql/core
parent    | 0128905eea9f8c597ca238b14c18908995511e76 (diff)
Resolve sbt warnings during build Ⅱ
Author: witgo <witgo@qq.com>
Closes #1153 from witgo/expectResult and squashes the following commits:
97541d8 [witgo] merge master
ead26e7 [witgo] Resolve sbt warnings during build
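For context: the sbt warnings came from ScalaTest, which deprecated `expectResult` in favor of the identically-behaved `assertResult`, so every hunk below is the same mechanical rename. Both forms take the expected value (plus an optional clue message) in the first parameter list and the actual expression in the second. A minimal sketch of the pattern (hypothetical suite, not part of this patch):

```scala
import org.scalatest.FunSuite

// Hypothetical suite illustrating the rename; not from this patch.
class RenameSketchSuite extends FunSuite {
  test("assertResult replaces expectResult") {
    val buffer = java.nio.ByteBuffer.allocate(4)
    buffer.putInt(42).rewind()

    // Before (emits a deprecation warning under ScalaTest 2.x):
    //   expectResult(42, "Wrong value")(buffer.getInt())
    // After -- identical semantics, no warning:
    assertResult(42, "Wrong value")(buffer.getInt())
  }
}
```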
Diffstat (limited to 'sql/core')
 sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala                    |  6 +++---
 sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnTypeSuite.scala                     |  8 ++++----
 sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnBuilderSuite.scala          | 14 +++++++-------
 sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala      | 10 +++++-----
 sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/DictionaryEncodingSuite.scala | 10 +++++-----
 sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/IntegralDeltaSuite.scala      | 16 ++++++++--------
 sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/RunLengthEncodingSuite.scala  | 10 +++++-----
7 files changed, 37 insertions(+), 37 deletions(-)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala
index 78640b876d..6f0d46d816 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala
@@ -39,7 +39,7 @@ class ColumnStatsSuite extends FunSuite {
     test(s"$columnStatsName: empty") {
       val columnStats = columnStatsClass.newInstance()
-      expectResult(columnStats.initialBounds, "Wrong initial bounds") {
+      assertResult(columnStats.initialBounds, "Wrong initial bounds") {
         (columnStats.lowerBound, columnStats.upperBound)
       }
     }
   }
@@ -54,8 +54,8 @@ class ColumnStatsSuite extends FunSuite {
       val values = rows.map(_.head.asInstanceOf[T#JvmType])
       val ordering = columnType.dataType.ordering.asInstanceOf[Ordering[T#JvmType]]
 
-      expectResult(values.min(ordering), "Wrong lower bound")(columnStats.lowerBound)
-      expectResult(values.max(ordering), "Wrong upper bound")(columnStats.upperBound)
+      assertResult(values.min(ordering), "Wrong lower bound")(columnStats.lowerBound)
+      assertResult(values.max(ordering), "Wrong upper bound")(columnStats.upperBound)
     }
   }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnTypeSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnTypeSuite.scala
index 71be410567..314b7d317e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnTypeSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnTypeSuite.scala
@@ -35,7 +35,7 @@ class ColumnTypeSuite extends FunSuite with Logging {
       BOOLEAN -> 1, STRING -> 8, BINARY -> 16, GENERIC -> 16)
 
     checks.foreach { case (columnType, expectedSize) =>
-      expectResult(expectedSize, s"Wrong defaultSize for $columnType") {
+      assertResult(expectedSize, s"Wrong defaultSize for $columnType") {
         columnType.defaultSize
       }
     }
@@ -47,7 +47,7 @@ class ColumnTypeSuite extends FunSuite with Logging {
         value: JvmType,
         expected: Int) {
 
-      expectResult(expected, s"Wrong actualSize for $columnType") {
+      assertResult(expected, s"Wrong actualSize for $columnType") {
         columnType.actualSize(value)
       }
     }
@@ -127,7 +127,7 @@ class ColumnTypeSuite extends FunSuite with Logging {
     val length = buffer.getInt()
     assert(length === serializedObj.length)
 
-    expectResult(obj, "Deserialized object didn't equal to the original object") {
+    assertResult(obj, "Deserialized object didn't equal to the original object") {
       val bytes = new Array[Byte](length)
       buffer.get(bytes, 0, length)
       SparkSqlSerializer.deserialize(bytes)
@@ -136,7 +136,7 @@ class ColumnTypeSuite extends FunSuite with Logging {
     buffer.rewind()
     buffer.putInt(serializedObj.length).put(serializedObj)
 
-    expectResult(obj, "Deserialized object didn't equal to the original object") {
+    assertResult(obj, "Deserialized object didn't equal to the original object") {
       buffer.rewind()
       SparkSqlSerializer.deserialize(GENERIC.extract(buffer))
     }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnBuilderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnBuilderSuite.scala
index d9d1e1bfdd..d8898527ba 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnBuilderSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnBuilderSuite.scala
@@ -48,8 +48,8 @@ class NullableColumnBuilderSuite extends FunSuite {
       val columnBuilder = TestNullableColumnBuilder(columnType)
       val buffer = columnBuilder.build()
 
-      expectResult(columnType.typeId, "Wrong column type ID")(buffer.getInt())
-      expectResult(0, "Wrong null count")(buffer.getInt())
+      assertResult(columnType.typeId, "Wrong column type ID")(buffer.getInt())
+      assertResult(0, "Wrong null count")(buffer.getInt())
       assert(!buffer.hasRemaining)
     }
 
@@ -63,8 +63,8 @@ class NullableColumnBuilderSuite extends FunSuite {
 
       val buffer = columnBuilder.build()
 
-      expectResult(columnType.typeId, "Wrong column type ID")(buffer.getInt())
-      expectResult(0, "Wrong null count")(buffer.getInt())
+      assertResult(columnType.typeId, "Wrong column type ID")(buffer.getInt())
+      assertResult(0, "Wrong null count")(buffer.getInt())
     }
 
     test(s"$typeName column builder: null values") {
@@ -79,11 +79,11 @@ class NullableColumnBuilderSuite extends FunSuite {
 
       val buffer = columnBuilder.build()
 
-      expectResult(columnType.typeId, "Wrong column type ID")(buffer.getInt())
-      expectResult(4, "Wrong null count")(buffer.getInt())
+      assertResult(columnType.typeId, "Wrong column type ID")(buffer.getInt())
+      assertResult(4, "Wrong null count")(buffer.getInt())
 
       // For null positions
-      (1 to 7 by 2).foreach(expectResult(_, "Wrong null position")(buffer.getInt()))
+      (1 to 7 by 2).foreach(assertResult(_, "Wrong null position")(buffer.getInt()))
 
       // For non-null values
       (0 until 4).foreach { _ =>
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala
index 93259a19b9..5fba004809 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala
@@ -48,18 +48,18 @@ class BooleanBitSetSuite extends FunSuite {
     }
 
     // 4 extra bytes for compression scheme type ID
-    expectResult(headerSize + compressedSize, "Wrong buffer capacity")(buffer.capacity)
+    assertResult(headerSize + compressedSize, "Wrong buffer capacity")(buffer.capacity)
 
     // Skips column header
     buffer.position(headerSize)
-    expectResult(BooleanBitSet.typeId, "Wrong compression scheme ID")(buffer.getInt())
-    expectResult(count, "Wrong element count")(buffer.getInt())
+    assertResult(BooleanBitSet.typeId, "Wrong compression scheme ID")(buffer.getInt())
+    assertResult(count, "Wrong element count")(buffer.getInt())
 
     var word = 0: Long
     for (i <- 0 until count) {
       val bit = i % BITS_PER_LONG
       word = if (bit == 0) buffer.getLong() else word
-      expectResult(values(i), s"Wrong value in compressed buffer, index=$i") {
+      assertResult(values(i), s"Wrong value in compressed buffer, index=$i") {
         (word & ((1: Long) << bit)) != 0
       }
     }
@@ -75,7 +75,7 @@ class BooleanBitSetSuite extends FunSuite {
     if (values.nonEmpty) {
       values.foreach {
         assert(decoder.hasNext)
-        expectResult(_, "Wrong decoded value")(decoder.next())
+        assertResult(_, "Wrong decoded value")(decoder.next())
       }
     }
     assert(!decoder.hasNext)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/DictionaryEncodingSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/DictionaryEncodingSuite.scala
index 198dcd8819..d8ae2a2677 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/DictionaryEncodingSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/DictionaryEncodingSuite.scala
@@ -71,22 +71,22 @@ class DictionaryEncodingSuite extends FunSuite {
         // 2 bytes for each `Short`
         val compressedSize = 4 + dictionarySize + 2 * inputSeq.length
 
         // 4 extra bytes for compression scheme type ID
-        expectResult(headerSize + compressedSize, "Wrong buffer capacity")(buffer.capacity)
+        assertResult(headerSize + compressedSize, "Wrong buffer capacity")(buffer.capacity)
 
         // Skips column header
         buffer.position(headerSize)
-        expectResult(DictionaryEncoding.typeId, "Wrong compression scheme ID")(buffer.getInt())
+        assertResult(DictionaryEncoding.typeId, "Wrong compression scheme ID")(buffer.getInt())
 
         val dictionary = buildDictionary(buffer).toMap
 
         dictValues.foreach { i =>
-          expectResult(i, "Wrong dictionary entry") {
+          assertResult(i, "Wrong dictionary entry") {
             dictionary(values(i))
           }
         }
 
         inputSeq.foreach { i =>
-          expectResult(i.toShort, "Wrong column element value")(buffer.getShort())
+          assertResult(i.toShort, "Wrong column element value")(buffer.getShort())
         }
 
         // -------------
@@ -101,7 +101,7 @@ class DictionaryEncodingSuite extends FunSuite {
       if (inputSeq.nonEmpty) {
         inputSeq.foreach { i =>
           assert(decoder.hasNext)
-          expectResult(values(i), "Wrong decoded value")(decoder.next())
+          assertResult(values(i), "Wrong decoded value")(decoder.next())
         }
       }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/IntegralDeltaSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/IntegralDeltaSuite.scala
index 46af6e001c..17619dcf97 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/IntegralDeltaSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/IntegralDeltaSuite.scala
@@ -69,21 +69,21 @@ class IntegralDeltaSuite extends FunSuite {
       })
 
       // 4 extra bytes for compression scheme type ID
-      expectResult(headerSize + compressedSize, "Wrong buffer capacity")(buffer.capacity)
+      assertResult(headerSize + compressedSize, "Wrong buffer capacity")(buffer.capacity)
 
       buffer.position(headerSize)
-      expectResult(scheme.typeId, "Wrong compression scheme ID")(buffer.getInt())
+      assertResult(scheme.typeId, "Wrong compression scheme ID")(buffer.getInt())
 
       if (input.nonEmpty) {
-        expectResult(Byte.MinValue, "The first byte should be an escaping mark")(buffer.get())
-        expectResult(input.head, "The first value is wrong")(columnType.extract(buffer))
+        assertResult(Byte.MinValue, "The first byte should be an escaping mark")(buffer.get())
+        assertResult(input.head, "The first value is wrong")(columnType.extract(buffer))
 
         (input.tail, deltas).zipped.foreach { (value, delta) =>
           if (math.abs(delta) <= Byte.MaxValue) {
-            expectResult(delta, "Wrong delta")(buffer.get())
+            assertResult(delta, "Wrong delta")(buffer.get())
           } else {
-            expectResult(Byte.MinValue, "Expecting escaping mark here")(buffer.get())
-            expectResult(value, "Wrong value")(columnType.extract(buffer))
+            assertResult(Byte.MinValue, "Expecting escaping mark here")(buffer.get())
+            assertResult(value, "Wrong value")(columnType.extract(buffer))
           }
         }
       }
@@ -99,7 +99,7 @@ class IntegralDeltaSuite extends FunSuite {
      if (input.nonEmpty) {
        input.foreach{
          assert(decoder.hasNext)
-          expectResult(_, "Wrong decoded value")(decoder.next())
+          assertResult(_, "Wrong decoded value")(decoder.next())
        }
      }
      assert(!decoder.hasNext)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/RunLengthEncodingSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/RunLengthEncodingSuite.scala
index d3b73ba19d..40115beb98 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/RunLengthEncodingSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/RunLengthEncodingSuite.scala
@@ -61,15 +61,15 @@ class RunLengthEncodingSuite extends FunSuite {
       }.sum
 
       // 4 extra bytes for compression scheme type ID
-      expectResult(headerSize + compressedSize, "Wrong buffer capacity")(buffer.capacity)
+      assertResult(headerSize + compressedSize, "Wrong buffer capacity")(buffer.capacity)
 
       // Skips column header
       buffer.position(headerSize)
-      expectResult(RunLengthEncoding.typeId, "Wrong compression scheme ID")(buffer.getInt())
+      assertResult(RunLengthEncoding.typeId, "Wrong compression scheme ID")(buffer.getInt())
 
      inputRuns.foreach { case (index, run) =>
-        expectResult(values(index), "Wrong column element value")(columnType.extract(buffer))
-        expectResult(run, "Wrong run length")(buffer.getInt())
+        assertResult(values(index), "Wrong column element value")(columnType.extract(buffer))
+        assertResult(run, "Wrong run length")(buffer.getInt())
      }
 
      // -------------
@@ -84,7 +84,7 @@ class RunLengthEncodingSuite extends FunSuite {
      if (inputSeq.nonEmpty) {
        inputSeq.foreach { i =>
          assert(decoder.hasNext)
-          expectResult(values(i), "Wrong decoded value")(decoder.next())
+          assertResult(values(i), "Wrong decoded value")(decoder.next())
        }
      }
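One Scala subtlety worth noting in the `BooleanBitSetSuite` and `IntegralDeltaSuite` hunks: in `values.foreach { assert(decoder.hasNext); assertResult(_, ...)(decoder.next()) }`, the braces form a block whose value is the function literal produced by the placeholder, so the leading `assert` executes once, when the block is evaluated, not once per element. A standalone sketch of that behavior (names are illustrative, not from the patch):

```scala
// Standalone demonstration; names are illustrative, not from the patch.
object ForeachBlockDemo extends App {
  val decoder = Iterator(true, false, true)

  Seq(true, false, true).foreach {
    // Runs exactly once, when the block is evaluated to obtain the
    // function literal below -- not once per element.
    println("checked decoder.hasNext once: " + decoder.hasNext)
    // This function literal is what foreach applies to each element.
    v => assert(v == decoder.next())
  }
}
```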