aboutsummaryrefslogtreecommitdiff
path: root/core/src/test/scala
diff options
context:
space:
mode:
authorLiwei Lin <lwlin7@gmail.com>2016-09-07 10:04:00 +0100
committerSean Owen <sowen@cloudera.com>2016-09-07 10:04:00 +0100
commit3ce3a282c8463408f9a2db93c1748e8df8087e07 (patch)
tree7814535174f3ef7294cfd20e4dfeae28fecd4693 /core/src/test/scala
parent9fccde4ff80fb0fd65a9e90eb3337965e4349de4 (diff)
downloadspark-3ce3a282c8463408f9a2db93c1748e8df8087e07.tar.gz
spark-3ce3a282c8463408f9a2db93c1748e8df8087e07.tar.bz2
spark-3ce3a282c8463408f9a2db93c1748e8df8087e07.zip
[SPARK-17359][SQL][MLLIB] Use ArrayBuffer.+=(A) instead of ArrayBuffer.append(A) in performance critical paths
## What changes were proposed in this pull request?

We should generally use `ArrayBuffer.+=(A)` rather than `ArrayBuffer.append(A)`, because `append(A)` would involve extra boxing / unboxing.

## How was this patch tested?

N/A

Author: Liwei Lin <lwlin7@gmail.com>

Closes #14914 from lw-lin/append_to_plus_eq_v2.
Diffstat (limited to 'core/src/test/scala')
-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala                              | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/memory/MemoryManagerSuite.scala                         | 3
-rw-r--r--  core/src/test/scala/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriterSuite.scala    | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala                           | 4
4 files changed, 5 insertions, 6 deletions
diff --git a/core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala b/core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala
index 9ecf49b598..c9b3d657c2 100644
--- a/core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala
@@ -305,7 +305,7 @@ private[deploy] object IvyTestUtils {
val allFiles = ArrayBuffer[(String, File)](javaFile)
if (withPython) {
val pythonFile = createPythonFile(root)
- allFiles.append((pythonFile.getName, pythonFile))
+ allFiles += Tuple2(pythonFile.getName, pythonFile)
}
if (withR) {
val rFiles = createRFiles(root, className, artifact.groupId)
diff --git a/core/src/test/scala/org/apache/spark/memory/MemoryManagerSuite.scala b/core/src/test/scala/org/apache/spark/memory/MemoryManagerSuite.scala
index 38bf7e5e5a..eb2b3ffd15 100644
--- a/core/src/test/scala/org/apache/spark/memory/MemoryManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/memory/MemoryManagerSuite.scala
@@ -118,8 +118,7 @@ private[memory] trait MemoryManagerSuite extends SparkFunSuite with BeforeAndAft
if (numBytesToFree <= mm.storageMemoryUsed) {
// We can evict enough blocks to fulfill the request for space
mm.releaseStorageMemory(numBytesToFree, MemoryMode.ON_HEAP)
- evictedBlocks.append(
- (null, BlockStatus(StorageLevel.MEMORY_ONLY, numBytesToFree, 0L)))
+ evictedBlocks += Tuple2(null, BlockStatus(StorageLevel.MEMORY_ONLY, numBytesToFree, 0L))
numBytesToFree
} else {
// No blocks were evicted because eviction would not free enough space.
diff --git a/core/src/test/scala/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriterSuite.scala b/core/src/test/scala/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriterSuite.scala
index ed9428820f..442941685f 100644
--- a/core/src/test/scala/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriterSuite.scala
+++ b/core/src/test/scala/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriterSuite.scala
@@ -107,7 +107,7 @@ class BypassMergeSortShuffleWriterSuite extends SparkFunSuite with BeforeAndAfte
val blockId = new TempShuffleBlockId(UUID.randomUUID)
val file = new File(tempDir, blockId.name)
blockIdToFileMap.put(blockId, file)
- temporaryFilesCreated.append(file)
+ temporaryFilesCreated += file
(blockId, file)
}
})
diff --git a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
index c342b68f46..2695295d45 100644
--- a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
@@ -150,12 +150,12 @@ class SizeEstimatorSuite
val buf = new ArrayBuffer[DummyString]()
for (i <- 0 until 5000) {
- buf.append(new DummyString(new Array[Char](10)))
+ buf += new DummyString(new Array[Char](10))
}
assertResult(340016)(SizeEstimator.estimate(buf.toArray))
for (i <- 0 until 5000) {
- buf.append(new DummyString(arr))
+ buf += new DummyString(arr)
}
assertResult(683912)(SizeEstimator.estimate(buf.toArray))