aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorDavies Liu <davies@databricks.com>2016-04-10 18:10:44 -0700
committerDavies Liu <davies.liu@gmail.com>2016-04-10 18:10:44 -0700
commitfbf8d008833c985d0e222dd2360c7f7375caa68a (patch)
tree9cbdf187349935871b1fc51f8cc16128a07d7d8a
parenta7ce473bd0520c71154ed028f295dab64a7485fe (diff)
downloadspark-fbf8d008833c985d0e222dd2360c7f7375caa68a.tar.gz
spark-fbf8d008833c985d0e222dd2360c7f7375caa68a.tar.bz2
spark-fbf8d008833c985d0e222dd2360c7f7375caa68a.zip
[SPARK-14419] [MINOR] coding style cleanup
## What changes were proposed in this pull request?

Making them more consistent.

## How was this patch tested?

Existing tests.

Author: Davies Liu <davies@databricks.com>

Closes #12289 from davies/cleanup_style.
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/TungstenAggregate.scala2
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala35
2 files changed, 13 insertions, 24 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/TungstenAggregate.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/TungstenAggregate.scala
index 692fef703f..253592028c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/TungstenAggregate.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/TungstenAggregate.scala
@@ -454,7 +454,7 @@ case class TungstenAggregate(
val thisPlan = ctx.addReferenceObj("plan", this)
hashMapTerm = ctx.freshName("hashMap")
val hashMapClassName = classOf[UnsafeFixedWidthAggregationMap].getName
- ctx.addMutableState(hashMapClassName, hashMapTerm, s"")
+ ctx.addMutableState(hashMapClassName, hashMapTerm, "")
sorterTerm = ctx.freshName("sorter")
ctx.addMutableState(classOf[UnsafeKVExternalSorter].getName, sorterTerm, "")
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
index 68b5486faa..0427db4e3b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
@@ -122,13 +122,12 @@ private[joins] class UnsafeHashedRelation(
override def keyIsUnique: Boolean = binaryMap.numKeys() == binaryMap.numValues()
- override def asReadOnlyCopy(): UnsafeHashedRelation =
+ override def asReadOnlyCopy(): UnsafeHashedRelation = {
new UnsafeHashedRelation(numFields, binaryMap)
-
- override def estimatedSize: Long = {
- binaryMap.getTotalMemoryConsumption
}
+ override def estimatedSize: Long = binaryMap.getTotalMemoryConsumption
+
// re-used in get()/getValue()
var resultRow = new UnsafeRow(numFields)
@@ -374,8 +373,9 @@ private[execution] final class LongToUnsafeRowMap(var mm: TaskMemoryManager, cap
// do not support spilling
val got = mm.acquireExecutionMemory(size, MemoryMode.ON_HEAP, this)
if (got < size) {
- mm.releaseExecutionMemory(got, MemoryMode.ON_HEAP, this)
- throw new SparkException(s"Can't acquire $size bytes memory to build hash relation")
+ freeMemory(got)
+ throw new SparkException(s"Can't acquire $size bytes memory to build hash relation, " +
+ s"got $got bytes")
}
}
@@ -396,9 +396,7 @@ private[execution] final class LongToUnsafeRowMap(var mm: TaskMemoryManager, cap
init()
- def spill(size: Long, trigger: MemoryConsumer): Long = {
- 0L
- }
+ def spill(size: Long, trigger: MemoryConsumer): Long = 0L
/**
* Returns whether all the keys are unique.
@@ -408,9 +406,7 @@ private[execution] final class LongToUnsafeRowMap(var mm: TaskMemoryManager, cap
/**
* Returns total memory consumption.
*/
- def getTotalMemoryConsumption: Long = {
- array.length * 8 + page.length
- }
+ def getTotalMemoryConsumption: Long = array.length * 8 + page.length
/**
* Returns the first slot of array that store the keys (sparse mode).
@@ -423,9 +419,7 @@ private[execution] final class LongToUnsafeRowMap(var mm: TaskMemoryManager, cap
/**
* Returns the next probe in the array.
*/
- private def nextSlot(pos: Int): Int = {
- (pos + 2) & mask
- }
+ private def nextSlot(pos: Int): Int = (pos + 2) & mask
private def getRow(address: Long, resultRow: UnsafeRow): UnsafeRow = {
val offset = address >>> 32
@@ -674,9 +668,7 @@ private[joins] class LongHashedRelation(
override def asReadOnlyCopy(): LongHashedRelation = new LongHashedRelation(nFields, map)
- override def estimatedSize: Long = {
- map.getTotalMemoryConsumption
- }
+ override def estimatedSize: Long = map.getTotalMemoryConsumption
override def get(key: InternalRow): Iterator[InternalRow] = {
if (key.isNullAt(0)) {
@@ -694,12 +686,9 @@ private[joins] class LongHashedRelation(
}
}
- override def get(key: Long): Iterator[InternalRow] =
- map.get(key, resultRow)
+ override def get(key: Long): Iterator[InternalRow] = map.get(key, resultRow)
- override def getValue(key: Long): InternalRow = {
- map.getValue(key, resultRow)
- }
+ override def getValue(key: Long): InternalRow = map.getValue(key, resultRow)
override def keyIsUnique: Boolean = map.keyIsUnique