about | summary | refs | log | tree | commit | diff
path: root/graphx/src
diff options
context:
space:
mode:
author    Reynold Xin <rxin@databricks.com>  2014-12-31 17:07:47 -0800
committer Reynold Xin <rxin@databricks.com>  2014-12-31 17:07:47 -0800
commit    7749dd6c36a182478b20f4636734c8db0b7ddb00 (patch)
tree      2466b0a5c668cf6579a9d982241da097033621f9 /graphx/src
parent    4bb12488d56ea651c56d9688996b464b99095582 (diff)
download  spark-7749dd6c36a182478b20f4636734c8db0b7ddb00.tar.gz
          spark-7749dd6c36a182478b20f4636734c8db0b7ddb00.tar.bz2
          spark-7749dd6c36a182478b20f4636734c8db0b7ddb00.zip
[SPARK-5038] Add explicit return type for implicit functions.
As we learned in #3580, not explicitly typing implicit functions can lead to compiler bugs and potentially unexpected runtime behavior. This is a follow-up PR for the rest of Spark (outside Spark SQL). The original PR for Spark SQL can be found at https://github.com/apache/spark/pull/3859

Author: Reynold Xin <rxin@databricks.com>

Closes #3860 from rxin/implicit and squashes the following commits:

73702f9 [Reynold Xin] [SPARK-5038] Add explicit return type for implicit functions.
Diffstat (limited to 'graphx/src')
-rw-r--r--  graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartitionBuilder.scala      63
-rw-r--r--  graphx/src/main/scala/org/apache/spark/graphx/impl/ShippableVertexPartition.scala   4
-rw-r--r--  graphx/src/main/scala/org/apache/spark/graphx/impl/VertexPartition.scala            4
-rw-r--r--  graphx/src/main/scala/org/apache/spark/graphx/impl/VertexPartitionBaseOps.scala     4
4 files changed, 38 insertions, 37 deletions
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartitionBuilder.scala b/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartitionBuilder.scala
index 409cf60977..906d42328f 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartitionBuilder.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartitionBuilder.scala
@@ -129,44 +129,45 @@ private[impl] case class EdgeWithLocalIds[@specialized ED](
srcId: VertexId, dstId: VertexId, localSrcId: Int, localDstId: Int, attr: ED)
private[impl] object EdgeWithLocalIds {
- implicit def lexicographicOrdering[ED] = new Ordering[EdgeWithLocalIds[ED]] {
- override def compare(a: EdgeWithLocalIds[ED], b: EdgeWithLocalIds[ED]): Int = {
- if (a.srcId == b.srcId) {
- if (a.dstId == b.dstId) 0
- else if (a.dstId < b.dstId) -1
+ implicit def lexicographicOrdering[ED]: Ordering[EdgeWithLocalIds[ED]] =
+ new Ordering[EdgeWithLocalIds[ED]] {
+ override def compare(a: EdgeWithLocalIds[ED], b: EdgeWithLocalIds[ED]): Int = {
+ if (a.srcId == b.srcId) {
+ if (a.dstId == b.dstId) 0
+ else if (a.dstId < b.dstId) -1
+ else 1
+ } else if (a.srcId < b.srcId) -1
else 1
- } else if (a.srcId < b.srcId) -1
- else 1
+ }
}
- }
- private[graphx] def edgeArraySortDataFormat[ED]
- = new SortDataFormat[EdgeWithLocalIds[ED], Array[EdgeWithLocalIds[ED]]] {
- override def getKey(
- data: Array[EdgeWithLocalIds[ED]], pos: Int): EdgeWithLocalIds[ED] = {
- data(pos)
- }
+ private[graphx] def edgeArraySortDataFormat[ED] = {
+ new SortDataFormat[EdgeWithLocalIds[ED], Array[EdgeWithLocalIds[ED]]] {
+ override def getKey(data: Array[EdgeWithLocalIds[ED]], pos: Int): EdgeWithLocalIds[ED] = {
+ data(pos)
+ }
- override def swap(data: Array[EdgeWithLocalIds[ED]], pos0: Int, pos1: Int): Unit = {
- val tmp = data(pos0)
- data(pos0) = data(pos1)
- data(pos1) = tmp
- }
+ override def swap(data: Array[EdgeWithLocalIds[ED]], pos0: Int, pos1: Int): Unit = {
+ val tmp = data(pos0)
+ data(pos0) = data(pos1)
+ data(pos1) = tmp
+ }
- override def copyElement(
- src: Array[EdgeWithLocalIds[ED]], srcPos: Int,
- dst: Array[EdgeWithLocalIds[ED]], dstPos: Int) {
- dst(dstPos) = src(srcPos)
- }
+ override def copyElement(
+ src: Array[EdgeWithLocalIds[ED]], srcPos: Int,
+ dst: Array[EdgeWithLocalIds[ED]], dstPos: Int) {
+ dst(dstPos) = src(srcPos)
+ }
- override def copyRange(
- src: Array[EdgeWithLocalIds[ED]], srcPos: Int,
- dst: Array[EdgeWithLocalIds[ED]], dstPos: Int, length: Int) {
- System.arraycopy(src, srcPos, dst, dstPos, length)
- }
+ override def copyRange(
+ src: Array[EdgeWithLocalIds[ED]], srcPos: Int,
+ dst: Array[EdgeWithLocalIds[ED]], dstPos: Int, length: Int) {
+ System.arraycopy(src, srcPos, dst, dstPos, length)
+ }
- override def allocate(length: Int): Array[EdgeWithLocalIds[ED]] = {
- new Array[EdgeWithLocalIds[ED]](length)
+ override def allocate(length: Int): Array[EdgeWithLocalIds[ED]] = {
+ new Array[EdgeWithLocalIds[ED]](length)
+ }
}
}
}
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/impl/ShippableVertexPartition.scala b/graphx/src/main/scala/org/apache/spark/graphx/impl/ShippableVertexPartition.scala
index 5412d72047..aa320088f2 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/impl/ShippableVertexPartition.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/impl/ShippableVertexPartition.scala
@@ -74,8 +74,8 @@ object ShippableVertexPartition {
* Implicit conversion to allow invoking `VertexPartitionBase` operations directly on a
* `ShippableVertexPartition`.
*/
- implicit def shippablePartitionToOps[VD: ClassTag](partition: ShippableVertexPartition[VD]) =
- new ShippableVertexPartitionOps(partition)
+ implicit def shippablePartitionToOps[VD: ClassTag](partition: ShippableVertexPartition[VD])
+ : ShippableVertexPartitionOps[VD] = new ShippableVertexPartitionOps(partition)
/**
* Implicit evidence that `ShippableVertexPartition` is a member of the
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/impl/VertexPartition.scala b/graphx/src/main/scala/org/apache/spark/graphx/impl/VertexPartition.scala
index 55c7a19d1b..fbe53acfc3 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/impl/VertexPartition.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/impl/VertexPartition.scala
@@ -38,8 +38,8 @@ private[graphx] object VertexPartition {
* Implicit conversion to allow invoking `VertexPartitionBase` operations directly on a
* `VertexPartition`.
*/
- implicit def partitionToOps[VD: ClassTag](partition: VertexPartition[VD]) =
- new VertexPartitionOps(partition)
+ implicit def partitionToOps[VD: ClassTag](partition: VertexPartition[VD])
+ : VertexPartitionOps[VD] = new VertexPartitionOps(partition)
/**
* Implicit evidence that `VertexPartition` is a member of the `VertexPartitionBaseOpsConstructor`
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/impl/VertexPartitionBaseOps.scala b/graphx/src/main/scala/org/apache/spark/graphx/impl/VertexPartitionBaseOps.scala
index b40aa1b417..4fd2548b7f 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/impl/VertexPartitionBaseOps.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/impl/VertexPartitionBaseOps.scala
@@ -238,8 +238,8 @@ private[graphx] abstract class VertexPartitionBaseOps
* because these methods return a `Self` and this implicit conversion re-wraps that in a
* `VertexPartitionBaseOps`. This relies on the context bound on `Self`.
*/
- private implicit def toOps[VD2: ClassTag](
- partition: Self[VD2]): VertexPartitionBaseOps[VD2, Self] = {
+ private implicit def toOps[VD2: ClassTag](partition: Self[VD2])
+ : VertexPartitionBaseOps[VD2, Self] = {
implicitly[VertexPartitionBaseOpsConstructor[Self]].toOps(partition)
}
}