about | summary | refs | log | tree | commit | diff
path: root/graphx
diff options
context:
space:
mode:
authorReynold Xin <rxin@databricks.com>2015-04-03 01:25:02 -0700
committerReynold Xin <rxin@databricks.com>2015-04-03 01:25:02 -0700
commit82701ee25fda64f03899713bc56f82ca6f278151 (patch)
tree07fba36d66228f7561bd65dd502fd668d50a9be5 /graphx
parentc42c3fc7f7b79a1f6ce990d39b5d9d14ab19fcf0 (diff)
downloadspark-82701ee25fda64f03899713bc56f82ca6f278151.tar.gz
spark-82701ee25fda64f03899713bc56f82ca6f278151.tar.bz2
spark-82701ee25fda64f03899713bc56f82ca6f278151.zip
[SPARK-6428] Turn on explicit type checking for public methods.
This builds on my earlier pull requests and turns on the explicit type checking in scalastyle. Author: Reynold Xin <rxin@databricks.com> Closes #5342 from rxin/SPARK-6428 and squashes the following commits: 7b531ab [Reynold Xin] import ordering 2d9a8a5 [Reynold Xin] jl e668b1c [Reynold Xin] override 9b9e119 [Reynold Xin] Parenthesis. 82e0cf5 [Reynold Xin] [SPARK-6428] Turn on explicit type checking for public methods.
Diffstat (limited to 'graphx')
-rw-r--r--graphx/src/main/scala/org/apache/spark/graphx/EdgeContext.scala3
-rw-r--r--graphx/src/main/scala/org/apache/spark/graphx/EdgeDirection.scala12
-rw-r--r--graphx/src/main/scala/org/apache/spark/graphx/EdgeTriplet.scala2
-rw-r--r--graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartition.scala14
-rw-r--r--graphx/src/main/scala/org/apache/spark/graphx/impl/EdgeRDDImpl.scala4
-rw-r--r--graphx/src/main/scala/org/apache/spark/graphx/impl/ReplicatedVertexView.scala2
-rw-r--r--graphx/src/main/scala/org/apache/spark/graphx/impl/VertexRDDImpl.scala4
-rw-r--r--graphx/src/main/scala/org/apache/spark/graphx/lib/ConnectedComponents.scala2
-rw-r--r--graphx/src/main/scala/org/apache/spark/graphx/lib/LabelPropagation.scala4
-rw-r--r--graphx/src/main/scala/org/apache/spark/graphx/lib/PageRank.scala2
-rw-r--r--graphx/src/main/scala/org/apache/spark/graphx/util/collection/GraphXPrimitiveKeyOpenHashMap.scala8
11 files changed, 28 insertions, 29 deletions
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/EdgeContext.scala b/graphx/src/main/scala/org/apache/spark/graphx/EdgeContext.scala
index d8be02e202..23430179f1 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/EdgeContext.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/EdgeContext.scala
@@ -62,7 +62,6 @@ object EdgeContext {
* , _ + _)
* }}}
*/
- def unapply[VD, ED, A](edge: EdgeContext[VD, ED, A]) =
+ def unapply[VD, ED, A](edge: EdgeContext[VD, ED, A]): Some[(VertexId, VertexId, VD, VD, ED)] =
Some(edge.srcId, edge.dstId, edge.srcAttr, edge.dstAttr, edge.attr)
}
-
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/EdgeDirection.scala b/graphx/src/main/scala/org/apache/spark/graphx/EdgeDirection.scala
index 6f03eb1439..058c8c8aa1 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/EdgeDirection.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/EdgeDirection.scala
@@ -34,12 +34,12 @@ class EdgeDirection private (private val name: String) extends Serializable {
override def toString: String = "EdgeDirection." + name
- override def equals(o: Any) = o match {
+ override def equals(o: Any): Boolean = o match {
case other: EdgeDirection => other.name == name
case _ => false
}
- override def hashCode = name.hashCode
+ override def hashCode: Int = name.hashCode
}
@@ -48,14 +48,14 @@ class EdgeDirection private (private val name: String) extends Serializable {
*/
object EdgeDirection {
/** Edges arriving at a vertex. */
- final val In = new EdgeDirection("In")
+ final val In: EdgeDirection = new EdgeDirection("In")
/** Edges originating from a vertex. */
- final val Out = new EdgeDirection("Out")
+ final val Out: EdgeDirection = new EdgeDirection("Out")
/** Edges originating from *or* arriving at a vertex of interest. */
- final val Either = new EdgeDirection("Either")
+ final val Either: EdgeDirection = new EdgeDirection("Either")
/** Edges originating from *and* arriving at a vertex of interest. */
- final val Both = new EdgeDirection("Both")
+ final val Both: EdgeDirection = new EdgeDirection("Both")
}
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/EdgeTriplet.scala b/graphx/src/main/scala/org/apache/spark/graphx/EdgeTriplet.scala
index 9d473d5ebd..c8790cac3d 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/EdgeTriplet.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/EdgeTriplet.scala
@@ -62,7 +62,7 @@ class EdgeTriplet[VD, ED] extends Edge[ED] {
def vertexAttr(vid: VertexId): VD =
if (srcId == vid) srcAttr else { assert(dstId == vid); dstAttr }
- override def toString = ((srcId, srcAttr), (dstId, dstAttr), attr).toString()
+ override def toString: String = ((srcId, srcAttr), (dstId, dstAttr), attr).toString()
def toTuple: ((VertexId, VD), (VertexId, VD), ED) = ((srcId, srcAttr), (dstId, dstAttr), attr)
}
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartition.scala b/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartition.scala
index 373af75448..c561570809 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartition.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartition.scala
@@ -324,7 +324,7 @@ class EdgePartition[
*
* @return an iterator over edges in the partition
*/
- def iterator = new Iterator[Edge[ED]] {
+ def iterator: Iterator[Edge[ED]] = new Iterator[Edge[ED]] {
private[this] val edge = new Edge[ED]
private[this] var pos = 0
@@ -351,7 +351,7 @@ class EdgePartition[
override def hasNext: Boolean = pos < EdgePartition.this.size
- override def next() = {
+ override def next(): EdgeTriplet[VD, ED] = {
val triplet = new EdgeTriplet[VD, ED]
val localSrcId = localSrcIds(pos)
val localDstId = localDstIds(pos)
@@ -518,11 +518,11 @@ private class AggregatingEdgeContext[VD, ED, A](
_attr = attr
}
- override def srcId = _srcId
- override def dstId = _dstId
- override def srcAttr = _srcAttr
- override def dstAttr = _dstAttr
- override def attr = _attr
+ override def srcId: VertexId = _srcId
+ override def dstId: VertexId = _dstId
+ override def srcAttr: VD = _srcAttr
+ override def dstAttr: VD = _dstAttr
+ override def attr: ED = _attr
override def sendToSrc(msg: A) {
send(_localSrcId, msg)
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgeRDDImpl.scala b/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgeRDDImpl.scala
index 43a3aea0f6..c88b2f65a8 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgeRDDImpl.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgeRDDImpl.scala
@@ -70,9 +70,9 @@ class EdgeRDDImpl[ED: ClassTag, VD: ClassTag] private[graphx] (
this
}
- override def getStorageLevel = partitionsRDD.getStorageLevel
+ override def getStorageLevel: StorageLevel = partitionsRDD.getStorageLevel
- override def checkpoint() = {
+ override def checkpoint(): Unit = {
partitionsRDD.checkpoint()
}
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/impl/ReplicatedVertexView.scala b/graphx/src/main/scala/org/apache/spark/graphx/impl/ReplicatedVertexView.scala
index 8ab255bd40..1df86449fa 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/impl/ReplicatedVertexView.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/impl/ReplicatedVertexView.scala
@@ -50,7 +50,7 @@ class ReplicatedVertexView[VD: ClassTag, ED: ClassTag](
* Return a new `ReplicatedVertexView` where edges are reversed and shipping levels are swapped to
* match.
*/
- def reverse() = {
+ def reverse(): ReplicatedVertexView[VD, ED] = {
val newEdges = edges.mapEdgePartitions((pid, part) => part.reverse)
new ReplicatedVertexView(newEdges, hasDstId, hasSrcId)
}
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/impl/VertexRDDImpl.scala b/graphx/src/main/scala/org/apache/spark/graphx/impl/VertexRDDImpl.scala
index 349c8545bf..33ac7b0ed6 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/impl/VertexRDDImpl.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/impl/VertexRDDImpl.scala
@@ -71,9 +71,9 @@ class VertexRDDImpl[VD] private[graphx] (
this
}
- override def getStorageLevel = partitionsRDD.getStorageLevel
+ override def getStorageLevel: StorageLevel = partitionsRDD.getStorageLevel
- override def checkpoint() = {
+ override def checkpoint(): Unit = {
partitionsRDD.checkpoint()
}
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/lib/ConnectedComponents.scala b/graphx/src/main/scala/org/apache/spark/graphx/lib/ConnectedComponents.scala
index e2f6cc1389..859f896039 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/lib/ConnectedComponents.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/lib/ConnectedComponents.scala
@@ -37,7 +37,7 @@ object ConnectedComponents {
*/
def run[VD: ClassTag, ED: ClassTag](graph: Graph[VD, ED]): Graph[VertexId, ED] = {
val ccGraph = graph.mapVertices { case (vid, _) => vid }
- def sendMessage(edge: EdgeTriplet[VertexId, ED]) = {
+ def sendMessage(edge: EdgeTriplet[VertexId, ED]): Iterator[(VertexId, VertexId)] = {
if (edge.srcAttr < edge.dstAttr) {
Iterator((edge.dstId, edge.srcAttr))
} else if (edge.srcAttr > edge.dstAttr) {
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/lib/LabelPropagation.scala b/graphx/src/main/scala/org/apache/spark/graphx/lib/LabelPropagation.scala
index 82e9e06515..2bcf8684b8 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/lib/LabelPropagation.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/lib/LabelPropagation.scala
@@ -43,7 +43,7 @@ object LabelPropagation {
*/
def run[VD, ED: ClassTag](graph: Graph[VD, ED], maxSteps: Int): Graph[VertexId, ED] = {
val lpaGraph = graph.mapVertices { case (vid, _) => vid }
- def sendMessage(e: EdgeTriplet[VertexId, ED]) = {
+ def sendMessage(e: EdgeTriplet[VertexId, ED]): Iterator[(VertexId, Map[VertexId, VertexId])] = {
Iterator((e.srcId, Map(e.dstAttr -> 1L)), (e.dstId, Map(e.srcAttr -> 1L)))
}
def mergeMessage(count1: Map[VertexId, Long], count2: Map[VertexId, Long])
@@ -54,7 +54,7 @@ object LabelPropagation {
i -> (count1Val + count2Val)
}.toMap
}
- def vertexProgram(vid: VertexId, attr: Long, message: Map[VertexId, Long]) = {
+ def vertexProgram(vid: VertexId, attr: Long, message: Map[VertexId, Long]): VertexId = {
if (message.isEmpty) attr else message.maxBy(_._2)._1
}
val initialMessage = Map[VertexId, Long]()
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/lib/PageRank.scala b/graphx/src/main/scala/org/apache/spark/graphx/lib/PageRank.scala
index 570440ba44..042e366a29 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/lib/PageRank.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/lib/PageRank.scala
@@ -156,7 +156,7 @@ object PageRank extends Logging {
(newPR, newPR - oldPR)
}
- def sendMessage(edge: EdgeTriplet[(Double, Double), Double]) = {
+ def sendMessage(edge: EdgeTriplet[(Double, Double), Double]): Iterator[(VertexId, Double)] = {
if (edge.srcAttr._2 > tol) {
Iterator((edge.dstId, edge.srcAttr._2 * edge.attr))
} else {
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/util/collection/GraphXPrimitiveKeyOpenHashMap.scala b/graphx/src/main/scala/org/apache/spark/graphx/util/collection/GraphXPrimitiveKeyOpenHashMap.scala
index 57b01b6f2e..e2754ea699 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/util/collection/GraphXPrimitiveKeyOpenHashMap.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/util/collection/GraphXPrimitiveKeyOpenHashMap.scala
@@ -56,7 +56,7 @@ class GraphXPrimitiveKeyOpenHashMap[@specialized(Long, Int) K: ClassTag,
private var _oldValues: Array[V] = null
- override def size = keySet.size
+ override def size: Int = keySet.size
/** Get the value for a given key */
def apply(k: K): V = {
@@ -112,7 +112,7 @@ class GraphXPrimitiveKeyOpenHashMap[@specialized(Long, Int) K: ClassTag,
}
}
- override def iterator = new Iterator[(K, V)] {
+ override def iterator: Iterator[(K, V)] = new Iterator[(K, V)] {
var pos = 0
var nextPair: (K, V) = computeNextPair()
@@ -128,9 +128,9 @@ class GraphXPrimitiveKeyOpenHashMap[@specialized(Long, Int) K: ClassTag,
}
}
- def hasNext = nextPair != null
+ def hasNext: Boolean = nextPair != null
- def next() = {
+ def next(): (K, V) = {
val pair = nextPair
nextPair = computeNextPair()
pair