author     Reynold Xin <rxin@databricks.com>  2015-05-28 20:17:16 -0700
committer  Reynold Xin <rxin@databricks.com>  2015-05-28 20:17:16 -0700
commit     b069ad23d9b6cbfb3a8bf245547add4816669075 (patch)
tree       82498e8fa61ae0399d1d7f2eefb04e521285da53 /graphx/src/test/scala/org
parent     7f7505d8db7759ea46e904f767c23130eff1104a (diff)
[SPARK-7927] whitespace fixes for GraphX.
So we can enable a whitespace enforcement rule in the style checker to save code review time.

Author: Reynold Xin <rxin@databricks.com>

Closes #6474 from rxin/whitespace-graphx and squashes the following commits:

4d3cd26 [Reynold Xin] Fixed tests.
869dde4 [Reynold Xin] [SPARK-7927] whitespace fixes for GraphX.
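The patch applies three mechanical conventions throughout the GraphX test suites: a single space after the colon in type ascriptions, a single space after commas in tuples, argument lists, and case patterns, and braced if/else branches with one statement per line. (The style checker referenced here is Scalastyle, which Spark's build runs over the Scala sources.) As a minimal, self-contained sketch of those conventions — the object and values below are illustrative only, not code from this patch; in GraphX itself VertexId is a type alias for Long:

object WhitespaceConventions {
  type VertexId = Long // stand-in for org.apache.spark.graphx.VertexId

  def main(args: Array[String]): Unit = {
    // Single space after ':' in type ascriptions; single space after ',' in tuples.
    val vertices = (0 to 5).map(x => (x: VertexId, x))
    // Single space after ',' when destructuring in case patterns.
    val reversed = vertices.map { case (s, d) => (d, s) }
    // if/else bodies get braces, with one statement per line.
    for ((id, _) <- reversed) {
      if (id < 3) {
        println(s"$id: low")
      } else {
        println(s"$id: high")
      }
    }
  }
}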
Diffstat (limited to 'graphx/src/test/scala/org')
-rw-r--r--  graphx/src/test/scala/org/apache/spark/graphx/GraphOpsSuite.scala                 6
-rw-r--r--  graphx/src/test/scala/org/apache/spark/graphx/GraphSuite.scala                    6
-rw-r--r--  graphx/src/test/scala/org/apache/spark/graphx/lib/ConnectedComponentsSuite.scala  15
-rw-r--r--  graphx/src/test/scala/org/apache/spark/graphx/lib/PageRankSuite.scala             14
-rw-r--r--  graphx/src/test/scala/org/apache/spark/graphx/lib/TriangleCountSuite.scala        2
5 files changed, 23 insertions, 20 deletions
diff --git a/graphx/src/test/scala/org/apache/spark/graphx/GraphOpsSuite.scala b/graphx/src/test/scala/org/apache/spark/graphx/GraphOpsSuite.scala
index 9bc8007ce4..68fe83739e 100644
--- a/graphx/src/test/scala/org/apache/spark/graphx/GraphOpsSuite.scala
+++ b/graphx/src/test/scala/org/apache/spark/graphx/GraphOpsSuite.scala
@@ -59,7 +59,7 @@ class GraphOpsSuite extends FunSuite with LocalSparkContext {
test ("filter") {
withSpark { sc =>
val n = 5
- val vertices = sc.parallelize((0 to n).map(x => (x:VertexId, x)))
+ val vertices = sc.parallelize((0 to n).map(x => (x: VertexId, x)))
val edges = sc.parallelize((1 to n).map(x => Edge(0, x, x)))
val graph: Graph[Int, Int] = Graph(vertices, edges).cache()
val filteredGraph = graph.filter(
@@ -67,11 +67,11 @@ class GraphOpsSuite extends FunSuite with LocalSparkContext {
val degrees: VertexRDD[Int] = graph.outDegrees
graph.outerJoinVertices(degrees) {(vid, data, deg) => deg.getOrElse(0)}
},
- vpred = (vid: VertexId, deg:Int) => deg > 0
+ vpred = (vid: VertexId, deg: Int) => deg > 0
).cache()
val v = filteredGraph.vertices.collect().toSet
- assert(v === Set((0,0)))
+ assert(v === Set((0, 0)))
// the map is necessary because of object-reuse in the edge iterator
val e = filteredGraph.edges.map(e => Edge(e.srcId, e.dstId, e.attr)).collect().toSet
diff --git a/graphx/src/test/scala/org/apache/spark/graphx/GraphSuite.scala b/graphx/src/test/scala/org/apache/spark/graphx/GraphSuite.scala
index a570e4ed75..2b1d8e4732 100644
--- a/graphx/src/test/scala/org/apache/spark/graphx/GraphSuite.scala
+++ b/graphx/src/test/scala/org/apache/spark/graphx/GraphSuite.scala
@@ -248,7 +248,7 @@ class GraphSuite extends FunSuite with LocalSparkContext {
test("mask") {
withSpark { sc =>
val n = 5
- val vertices = sc.parallelize((0 to n).map(x => (x:VertexId, x)))
+ val vertices = sc.parallelize((0 to n).map(x => (x: VertexId, x)))
val edges = sc.parallelize((1 to n).map(x => Edge(0, x, x)))
val graph: Graph[Int, Int] = Graph(vertices, edges).cache()
@@ -260,11 +260,11 @@ class GraphSuite extends FunSuite with LocalSparkContext {
val projectedGraph = graph.mask(subgraph)
val v = projectedGraph.vertices.collect().toSet
- assert(v === Set((0,0), (1,1), (2,2), (4,4), (5,5)))
+ assert(v === Set((0, 0), (1, 1), (2, 2), (4, 4), (5, 5)))
// the map is necessary because of object-reuse in the edge iterator
val e = projectedGraph.edges.map(e => Edge(e.srcId, e.dstId, e.attr)).collect().toSet
- assert(e === Set(Edge(0,1,1), Edge(0,2,2), Edge(0,5,5)))
+ assert(e === Set(Edge(0, 1, 1), Edge(0, 2, 2), Edge(0, 5, 5)))
}
}
diff --git a/graphx/src/test/scala/org/apache/spark/graphx/lib/ConnectedComponentsSuite.scala b/graphx/src/test/scala/org/apache/spark/graphx/lib/ConnectedComponentsSuite.scala
index 4cc30a9640..accccfc232 100644
--- a/graphx/src/test/scala/org/apache/spark/graphx/lib/ConnectedComponentsSuite.scala
+++ b/graphx/src/test/scala/org/apache/spark/graphx/lib/ConnectedComponentsSuite.scala
@@ -52,13 +52,16 @@ class ConnectedComponentsSuite extends FunSuite with LocalSparkContext {
withSpark { sc =>
val chain1 = (0 until 9).map(x => (x, x + 1))
val chain2 = (10 until 20).map(x => (x, x + 1))
- val rawEdges = sc.parallelize(chain1 ++ chain2, 3).map { case (s,d) => (s.toLong, d.toLong) }
+ val rawEdges = sc.parallelize(chain1 ++ chain2, 3).map { case (s, d) => (s.toLong, d.toLong) }
val twoChains = Graph.fromEdgeTuples(rawEdges, 1.0)
val ccGraph = twoChains.connectedComponents()
val vertices = ccGraph.vertices.collect()
for ( (id, cc) <- vertices ) {
- if(id < 10) { assert(cc === 0) }
- else { assert(cc === 10) }
+ if (id < 10) {
+ assert(cc === 0)
+ } else {
+ assert(cc === 10)
+ }
}
val ccMap = vertices.toMap
for (id <- 0 until 20) {
@@ -75,7 +78,7 @@ class ConnectedComponentsSuite extends FunSuite with LocalSparkContext {
withSpark { sc =>
val chain1 = (0 until 9).map(x => (x, x + 1))
val chain2 = (10 until 20).map(x => (x, x + 1))
- val rawEdges = sc.parallelize(chain1 ++ chain2, 3).map { case (s,d) => (s.toLong, d.toLong) }
+ val rawEdges = sc.parallelize(chain1 ++ chain2, 3).map { case (s, d) => (s.toLong, d.toLong) }
val twoChains = Graph.fromEdgeTuples(rawEdges, true).reverse
val ccGraph = twoChains.connectedComponents()
val vertices = ccGraph.vertices.collect()
@@ -106,9 +109,9 @@ class ConnectedComponentsSuite extends FunSuite with LocalSparkContext {
(4L, ("peter", "student"))))
// Create an RDD for edges
val relationships: RDD[Edge[String]] =
- sc.parallelize(Array(Edge(3L, 7L, "collab"), Edge(5L, 3L, "advisor"),
+ sc.parallelize(Array(Edge(3L, 7L, "collab"), Edge(5L, 3L, "advisor"),
Edge(2L, 5L, "colleague"), Edge(5L, 7L, "pi"),
- Edge(4L, 0L, "student"), Edge(5L, 0L, "colleague")))
+ Edge(4L, 0L, "student"), Edge(5L, 0L, "colleague")))
// Edges are:
// 2 ---> 5 ---> 3
// | \
diff --git a/graphx/src/test/scala/org/apache/spark/graphx/lib/PageRankSuite.scala b/graphx/src/test/scala/org/apache/spark/graphx/lib/PageRankSuite.scala
index 3f3c9dfd7b..39c6ace912 100644
--- a/graphx/src/test/scala/org/apache/spark/graphx/lib/PageRankSuite.scala
+++ b/graphx/src/test/scala/org/apache/spark/graphx/lib/PageRankSuite.scala
@@ -31,14 +31,14 @@ object GridPageRank {
def sub2ind(r: Int, c: Int): Int = r * nCols + c
// Make the grid graph
for (r <- 0 until nRows; c <- 0 until nCols) {
- val ind = sub2ind(r,c)
+ val ind = sub2ind(r, c)
if (r + 1 < nRows) {
outDegree(ind) += 1
- inNbrs(sub2ind(r + 1,c)) += ind
+ inNbrs(sub2ind(r + 1, c)) += ind
}
if (c + 1 < nCols) {
outDegree(ind) += 1
- inNbrs(sub2ind(r,c + 1)) += ind
+ inNbrs(sub2ind(r, c + 1)) += ind
}
}
// compute the pagerank
@@ -99,8 +99,8 @@ class PageRankSuite extends FunSuite with LocalSparkContext {
val resetProb = 0.15
val errorTol = 1.0e-5
- val staticRanks1 = starGraph.staticPersonalizedPageRank(0,numIter = 1, resetProb).vertices
- val staticRanks2 = starGraph.staticPersonalizedPageRank(0,numIter = 2, resetProb)
+ val staticRanks1 = starGraph.staticPersonalizedPageRank(0, numIter = 1, resetProb).vertices
+ val staticRanks2 = starGraph.staticPersonalizedPageRank(0, numIter = 2, resetProb)
.vertices.cache()
// Static PageRank should only take 2 iterations to converge
@@ -117,7 +117,7 @@ class PageRankSuite extends FunSuite with LocalSparkContext {
}
assert(staticErrors.sum === 0)
- val dynamicRanks = starGraph.personalizedPageRank(0,0, resetProb).vertices.cache()
+ val dynamicRanks = starGraph.personalizedPageRank(0, 0, resetProb).vertices.cache()
assert(compareRanks(staticRanks2, dynamicRanks) < errorTol)
}
} // end of test Star PageRank
@@ -162,7 +162,7 @@ class PageRankSuite extends FunSuite with LocalSparkContext {
test("Chain PersonalizedPageRank") {
withSpark { sc =>
val chain1 = (0 until 9).map(x => (x, x + 1) )
- val rawEdges = sc.parallelize(chain1, 1).map { case (s,d) => (s.toLong, d.toLong) }
+ val rawEdges = sc.parallelize(chain1, 1).map { case (s, d) => (s.toLong, d.toLong) }
val chain = Graph.fromEdgeTuples(rawEdges, 1.0).cache()
val resetProb = 0.15
val tol = 0.0001
diff --git a/graphx/src/test/scala/org/apache/spark/graphx/lib/TriangleCountSuite.scala b/graphx/src/test/scala/org/apache/spark/graphx/lib/TriangleCountSuite.scala
index 293c7f3ba4..79bf4e6cd1 100644
--- a/graphx/src/test/scala/org/apache/spark/graphx/lib/TriangleCountSuite.scala
+++ b/graphx/src/test/scala/org/apache/spark/graphx/lib/TriangleCountSuite.scala
@@ -58,7 +58,7 @@ class TriangleCountSuite extends FunSuite with LocalSparkContext {
val triangles =
Array(0L -> 1L, 1L -> 2L, 2L -> 0L) ++
Array(0L -> -1L, -1L -> -2L, -2L -> 0L)
- val revTriangles = triangles.map { case (a,b) => (b,a) }
+ val revTriangles = triangles.map { case (a, b) => (b, a) }
val rawEdges = sc.parallelize(triangles ++ revTriangles, 2)
val graph = Graph.fromEdgeTuples(rawEdges, true).cache()
val triangleCount = graph.triangleCount()