diff options
author | Sandeep <sandeep@techaddict.me> | 2014-04-10 15:04:13 -0700 |
---|---|---|
committer | Patrick Wendell <pwendell@gmail.com> | 2014-04-10 15:04:13 -0700 |
commit | 930b70f0523e96fe01c1317ef7fad1b76b36d4d9 (patch) | |
tree | fba70b8897f6c5ae1123e4717d8efdb4d4b0acc4 /graphx/src | |
parent | f0466625200842f3cc486e9aa1caa417586be533 (diff) | |
download | spark-930b70f0523e96fe01c1317ef7fad1b76b36d4d9.tar.gz spark-930b70f0523e96fe01c1317ef7fad1b76b36d4d9.tar.bz2 spark-930b70f0523e96fe01c1317ef7fad1b76b36d4d9.zip |
Remove unnecessary whitespace
These are stacked together in a single commit; otherwise they would show up chunk by chunk in separate commits.
Author: Sandeep <sandeep@techaddict.me>
Closes #380 from techaddict/white_space and squashes the following commits:
b58f294 [Sandeep] Remove unnecessary whitespace
Diffstat (limited to 'graphx/src')
-rw-r--r-- | graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala | 2 | ||||
-rw-r--r-- | graphx/src/test/scala/org/apache/spark/graphx/GraphOpsSuite.scala | 2 |
2 files changed, 2 insertions, 2 deletions
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala b/graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala index 377d9d6bd5..5635287694 100644 --- a/graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala +++ b/graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala @@ -172,7 +172,7 @@ class GraphOps[VD: ClassTag, ED: ClassTag](graph: Graph[VD, ED]) extends Seriali "EdgeDirection.Either instead.") } } - + /** * Join the vertices with an RDD and then apply a function from the * the vertex and RDD entry to a new vertex value. The input table diff --git a/graphx/src/test/scala/org/apache/spark/graphx/GraphOpsSuite.scala b/graphx/src/test/scala/org/apache/spark/graphx/GraphOpsSuite.scala index 6386306c04..a467ca1ae7 100644 --- a/graphx/src/test/scala/org/apache/spark/graphx/GraphOpsSuite.scala +++ b/graphx/src/test/scala/org/apache/spark/graphx/GraphOpsSuite.scala @@ -55,7 +55,7 @@ class GraphOpsSuite extends FunSuite with LocalSparkContext { } } } - + test ("filter") { withSpark { sc => val n = 5 |