aboutsummaryrefslogtreecommitdiff
path: root/graphx
diff options
context:
space:
mode:
authorhyukjinkwon <gurwls223@gmail.com>2016-11-29 13:50:24 +0000
committerSean Owen <sowen@cloudera.com>2016-11-29 13:50:24 +0000
commit1a870090e4266df570c3f56c1e2ea12d090d03d1 (patch)
treea9fb06a37865beff9a77f805e5c3cd5ed1e40ec8 /graphx
parentf045d9dade66d44f5ca4768bfe6a484e9288ec8d (diff)
downloadspark-1a870090e4266df570c3f56c1e2ea12d090d03d1.tar.gz
spark-1a870090e4266df570c3f56c1e2ea12d090d03d1.tar.bz2
spark-1a870090e4266df570c3f56c1e2ea12d090d03d1.zip
[SPARK-18615][DOCS] Switch to multi-line doc to avoid a genjavadoc bug for backticks
## What changes were proposed in this pull request? Currently, a single-line comment does not mark down backticks to `<code>..</code>` but prints them as they are (`` `..` ``). For example, the line below: ```scala /** Return an RDD with the pairs from `this` whose keys are not in `other`. */ ``` So, we could work around this as below: ```scala /** * Return an RDD with the pairs from `this` whose keys are not in `other`. */ ``` - javadoc - **Before** ![2016-11-29 10 39 14](https://cloud.githubusercontent.com/assets/6477701/20693606/e64c8f90-b622-11e6-8dfc-4a029216e23d.png) - **After** ![2016-11-29 10 39 08](https://cloud.githubusercontent.com/assets/6477701/20693607/e7280d36-b622-11e6-8502-d2e21cd5556b.png) - scaladoc (this one looks fine either way) - **Before** ![2016-11-29 10 38 22](https://cloud.githubusercontent.com/assets/6477701/20693640/12c18aa8-b623-11e6-901a-693e2f6f8066.png) - **After** ![2016-11-29 10 40 05](https://cloud.githubusercontent.com/assets/6477701/20693642/14eb043a-b623-11e6-82ac-7cd0000106d1.png) I suspect this is related to SPARK-16153 and the genjavadoc issue in `typesafehub/genjavadoc#85`. ## How was this patch tested? I found them via ``` grep -r "\/\*\*.*\`" . | grep .scala ``` and then checked whether each is in the public API documentation with manually built docs (`jekyll build`) with Java 7. Author: hyukjinkwon <gurwls223@gmail.com> Closes #16050 from HyukjinKwon/javadoc-markdown.
Diffstat (limited to 'graphx')
-rw-r--r--graphx/src/main/scala/org/apache/spark/graphx/impl/EdgeRDDImpl.scala4
-rw-r--r--graphx/src/main/scala/org/apache/spark/graphx/impl/GraphImpl.scala12
-rw-r--r--graphx/src/main/scala/org/apache/spark/graphx/impl/VertexRDDImpl.scala4
3 files changed, 15 insertions, 5 deletions
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgeRDDImpl.scala b/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgeRDDImpl.scala
index faa985594e..376c7b06f9 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgeRDDImpl.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgeRDDImpl.scala
@@ -63,7 +63,9 @@ class EdgeRDDImpl[ED: ClassTag, VD: ClassTag] private[graphx] (
this
}
- /** Persists the edge partitions using `targetStorageLevel`, which defaults to MEMORY_ONLY. */
+ /**
+ * Persists the edge partitions using `targetStorageLevel`, which defaults to MEMORY_ONLY.
+ */
override def cache(): this.type = {
partitionsRDD.persist(targetStorageLevel)
this
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/impl/GraphImpl.scala b/graphx/src/main/scala/org/apache/spark/graphx/impl/GraphImpl.scala
index 3810110099..5d2a53782b 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/impl/GraphImpl.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/impl/GraphImpl.scala
@@ -277,7 +277,9 @@ class GraphImpl[VD: ClassTag, ED: ClassTag] protected (
object GraphImpl {
- /** Create a graph from edges, setting referenced vertices to `defaultVertexAttr`. */
+ /**
+ * Create a graph from edges, setting referenced vertices to `defaultVertexAttr`.
+ */
def apply[VD: ClassTag, ED: ClassTag](
edges: RDD[Edge[ED]],
defaultVertexAttr: VD,
@@ -286,7 +288,9 @@ object GraphImpl {
fromEdgeRDD(EdgeRDD.fromEdges(edges), defaultVertexAttr, edgeStorageLevel, vertexStorageLevel)
}
- /** Create a graph from EdgePartitions, setting referenced vertices to `defaultVertexAttr`. */
+ /**
+ * Create a graph from EdgePartitions, setting referenced vertices to `defaultVertexAttr`.
+ */
def fromEdgePartitions[VD: ClassTag, ED: ClassTag](
edgePartitions: RDD[(PartitionID, EdgePartition[ED, VD])],
defaultVertexAttr: VD,
@@ -296,7 +300,9 @@ object GraphImpl {
vertexStorageLevel)
}
- /** Create a graph from vertices and edges, setting missing vertices to `defaultVertexAttr`. */
+ /**
+ * Create a graph from vertices and edges, setting missing vertices to `defaultVertexAttr`.
+ */
def apply[VD: ClassTag, ED: ClassTag](
vertices: RDD[(VertexId, VD)],
edges: RDD[Edge[ED]],
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/impl/VertexRDDImpl.scala b/graphx/src/main/scala/org/apache/spark/graphx/impl/VertexRDDImpl.scala
index d314522de9..3c6f22d973 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/impl/VertexRDDImpl.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/impl/VertexRDDImpl.scala
@@ -63,7 +63,9 @@ class VertexRDDImpl[VD] private[graphx] (
this
}
- /** Persists the vertex partitions at `targetStorageLevel`, which defaults to MEMORY_ONLY. */
+ /**
+ * Persists the vertex partitions at `targetStorageLevel`, which defaults to MEMORY_ONLY.
+ */
override def cache(): this.type = {
partitionsRDD.persist(targetStorageLevel)
this