author     Cheng Lian <lian.cs.zju@gmail.com>        2014-03-26 15:36:18 -0700
committer  Patrick Wendell <pwendell@gmail.com>      2014-03-26 15:36:18 -0700
commit     345825d97987b9eeb2afcf002f815a05ff51fc2e (patch)
tree       677dd5c9514680831ba238f7d0f61a37f0e302fa /core
parent     a0853a39e1907700737ec924367cd2610da8395a (diff)
Unified package definition format in Spark SQL
According to discussions in comments of PR #208, this PR unifies the package definition format in Spark SQL. Some broken links in ScalaDoc and typos detected along the way are also fixed.

Author: Cheng Lian <lian.cs.zju@gmail.com>

Closes #225 from liancheng/packageDefinition and squashes the following commits:

75c47b3 [Cheng Lian] Fixed file line length
4f87968 [Cheng Lian] Unified package definition format in Spark SQL
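For context, Scala allows a fully qualified package to be declared either as a single clause or as a chain of nested clauses, and the two differ in what is in scope without imports; this is the "package definition format" the commit title refers to. The snippet below only illustrates the chained style (the trait name is a made-up placeholder); which of the two styles Spark SQL standardizes on is not visible from this core-only view.

// Chained package clauses: this file's contents live in
// org.apache.spark.sql.catalyst, and members of the enclosing package
// org.apache.spark.sql are visible here without an import. The single-clause
// equivalent, `package org.apache.spark.sql.catalyst` on one line, does not
// bring the enclosing package's members into scope.
package org.apache.spark.sql
package catalyst

// Hypothetical placeholder so the file compiles on its own.
trait PackageStyleExample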
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/Dependency.scala           | 5
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala | 4
2 files changed, 5 insertions, 4 deletions
diff --git a/core/src/main/scala/org/apache/spark/Dependency.scala b/core/src/main/scala/org/apache/spark/Dependency.scala
index 448f87b81e..3132dcf745 100644
--- a/core/src/main/scala/org/apache/spark/Dependency.scala
+++ b/core/src/main/scala/org/apache/spark/Dependency.scala
@@ -44,8 +44,9 @@ abstract class NarrowDependency[T](rdd: RDD[T]) extends Dependency(rdd) {
* Represents a dependency on the output of a shuffle stage.
* @param rdd the parent RDD
* @param partitioner partitioner used to partition the shuffle output
- * @param serializer [[Serializer]] to use. If set to null, the default serializer, as specified
- * by `spark.serializer` config option, will be used.
+ * @param serializer [[org.apache.spark.serializer.Serializer Serializer]] to use. If set to null,
+ * the default serializer, as specified by `spark.serializer` config option, will
+ * be used.
*/
class ShuffleDependency[K, V](
@transient rdd: RDD[_ <: Product2[K, V]],
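The updated ScalaDoc above describes the fallback behaviour of ShuffleDependency's serializer parameter: when it is null, shuffles use the class named by the spark.serializer config option. A minimal sketch of that configuration path, assuming local mode and an illustrative app name (neither is part of this commit):

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.SparkContext._  // implicit conversions for pair-RDD operations

// Name a non-default serializer; shuffle dependencies created with a null
// serializer fall back to this setting.
val conf = new SparkConf()
  .setAppName("serializer-example")   // illustrative name
  .setMaster("local[*]")
  .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
val sc = new SparkContext(conf)

// reduceByKey shuffles its input, creating a ShuffleDependency internally;
// with no explicit Serializer it uses the Kryo serializer configured above.
val counts = sc.parallelize(Seq("a" -> 1, "b" -> 2, "a" -> 3)).reduceByKey(_ + _)
println(counts.collect().mkString(", "))
sc.stop()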
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
index 05b89b9857..ddac553304 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
@@ -481,7 +481,7 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
/**
* Returns the maximum element from this RDD as defined by the specified
* Comparator[T].
- * @params comp the comparator that defines ordering
+ * @param comp the comparator that defines ordering
* @return the maximum of the RDD
* */
def max(comp: Comparator[T]): T = {
@@ -491,7 +491,7 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
/**
* Returns the minimum element from this RDD as defined by the specified
* Comparator[T].
- * @params comp the comparator that defines ordering
+ * @param comp the comparator that defines ordering
* @return the minimum of the RDD
* */
def min(comp: Comparator[T]): T = {
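The two hunks above fix the `@params` tag on the Comparator-based max and min of the Java API. A small usage sketch from Scala, with a made-up comparator that orders strings by length (app name and master are again illustrative):

import java.util.{Arrays, Comparator}
import org.apache.spark.SparkConf
import org.apache.spark.api.java.JavaSparkContext

val jsc = new JavaSparkContext(
  new SparkConf().setAppName("comparator-example").setMaster("local[*]"))

// The comparator is shipped to executors, so it is made Serializable.
val byLength = new Comparator[String] with Serializable {
  override def compare(a: String, b: String): Int = a.length - b.length
}

val words = jsc.parallelize(Arrays.asList("spark", "sql", "shuffle"))
println(words.max(byLength))  // "shuffle" -- the longest string
println(words.min(byLength))  // "sql"     -- the shortest string
jsc.stop()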