author     CodingCat <zhunansjtu@gmail.com>    2015-03-30 11:54:44 -0700
committer  Reynold Xin <rxin@databricks.com>   2015-03-30 11:54:44 -0700
commit     32259c671ab419f4c8a6ba8e2f7d676c5dfd0f4f (patch)
tree       8e8b4e37f8cf1d060f34a25ea9022c1dc370739f
parent     fe81f6c779213a91369ec61cf5489ad5c66cc49c (diff)
[SPARK-6592][SQL] fix filter for scaladoc to generate API doc for Row class under catalyst dir
https://issues.apache.org/jira/browse/SPARK-6592

The current implementation in SparkBuild.scala filters out all classes under the catalyst directory. However, there is a corner case: the Row class is a public API that lives under that directory. We need to include Row in the scaladoc while still excluding the other classes of the catalyst project.

Thanks for the help on this patch from rxin and liancheng.

Author: CodingCat <zhunansjtu@gmail.com>

Closes #5252 from CodingCat/SPARK-6592 and squashes the following commits:

02098a4 [CodingCat] ignore collection, enable types (except those protected classes)
f7af2cb [CodingCat] commit
3ab4403 [CodingCat] fix filter for scaladoc to generate API doc for Row.scala under catalyst directory
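The fix works because Row.scala sits in the catalyst project directory (sql/catalyst/...) but belongs to the org.apache.spark.sql package, so matching on the package path excludes catalyst internals without dropping Row. A minimal, runnable sketch of that difference, using two illustrative source paths (not part of the patch):

object FilterSketch extends App {
  val sources = Seq(
    // Row.scala lives in the catalyst project but in the org.apache.spark.sql package
    "sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala",
    // ScalaReflection is a genuine catalyst-internal class
    "sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala")

  // Old filter matched the project directory, so Row.scala was dropped too:
  println(sources.filterNot(_.contains("sql/catalyst")))
  // List()

  // New filter matches the package path, so only true catalyst classes are dropped:
  println(sources.filterNot(_.contains("org/apache/spark/sql/catalyst")))
  // List(sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala)
}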
-rw-r--r--  project/SparkBuild.scala                                                           | 16
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala  |  2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala       |  2
3 files changed, 10 insertions(+), 10 deletions(-)
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index ac37c605de..d3faa551a4 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -360,15 +360,15 @@ object Unidoc {
packages
.map(_.filterNot(_.getName.contains("$")))
.map(_.filterNot(_.getCanonicalPath.contains("akka")))
- .map(_.filterNot(_.getCanonicalPath.contains("deploy")))
- .map(_.filterNot(_.getCanonicalPath.contains("network")))
- .map(_.filterNot(_.getCanonicalPath.contains("shuffle")))
- .map(_.filterNot(_.getCanonicalPath.contains("executor")))
+ .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/deploy")))
+ .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/network")))
+ .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/shuffle")))
+ .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/executor")))
.map(_.filterNot(_.getCanonicalPath.contains("python")))
- .map(_.filterNot(_.getCanonicalPath.contains("collection")))
- .map(_.filterNot(_.getCanonicalPath.contains("sql/catalyst")))
- .map(_.filterNot(_.getCanonicalPath.contains("sql/execution")))
- .map(_.filterNot(_.getCanonicalPath.contains("sql/hive/test")))
+ .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/util/collection")))
+ .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/catalyst")))
+ .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/execution")))
+ .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/hive/test")))
}
lazy val settings = scalaJavaUnidocSettings ++ Seq (
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala
index c243be07a9..a9d63e7849 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.ScalaReflection
import org.apache.spark.sql.catalyst.expressions.GenericMutableRow
-protected[sql] object DataTypeConversions {
+private[sql] object DataTypeConversions {
def productToRow(product: Product, schema: StructType): Row = {
val mutableRow = new GenericMutableRow(product.productArity)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala
index 89278f7dbc..34270d0ca7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala
@@ -112,4 +112,4 @@ private[sql] object DataTypeParser {
}
/** The exception thrown from the [[DataTypeParser]]. */
-protected[sql] class DataTypeException(message: String) extends Exception(message)
+private[sql] class DataTypeException(message: String) extends Exception(message)
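
The two one-line visibility changes above go with the commit note "enable types (except those protected classes)": once org/apache/spark/sql/types is no longer filtered out of unidoc, helpers meant to stay internal are tightened from protected[sql] to private[sql]. A minimal sketch with hypothetical class names, assuming scaladoc's usual behavior of including protected symbols in the generated API doc while omitting private ones:

package org.apache.spark.sql.types

// Hypothetical names, for illustration only.
protected[sql] class StillDocumented(msg: String) extends Exception(msg)  // can surface in the API doc
private[sql] class KeptInternal(msg: String) extends Exception(msg)       // excluded from the API doc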