-rw-r--r--  project/SparkBuild.scala  16
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala  2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala  2
3 files changed, 10 insertions, 10 deletions
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index ac37c605de..d3faa551a4 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -360,15 +360,15 @@ object Unidoc {
     packages
       .map(_.filterNot(_.getName.contains("$")))
       .map(_.filterNot(_.getCanonicalPath.contains("akka")))
-      .map(_.filterNot(_.getCanonicalPath.contains("deploy")))
-      .map(_.filterNot(_.getCanonicalPath.contains("network")))
-      .map(_.filterNot(_.getCanonicalPath.contains("shuffle")))
-      .map(_.filterNot(_.getCanonicalPath.contains("executor")))
+      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/deploy")))
+      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/network")))
+      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/shuffle")))
+      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/executor")))
       .map(_.filterNot(_.getCanonicalPath.contains("python")))
-      .map(_.filterNot(_.getCanonicalPath.contains("collection")))
-      .map(_.filterNot(_.getCanonicalPath.contains("sql/catalyst")))
-      .map(_.filterNot(_.getCanonicalPath.contains("sql/execution")))
-      .map(_.filterNot(_.getCanonicalPath.contains("sql/hive/test")))
+      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/util/collection")))
+      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/catalyst")))
+      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/execution")))
+      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/hive/test")))
   }

   lazy val settings = scalaJavaUnidocSettings ++ Seq (
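Why the qualified prefixes matter: getCanonicalPath returns a full filesystem path, so a bare substring like "network" or "collection" also excludes any unrelated package whose path happens to contain that word. A minimal sketch of the over-matching, using plain strings and hypothetical paths rather than the java.io.File objects the build actually filters:

    // Hedged sketch: bare substrings over-match, qualified prefixes do not.
    // Both paths are hypothetical examples, not taken from the Spark tree.
    object FilterSketch {
      val paths = Seq(
        "core/src/main/scala/org/apache/spark/network/BufferMessage.scala",
        "examples/src/main/scala/org/example/networking/Client.scala")

      def main(args: Array[String]): Unit = {
        // Old filter: both paths contain "network", so both are dropped.
        println(paths.filterNot(_.contains("network")))      // List()
        // New filter: only the Spark-internal package is dropped.
        println(paths.filterNot(_.contains("org/apache/spark/network")))
        // List(examples/src/main/scala/org/example/networking/Client.scala)
      }
    }

One caveat: getCanonicalPath uses the platform separator, so the forward-slash prefixes assume the unidoc build runs on a Unix-like filesystem.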
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala
index c243be07a9..a9d63e7849 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.ScalaReflection
 import org.apache.spark.sql.catalyst.expressions.GenericMutableRow

-protected[sql] object DataTypeConversions {
+private[sql] object DataTypeConversions {
   def productToRow(product: Product, schema: StructType): Row = {
     val mutableRow = new GenericMutableRow(product.productArity)
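A note on the modifier change: protected[sql] grants access within package sql plus to subclasses of an enclosing class, but a top-level object has no enclosing class and cannot be extended, so private[sql] states the actual contract: visible inside the sql package (nested packages included) and nowhere else. A minimal sketch of package-qualified private scoping, with hypothetical package and object names:

    // Hedged sketch of private[pkg] scoping; all names are hypothetical.
    package org.example.sql {
      private[sql] object Conversions {
        def widen(x: Int): Long = x.toLong
      }

      package nested {
        object Inside {
          // Compiles: nested lives inside org.example.sql.
          val ok: Long = org.example.sql.Conversions.widen(1)
        }
      }
    }

    package org.example.other {
      object Outside {
        // Would not compile: Conversions is private to org.example.sql.
        // val bad = org.example.sql.Conversions.widen(1)
      }
    }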
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala
index 89278f7dbc..34270d0ca7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala
@@ -112,4 +112,4 @@ private[sql] object DataTypeParser {
 }

 /** The exception thrown from the [[DataTypeParser]]. */
-protected[sql] class DataTypeException(message: String) extends Exception(message)
+private[sql] class DataTypeException(message: String) extends Exception(message)
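The same change applies to the exception class, and here too private[sql] is the clearer spelling: the protected half of protected[sql] only adds access for subclasses of an enclosing class, which a top-level definition does not have, so the two modifiers confer the same access while private[sql] names the intent. A small sketch under that reading, with hypothetical names:

    // Hedged sketch: qualified modifiers on top-level definitions.
    package org.example.sql

    // Package-scoped and explicit about it, as in the patch.
    private[sql] class InternalException(msg: String) extends Exception(msg)

    // Legal but misleading: with no enclosing class to inherit from,
    // protected[sql] grants the same access as private[sql] here.
    protected[sql] class LegacyException(msg: String) extends Exception(msg)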