author     Zheng RuiFeng <ruifengz@foxmail.com>  2016-05-09 11:20:48 -0700
committer  Andrew Or <andrew@databricks.com>  2016-05-09 11:21:16 -0700
commit     dfdcab00c7b6200c22883baa3ebc5818be09556f (patch)
tree       4ceba755b667a825feb149aac7a984f2d67368ba
parent     f8aca5b4a98ee16c296aa7850925fdc756813b87 (diff)
[SPARK-15210][SQL] Add missing @DeveloperApi annotation in sql.types
add DeveloperApi annotation for `AbstractDataType`, `MapType`, and `UserDefinedType`

Tested with a local build.

Author: Zheng RuiFeng <ruifengz@foxmail.com>

Closes #12982 from zhengruifeng/types_devapi.
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala  2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala           2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala   3
3 files changed, 6 insertions(+), 1 deletion(-)
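The change itself is mechanical and the same in each file: the Scaladoc already carries the `:: DeveloperApi ::` marker, and the patch adds the matching `@DeveloperApi` annotation on the class declaration. A minimal sketch of that pattern, with a made-up class name used purely for illustration:

import org.apache.spark.annotation.DeveloperApi

/**
 * :: DeveloperApi ::
 * ExampleType is hypothetical; it only illustrates pairing the Scaladoc
 * marker with the annotation, as this patch does for the sql.types classes.
 */
@DeveloperApi
class ExampleType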
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
index 90af10f7a6..03ea349221 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql.types
import scala.reflect.ClassTag
import scala.reflect.runtime.universe.{runtimeMirror, TypeTag}
+import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.sql.catalyst.ScalaReflectionLock
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.util.Utils
@@ -141,6 +142,7 @@ protected[sql] abstract class AtomicType extends DataType {
* :: DeveloperApi ::
* Numeric data types.
*/
+@DeveloperApi
abstract class NumericType extends AtomicType {
// Unfortunately we can't get this implicitly as that breaks Spark Serialization. In order for
// implicitly[Numeric[JvmType]] to be valid, we have to change JvmType from a type variable to a
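With `NumericType` annotated, its intended audience is explicit. A common way downstream code uses it is to match on it when inspecting a schema; a hedged sketch, with the schema and field names invented for illustration:

import org.apache.spark.sql.types._

// Illustrative schema, not part of this patch.
val schema = StructType(Seq(
  StructField("id", LongType, nullable = false),
  StructField("score", DoubleType),
  StructField("label", StringType)))

// Collect the names of the numeric columns by matching on NumericType.
val numericFields = schema.fields.collect {
  case StructField(name, _: NumericType, _, _) => name
}
// numericFields contains "id" and "score"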
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
index 5474954af7..454ea403ba 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql.types
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
+import org.apache.spark.annotation.DeveloperApi
/**
* :: DeveloperApi ::
@@ -31,6 +32,7 @@ import org.json4s.JsonDSL._
* @param valueType The data type of map values.
* @param valueContainsNull Indicates if map values have `null` values.
*/
+@DeveloperApi
case class MapType(
keyType: DataType,
valueType: DataType,
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
index aa36121bde..894631382f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
@@ -96,11 +96,12 @@ abstract class UserDefinedType[UserType >: Null] extends DataType with Serializa
}
/**
- * ::DeveloperApi::
+ * :: DeveloperApi ::
* The user defined type in Python.
*
* Note: This can only be accessed via Python UDF, or accessed as serialized object.
*/
+@DeveloperApi
private[sql] class PythonUserDefinedType(
val sqlType: DataType,
override val pyUDT: String,
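`PythonUserDefinedType` stays `private[sql]` and is only reachable from Python, but the surrounding `UserDefinedType` contract (`sqlType`, `serialize`, `deserialize`, `userClass`) is the Scala-facing piece. A rough sketch of a concrete implementation against those signatures; `Point2D` and `Point2DUDT` are invented for the example, and since the UDT API's visibility was being tightened around Spark 2.0 this should be read as illustrative rather than a guaranteed public extension point:

import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData}
import org.apache.spark.sql.types._

// Hypothetical user class, not part of this patch.
class Point2D(val x: Double, val y: Double) extends Serializable

// Sketch of a UserDefinedType for Point2D, stored as an array of doubles.
class Point2DUDT extends UserDefinedType[Point2D] {

  // Underlying Catalyst representation of a point.
  override def sqlType: DataType = ArrayType(DoubleType, containsNull = false)

  override def serialize(p: Point2D): Any =
    new GenericArrayData(Array[Any](p.x, p.y))

  override def deserialize(datum: Any): Point2D = datum match {
    case data: ArrayData => new Point2D(data.getDouble(0), data.getDouble(1))
  }

  override def userClass: Class[Point2D] = classOf[Point2D]
}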