author    Reynold Xin <rxin@databricks.com>          2016-10-11 15:35:52 +0800
committer Wenchen Fan <wenchen@databricks.com>       2016-10-11 15:35:52 +0800
commit    3694ba48f0db0f47baea4b005cdeef3f454b7329 (patch)
tree      1282ff6a2ffb2749bf4e9d483947979f08944921 /sql/catalyst
parent    7388ad94d717784a1837ac5a4a9b53219892d080 (diff)
[SPARK-17864][SQL] Mark data type APIs as stable (not DeveloperApi)
## What changes were proposed in this pull request?

The data type API has not changed since Spark 1.3.0 and is ready for graduation. This patch marks it as a stable API using the new InterfaceStability annotation. It also goes through the remaining files in the catalyst module (not the `catalyst` package itself) and annotates the few remaining public classes appropriately.

## How was this patch tested?

This is an annotation-only change; there are no functional changes.

Author: Reynold Xin <rxin@databricks.com>

Closes #15426 from rxin/SPARK-17864.
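For readers skimming the diff, this is the shape of the change: a minimal sketch (not part of the patch; `ExampleType` is a hypothetical placeholder) of the annotation pattern applied to each public type class and its companion case object:

```scala
import org.apache.spark.annotation.InterfaceStability

/**
 * The data type representing `Example` values (hypothetical; mirrors the
 * pattern used by the real type classes in this patch).
 *
 * @since 1.3.0
 */
@InterfaceStability.Stable          // was: @DeveloperApi
class ExampleType private() extends DataType {
  override def defaultSize: Int = 8
  private[spark] override def asNullable: ExampleType = this
}

/**
 * @since 1.3.0
 */
@InterfaceStability.Stable
case object ExampleType extends ExampleType
```

Classes that are still experimental (e.g. `OutputMode`, `Encoder`, `Encoders`) receive `@InterfaceStability.Evolving` instead, and `Decimal` is marked `@InterfaceStability.Unstable`.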
Diffstat (limited to 'sql/catalyst')
-rw-r--r-- sql/catalyst/src/main/java/org/apache/spark/sql/RowFactory.java | 6
-rw-r--r-- sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java | 2
-rw-r--r-- sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java | 5
-rw-r--r-- sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java | 2
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala | 9
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala | 3
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala | 3
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala | 10
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala | 7
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala | 14
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala | 10
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala | 12
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala | 12
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala | 12
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala | 11
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala | 12
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala | 5
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala | 14
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala | 11
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala | 12
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala | 11
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala | 12
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala | 10
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala | 20
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala | 11
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala | 11
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala | 11
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructField.scala | 5
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala | 23
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala | 11
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala | 4
31 files changed, 207 insertions, 94 deletions
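A small usage sketch (assuming only the public `org.apache.spark.sql.types` API; not part of the patch) of the kind of downstream code these newly stable types support — schema construction and the JSON round-trip, with no SparkSession required:

```scala
import org.apache.spark.sql.types._

// Build a schema through the now-@Stable builder API on StructType.
val schema: StructType = new StructType()
  .add("id", LongType, nullable = false)
  .add("name", StringType)
  .add("scores", ArrayType(DoubleType, containsNull = true))
  .add("price", DecimalType(10, 2))

// JSON serialization of data types is part of the same stable surface;
// round-tripping through DataType.fromJson should reproduce the schema.
assert(DataType.fromJson(schema.json) == schema)
```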
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/RowFactory.java b/sql/catalyst/src/main/java/org/apache/spark/sql/RowFactory.java
index 5ed60fe78d..2ce1fdcbf5 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/RowFactory.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/RowFactory.java
@@ -17,16 +17,22 @@
package org.apache.spark.sql;
+import org.apache.spark.annotation.InterfaceStability;
import org.apache.spark.sql.catalyst.expressions.GenericRow;
/**
* A factory class used to construct {@link Row} objects.
+ *
+ * @since 1.3.0
*/
+@InterfaceStability.Stable
public class RowFactory {
/**
* Create a {@link Row} from the given arguments. Position i in the argument list becomes
* position i in the created {@link Row} object.
+ *
+ * @since 1.3.0
*/
public static Row create(Object ... values) {
return new GenericRow(values);
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java b/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java
index 41e2582921..49a18df2c7 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java
@@ -18,6 +18,7 @@
package org.apache.spark.sql.streaming;
import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.InterfaceStability;
import org.apache.spark.sql.InternalOutputModes;
/**
@@ -29,6 +30,7 @@ import org.apache.spark.sql.InternalOutputModes;
* @since 2.0.0
*/
@Experimental
+@InterfaceStability.Evolving
public class OutputMode {
/**
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java b/sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java
index 747ab1809f..0f8570fe47 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java
@@ -19,10 +19,15 @@ package org.apache.spark.sql.types;
import java.util.*;
+import org.apache.spark.annotation.InterfaceStability;
+
/**
* To get/create specific data type, users should use singleton objects and factory methods
* provided by this class.
+ *
+ * @since 1.3.0
*/
+@InterfaceStability.Stable
public class DataTypes {
/**
* Gets the StringType object.
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java b/sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java
index 110ed460cc..1290614a32 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java
@@ -20,6 +20,7 @@ package org.apache.spark.sql.types;
import java.lang.annotation.*;
import org.apache.spark.annotation.DeveloperApi;
+import org.apache.spark.annotation.InterfaceStability;
/**
* ::DeveloperApi::
@@ -30,6 +31,7 @@ import org.apache.spark.annotation.DeveloperApi;
@DeveloperApi
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
+@InterfaceStability.Evolving
public @interface SQLUserDefinedType {
/**
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
index 6911843999..f3003306ac 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
@@ -17,17 +17,16 @@
package org.apache.spark.sql
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-// TODO: don't swallow original stack trace if it exists
-
/**
- * :: DeveloperApi ::
* Thrown when a query fails to analyze, usually because the query itself is invalid.
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
class AnalysisException protected[sql] (
val message: String,
val line: Option[Int] = None,
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
index 501c1304db..b9f8c46443 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql
import scala.annotation.implicitNotFound
import scala.reflect.ClassTag
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
import org.apache.spark.sql.types._
@@ -67,6 +67,7 @@ import org.apache.spark.sql.types._
* @since 1.6.0
*/
@Experimental
+@InterfaceStability.Evolving
@implicitNotFound("Unable to find encoder for type stored in a Dataset. Primitive types " +
"(Int, String, etc) and Product types (case classes) are supported by importing " +
"spark.implicits._ Support for serializing other types will be added in future " +
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala
index e72f67c48a..dc90659a67 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala
@@ -22,7 +22,7 @@ import java.lang.reflect.Modifier
import scala.reflect.{classTag, ClassTag}
import scala.reflect.runtime.universe.TypeTag
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
import org.apache.spark.sql.catalyst.analysis.GetColumnByOrdinal
import org.apache.spark.sql.catalyst.encoders.{encoderFor, ExpressionEncoder}
import org.apache.spark.sql.catalyst.expressions.{BoundReference, Cast}
@@ -36,6 +36,7 @@ import org.apache.spark.sql.types._
* @since 1.6.0
*/
@Experimental
+@InterfaceStability.Evolving
object Encoders {
/**
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala
index e16850efbe..344dcb9bce 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala
@@ -20,9 +20,14 @@ package org.apache.spark.sql
import scala.collection.JavaConverters._
import scala.util.hashing.MurmurHash3
+import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.expressions.GenericRow
import org.apache.spark.sql.types.StructType
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
object Row {
/**
* This method can be used to extract fields from a [[Row]] object in a pattern match. Example:
@@ -117,8 +122,9 @@ object Row {
* }
* }}}
*
- * @group row
+ * @since 1.3.0
*/
+@InterfaceStability.Stable
trait Row extends Serializable {
/** Number of elements in the Row. */
def size: Int = length
@@ -351,7 +357,7 @@ trait Row extends Serializable {
}.toMap
}
- override def toString(): String = s"[${this.mkString(",")}]"
+ override def toString: String = s"[${this.mkString(",")}]"
/**
* Make a copy of the current [[Row]] object.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
index 1981fd8f0a..76dbb7cf0a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.types
import scala.reflect.runtime.universe.TypeTag
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.expressions.Expression
/**
@@ -131,10 +131,11 @@ protected[sql] abstract class AtomicType extends DataType {
/**
- * :: DeveloperApi ::
* Numeric data types.
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
abstract class NumericType extends AtomicType {
// Unfortunately we can't get this implicitly as that breaks Spark Serialization. In order for
// implicitly[Numeric[JvmType]] to be valid, we have to change JvmType from a type variable to a
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
index 82a03b0afc..5d70ef0137 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
@@ -21,9 +21,15 @@ import scala.math.Ordering
import org.json4s.JsonDSL._
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.util.ArrayData
+/**
+ * Companion object for ArrayType.
+ *
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
object ArrayType extends AbstractDataType {
/** Construct a [[ArrayType]] object with the given element type. The `containsNull` is true. */
def apply(elementType: DataType): ArrayType = ArrayType(elementType, containsNull = true)
@@ -37,9 +43,7 @@ object ArrayType extends AbstractDataType {
override private[sql] def simpleString: String = "array"
}
-
/**
- * :: DeveloperApi ::
* The data type for collections of multiple values.
* Internally these are represented as columns that contain a ``scala.collection.Seq``.
*
@@ -51,8 +55,10 @@ object ArrayType extends AbstractDataType {
*
* @param elementType The data type of values.
* @param containsNull Indicates if values have `null` values
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
case class ArrayType(elementType: DataType, containsNull: Boolean) extends DataType {
/** No-arg constructor for kryo. */
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala
index c40e140e8c..a4a358a242 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala
@@ -20,17 +20,16 @@ package org.apache.spark.sql.types
import scala.math.Ordering
import scala.reflect.runtime.universe.typeTag
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.ScalaReflectionLock
import org.apache.spark.sql.catalyst.util.TypeUtils
/**
- * :: DeveloperApi ::
* The data type representing `Array[Byte]` values.
* Please use the singleton [[DataTypes.BinaryType]].
*/
-@DeveloperApi
+@InterfaceStability.Stable
class BinaryType private() extends AtomicType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "BinaryType$" in byte code.
@@ -54,5 +53,8 @@ class BinaryType private() extends AtomicType {
private[spark] override def asNullable: BinaryType = this
}
-
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
case object BinaryType extends BinaryType
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala
index 2d8ee3d9bc..059f89f9cd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala
@@ -20,15 +20,16 @@ package org.apache.spark.sql.types
import scala.math.Ordering
import scala.reflect.runtime.universe.typeTag
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.ScalaReflectionLock
/**
- * :: DeveloperApi ::
* The data type representing `Boolean` values. Please use the singleton [[DataTypes.BooleanType]].
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
class BooleanType private() extends AtomicType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "BooleanType$" in byte code.
@@ -45,5 +46,8 @@ class BooleanType private() extends AtomicType {
private[spark] override def asNullable: BooleanType = this
}
-
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
case object BooleanType extends BooleanType
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala
index d37130e27b..bc6251f024 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala
@@ -20,14 +20,15 @@ package org.apache.spark.sql.types
import scala.math.{Integral, Numeric, Ordering}
import scala.reflect.runtime.universe.typeTag
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.ScalaReflectionLock
/**
- * :: DeveloperApi ::
* The data type representing `Byte` values. Please use the singleton [[DataTypes.ByteType]].
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
class ByteType private() extends IntegralType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "ByteType$" in byte code.
@@ -48,4 +49,9 @@ class ByteType private() extends IntegralType {
private[spark] override def asNullable: ByteType = this
}
+
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
case object ByteType extends ByteType
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala
index 3565f52c21..e121044288 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala
@@ -17,19 +17,19 @@
package org.apache.spark.sql.types
-import org.apache.spark.annotation.DeveloperApi
-
+import org.apache.spark.annotation.InterfaceStability
/**
- * :: DeveloperApi ::
* The data type representing calendar time intervals. The calendar time interval is stored
* internally in two components: number of months the number of microseconds.
*
* Note that calendar intervals are not comparable.
*
* Please use the singleton [[DataTypes.CalendarIntervalType]].
+ *
+ * @since 1.5.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
class CalendarIntervalType private() extends DataType {
override def defaultSize: Int = 16
@@ -37,4 +37,8 @@ class CalendarIntervalType private() extends DataType {
private[spark] override def asNullable: CalendarIntervalType = this
}
+/**
+ * @since 1.5.0
+ */
+@InterfaceStability.Stable
case object CalendarIntervalType extends CalendarIntervalType
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
index 4fc65cbce1..312585df15 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
@@ -22,15 +22,16 @@ import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.util.Utils
/**
- * :: DeveloperApi ::
* The base type of all Spark SQL data types.
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
abstract class DataType extends AbstractDataType {
/**
* Enables matching against DataType for expressions:
@@ -94,6 +95,10 @@ abstract class DataType extends AbstractDataType {
}
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
object DataType {
def fromJson(json: String): DataType = parseDataType(parse(json))
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala
index 2c966230e4..8d0ecc051f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala
@@ -20,19 +20,20 @@ package org.apache.spark.sql.types
import scala.math.Ordering
import scala.reflect.runtime.universe.typeTag
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.ScalaReflectionLock
/**
- * :: DeveloperApi ::
* A date type, supporting "0001-01-01" through "9999-12-31".
*
* Please use the singleton [[DataTypes.DateType]].
*
* Internally, this is represented as the number of days from 1970-01-01.
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
class DateType private() extends AtomicType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "DateType$" in byte code.
@@ -51,5 +52,8 @@ class DateType private() extends AtomicType {
private[spark] override def asNullable: DateType = this
}
-
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
case object DateType extends DateType
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index 7085905287..465fb83669 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.types
import java.lang.{Long => JLong}
import java.math.{BigInteger, MathContext, RoundingMode}
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
/**
* A mutable implementation of BigDecimal that can hold a Long if values are small enough.
@@ -30,6 +30,7 @@ import org.apache.spark.annotation.DeveloperApi
* - If decimalVal is set, it represents the whole decimal value
* - Otherwise, the decimal value is longVal / (10 ** _scale)
*/
+@InterfaceStability.Unstable
final class Decimal extends Ordered[Decimal] with Serializable {
import org.apache.spark.sql.types.Decimal._
@@ -185,7 +186,6 @@ final class Decimal extends Ordered[Decimal] with Serializable {
override def toString: String = toBigDecimal.toString()
- @DeveloperApi
def toDebugString: String = {
if (decimalVal.ne(null)) {
s"Decimal(expanded,$decimalVal,$precision,$scale})"
@@ -380,6 +380,7 @@ final class Decimal extends Ordered[Decimal] with Serializable {
}
}
+@InterfaceStability.Unstable
object Decimal {
val ROUND_HALF_UP = BigDecimal.RoundingMode.HALF_UP
val ROUND_HALF_EVEN = BigDecimal.RoundingMode.HALF_EVEN
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
index 6500875f95..d7ca0cbeed 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
@@ -19,14 +19,13 @@ package org.apache.spark.sql.types
import scala.reflect.runtime.universe.typeTag
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.ScalaReflectionLock
import org.apache.spark.sql.catalyst.expressions.Expression
/**
- * :: DeveloperApi ::
* The data type representing `java.math.BigDecimal` values.
* A Decimal that must have fixed precision (the maximum number of digits) and scale (the number
* of digits on right side of dot).
@@ -36,8 +35,10 @@ import org.apache.spark.sql.catalyst.expressions.Expression
* The default precision and scale is (10, 0).
*
* Please use [[DataTypes.createDecimalType()]] to create a specific instance.
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
case class DecimalType(precision: Int, scale: Int) extends FractionalType {
if (scale > precision) {
@@ -101,7 +102,12 @@ case class DecimalType(precision: Int, scale: Int) extends FractionalType {
}
-/** Extra factory methods and pattern matchers for Decimals */
+/**
+ * Extra factory methods and pattern matchers for Decimals.
+ *
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
object DecimalType extends AbstractDataType {
import scala.math.min
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala
index e553f65f3c..c21ac0e43e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala
@@ -21,15 +21,16 @@ import scala.math.{Fractional, Numeric, Ordering}
import scala.math.Numeric.DoubleAsIfIntegral
import scala.reflect.runtime.universe.typeTag
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.ScalaReflectionLock
import org.apache.spark.util.Utils
/**
- * :: DeveloperApi ::
* The data type representing `Double` values. Please use the singleton [[DataTypes.DoubleType]].
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
class DoubleType private() extends FractionalType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "DoubleType$" in byte code.
@@ -51,4 +52,8 @@ class DoubleType private() extends FractionalType {
private[spark] override def asNullable: DoubleType = this
}
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
case object DoubleType extends DoubleType
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala
index ae9aa9eefa..c5bf8883ba 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala
@@ -21,15 +21,16 @@ import scala.math.{Fractional, Numeric, Ordering}
import scala.math.Numeric.FloatAsIfIntegral
import scala.reflect.runtime.universe.typeTag
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.ScalaReflectionLock
import org.apache.spark.util.Utils
/**
- * :: DeveloperApi ::
* The data type representing `Float` values. Please use the singleton [[DataTypes.FloatType]].
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
class FloatType private() extends FractionalType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "FloatType$" in byte code.
@@ -51,4 +52,9 @@ class FloatType private() extends FractionalType {
private[spark] override def asNullable: FloatType = this
}
+
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
case object FloatType extends FloatType
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala
index 38a7b8ee52..724e59c0bc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala
@@ -20,15 +20,16 @@ package org.apache.spark.sql.types
import scala.math.{Integral, Numeric, Ordering}
import scala.reflect.runtime.universe.typeTag
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.ScalaReflectionLock
/**
- * :: DeveloperApi ::
* The data type representing `Int` values. Please use the singleton [[DataTypes.IntegerType]].
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
class IntegerType private() extends IntegralType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "IntegerType$" in byte code.
@@ -49,4 +50,8 @@ class IntegerType private() extends IntegralType {
private[spark] override def asNullable: IntegerType = this
}
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
case object IntegerType extends IntegerType
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala
index 88aff0c877..42285a9d0a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala
@@ -20,14 +20,15 @@ package org.apache.spark.sql.types
import scala.math.{Integral, Numeric, Ordering}
import scala.reflect.runtime.universe.typeTag
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.ScalaReflectionLock
/**
- * :: DeveloperApi ::
* The data type representing `Long` values. Please use the singleton [[DataTypes.LongType]].
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
class LongType private() extends IntegralType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "LongType$" in byte code.
@@ -48,5 +49,8 @@ class LongType private() extends IntegralType {
private[spark] override def asNullable: LongType = this
}
-
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
case object LongType extends LongType
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
index 178960929b..3a32aa43d1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
@@ -20,10 +20,9 @@ package org.apache.spark.sql.types
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
/**
- * :: DeveloperApi ::
* The data type for Maps. Keys in a map are not allowed to have `null` values.
*
* Please use [[DataTypes.createMapType()]] to create a specific instance.
@@ -32,7 +31,7 @@ import org.apache.spark.annotation.DeveloperApi
* @param valueType The data type of map values.
* @param valueContainsNull Indicates if map values have `null` values.
*/
-@DeveloperApi
+@InterfaceStability.Stable
case class MapType(
keyType: DataType,
valueType: DataType,
@@ -76,7 +75,10 @@ case class MapType(
}
}
-
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
object MapType extends AbstractDataType {
override private[sql] def defaultConcreteType: DataType = apply(NullType, NullType)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala
index 657bd86ce1..3aa4bf619f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala
@@ -22,22 +22,22 @@ import scala.collection.mutable
import org.json4s._
import org.json4s.jackson.JsonMethods._
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
/**
- * :: DeveloperApi ::
- *
* Metadata is a wrapper over Map[String, Any] that limits the value type to simple ones: Boolean,
* Long, Double, String, Metadata, Array[Boolean], Array[Long], Array[Double], Array[String], and
* Array[Metadata]. JSON is used for serialization.
*
* The default constructor is private. User should use either [[MetadataBuilder]] or
- * [[Metadata.fromJson()]] to create Metadata instances.
+ * `Metadata.fromJson()` to create Metadata instances.
*
* @param map an immutable map that stores the data
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
sealed class Metadata private[types] (private[types] val map: Map[String, Any])
extends Serializable {
@@ -114,6 +114,10 @@ sealed class Metadata private[types] (private[types] val map: Map[String, Any])
private[sql] def jsonValue: JValue = Metadata.toJsonValue(this)
}
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
object Metadata {
private[this] val _empty = new Metadata(Map.empty)
@@ -218,11 +222,11 @@ object Metadata {
}
/**
- * :: DeveloperApi ::
- *
* Builder for [[Metadata]]. If there is a key collision, the latter will overwrite the former.
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
class MetadataBuilder {
private val map: mutable.Map[String, Any] = mutable.Map.empty
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala
index aa84115c2e..bdf9a819d0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala
@@ -17,14 +17,15 @@
package org.apache.spark.sql.types
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
/**
- * :: DeveloperApi ::
* The data type representing `NULL` values. Please use the singleton [[DataTypes.NullType]].
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
class NullType private() extends DataType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "NullType$" in byte code.
@@ -34,4 +35,8 @@ class NullType private() extends DataType {
private[spark] override def asNullable: NullType = this
}
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
case object NullType extends NullType
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala
index 486cf58528..3fee299d57 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala
@@ -20,14 +20,15 @@ package org.apache.spark.sql.types
import scala.math.{Integral, Numeric, Ordering}
import scala.reflect.runtime.universe.typeTag
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.ScalaReflectionLock
/**
- * :: DeveloperApi ::
* The data type representing `Short` values. Please use the singleton [[DataTypes.ShortType]].
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
class ShortType private() extends IntegralType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "ShortType$" in byte code.
@@ -48,4 +49,8 @@ class ShortType private() extends IntegralType {
private[spark] override def asNullable: ShortType = this
}
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
case object ShortType extends ShortType
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala
index 44a25361f3..5d5a6f52a3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala
@@ -20,15 +20,16 @@ package org.apache.spark.sql.types
import scala.math.Ordering
import scala.reflect.runtime.universe.typeTag
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.ScalaReflectionLock
import org.apache.spark.unsafe.types.UTF8String
/**
- * :: DeveloperApi ::
* The data type representing `String` values. Please use the singleton [[DataTypes.StringType]].
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
class StringType private() extends AtomicType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "StringType$" in byte code.
@@ -45,5 +46,9 @@ class StringType private() extends AtomicType {
private[spark] override def asNullable: StringType = this
}
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
case object StringType extends StringType
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructField.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructField.scala
index cb8bf61696..2c18fdcc49 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructField.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructField.scala
@@ -20,6 +20,8 @@ package org.apache.spark.sql.types
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
+import org.apache.spark.annotation.InterfaceStability
+
/**
* A field inside a StructType.
* @param name The name of this field.
@@ -27,7 +29,10 @@ import org.json4s.JsonDSL._
* @param nullable Indicates if values of this field can be `null` values.
* @param metadata The metadata of this field. The metadata should be preserved during
* transformation if the content of the column is not modified, e.g, in selection.
+ *
+ * @since 1.3.0
*/
+@InterfaceStability.Stable
case class StructField(
name: String,
dataType: DataType,
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
index dd4c88c4c4..0205c13aa9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
@@ -23,14 +23,13 @@ import scala.util.Try
import org.json4s.JsonDSL._
import org.apache.spark.SparkException
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, InterpretedOrdering}
import org.apache.spark.sql.catalyst.parser.{CatalystSqlParser, LegacyTypeStringParser}
import org.apache.spark.sql.catalyst.util.quoteIdentifier
import org.apache.spark.util.Utils
/**
- * :: DeveloperApi ::
* A [[StructType]] object can be constructed by
* {{{
* StructType(fields: Seq[StructField])
@@ -90,8 +89,10 @@ import org.apache.spark.util.Utils
* val row = Row(Row(1, 2, true))
* // row: Row = [[1,2,true]]
* }}}
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
case class StructType(fields: Array[StructField]) extends DataType with Seq[StructField] {
/** No-arg constructor for kryo. */
@@ -138,7 +139,7 @@ case class StructType(fields: Array[StructField]) extends DataType with Seq[Stru
* .add("c", StringType)
*/
def add(name: String, dataType: DataType): StructType = {
- StructType(fields :+ new StructField(name, dataType, nullable = true, Metadata.empty))
+ StructType(fields :+ StructField(name, dataType, nullable = true, Metadata.empty))
}
/**
@@ -150,7 +151,7 @@ case class StructType(fields: Array[StructField]) extends DataType with Seq[Stru
* .add("c", StringType, true)
*/
def add(name: String, dataType: DataType, nullable: Boolean): StructType = {
- StructType(fields :+ new StructField(name, dataType, nullable, Metadata.empty))
+ StructType(fields :+ StructField(name, dataType, nullable, Metadata.empty))
}
/**
@@ -167,7 +168,7 @@ case class StructType(fields: Array[StructField]) extends DataType with Seq[Stru
dataType: DataType,
nullable: Boolean,
metadata: Metadata): StructType = {
- StructType(fields :+ new StructField(name, dataType, nullable, metadata))
+ StructType(fields :+ StructField(name, dataType, nullable, metadata))
}
/**
@@ -347,7 +348,7 @@ case class StructType(fields: Array[StructField]) extends DataType with Seq[Stru
private[sql] override def simpleString(maxNumberFields: Int): String = {
val builder = new StringBuilder
val fieldTypes = fields.take(maxNumberFields).map {
- case f => s"${f.name}: ${f.dataType.simpleString(maxNumberFields)}"
+ f => s"${f.name}: ${f.dataType.simpleString(maxNumberFields)}"
}
builder.append("struct<")
builder.append(fieldTypes.mkString(", "))
@@ -393,6 +394,10 @@ case class StructType(fields: Array[StructField]) extends DataType with Seq[Stru
InterpretedOrdering.forSchema(this.fields.map(_.dataType))
}
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
object StructType extends AbstractDataType {
/**
@@ -469,7 +474,7 @@ object StructType extends AbstractDataType {
nullable = leftNullable || rightNullable)
}
.orElse {
- optionalMeta.putBoolean(metadataKeyForOptionalField, true)
+ optionalMeta.putBoolean(metadataKeyForOptionalField, value = true)
Some(leftField.copy(metadata = optionalMeta.build()))
}
.foreach(newFields += _)
@@ -479,7 +484,7 @@ object StructType extends AbstractDataType {
rightFields
.filterNot(f => leftMapped.get(f.name).nonEmpty)
.foreach { f =>
- optionalMeta.putBoolean(metadataKeyForOptionalField, true)
+ optionalMeta.putBoolean(metadataKeyForOptionalField, value = true)
newFields += f.copy(metadata = optionalMeta.build())
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala
index 2be9b2d76c..4540d8358a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala
@@ -20,16 +20,17 @@ package org.apache.spark.sql.types
import scala.math.Ordering
import scala.reflect.runtime.universe.typeTag
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.ScalaReflectionLock
/**
- * :: DeveloperApi ::
* The data type representing `java.sql.Timestamp` values.
* Please use the singleton [[DataTypes.TimestampType]].
+ *
+ * @since 1.3.0
*/
-@DeveloperApi
+@InterfaceStability.Stable
class TimestampType private() extends AtomicType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "TimestampType$" in byte code.
@@ -48,4 +49,8 @@ class TimestampType private() extends AtomicType {
private[spark] override def asNullable: TimestampType = this
}
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
case object TimestampType extends TimestampType
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
index 894631382f..c33219c95b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
@@ -22,8 +22,6 @@ import java.util.Objects
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
-import org.apache.spark.annotation.DeveloperApi
-
/**
* The data type for User Defined Types (UDTs).
*
@@ -96,12 +94,10 @@ abstract class UserDefinedType[UserType >: Null] extends DataType with Serializa
}
/**
- * :: DeveloperApi ::
* The user defined type in Python.
*
* Note: This can only be accessed via Python UDF, or accessed as serialized object.
*/
-@DeveloperApi
private[sql] class PythonUserDefinedType(
val sqlType: DataType,
override val pyUDT: String,