author    Reynold Xin <rxin@databricks.com>  2015-05-30 19:50:52 -0700
committer Reynold Xin <rxin@databricks.com>  2015-05-30 19:50:52 -0700
commit    14b314dc2cad7bbf23976347217c676d338e0a2d
tree      9506f7f2fcc5c7bc57a1799717c4f1fa081a71e3
parent    2b258e1c0784c8ca958bf94cd9e75fa17f104448
[SQL] Tighten up visibility for JavaDoc.
I went through all the JavaDocs and tightened up visibility.

Author: Reynold Xin <rxin@databricks.com>

Closes #6526 from rxin/sql-1.4-visibility-for-docs and squashes the following commits:

bc37d1e [Reynold Xin] Tighten up visibility for JavaDoc.
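Every change in this patch relies on the same device: Scala's scoped access modifier. A minimal sketch of how it behaves (hypothetical names, not from this patch):

  package org.apache.spark.sql.types

  // `private[sql]` makes the member visible everywhere under the enclosing
  // org.apache.spark.sql package, while hiding it from user code and from
  // the generated Javadoc/Scaladoc, which is the point of the patch.
  private[sql] object InternalHelper {
    def clamp(x: Int, lo: Int, hi: Int): Int = math.max(lo, math.min(x, hi))
  }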
Diffstat (limited to 'sql')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala            |  6
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala        |  4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/SQLUserDefinedType.java  |  4
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/GroupedData.scala                  |  8
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/expressions/Window.scala           | 17
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala           |  3
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala                  |  2
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ReflectionMagic.scala  |  2
8 files changed, 32 insertions, 14 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index 994c5202c1..eb3c58c37f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -313,7 +313,7 @@ object Decimal {
// See scala.math's Numeric.scala for examples for Scala's built-in types.
/** Common methods for Decimal evidence parameters */
- trait DecimalIsConflicted extends Numeric[Decimal] {
+ private[sql] trait DecimalIsConflicted extends Numeric[Decimal] {
override def plus(x: Decimal, y: Decimal): Decimal = x + y
override def times(x: Decimal, y: Decimal): Decimal = x * y
override def minus(x: Decimal, y: Decimal): Decimal = x - y
@@ -327,12 +327,12 @@ object Decimal {
}
/** A [[scala.math.Fractional]] evidence parameter for Decimals. */
- object DecimalIsFractional extends DecimalIsConflicted with Fractional[Decimal] {
+ private[sql] object DecimalIsFractional extends DecimalIsConflicted with Fractional[Decimal] {
override def div(x: Decimal, y: Decimal): Decimal = x / y
}
/** A [[scala.math.Integral]] evidence parameter for Decimals. */
- object DecimalAsIfIntegral extends DecimalIsConflicted with Integral[Decimal] {
+ private[sql] object DecimalAsIfIntegral extends DecimalIsConflicted with Integral[Decimal] {
override def quot(x: Decimal, y: Decimal): Decimal = x / y
override def rem(x: Decimal, y: Decimal): Decimal = x % y
}
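For orientation: these objects exist so that code generic in scala.math.Numeric can be instantiated at Decimal. A minimal sketch of the pattern (a generic illustration, not Spark's own code; after this change it only compiles inside org.apache.spark.sql, and `decimals` is hypothetical):

  import org.apache.spark.sql.types.Decimal

  // Any function generic in Numeric[T] works for Decimal.
  def total[T](xs: Seq[T])(implicit num: Numeric[T]): T =
    xs.foldLeft(num.zero)(num.plus)

  // The evidence object is passed explicitly, since it is declared as a
  // plain object rather than an implicit value.
  val grandTotal = total(decimals)(Decimal.DecimalIsFractional)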
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
index 0f8cecd28f..407dc27326 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
@@ -82,12 +82,12 @@ case class DecimalType(precisionInfo: Option[PrecisionInfo]) extends FractionalT
object DecimalType {
val Unlimited: DecimalType = DecimalType(None)
- object Fixed {
+ private[sql] object Fixed {
def unapply(t: DecimalType): Option[(Int, Int)] =
t.precisionInfo.map(p => (p.precision, p.scale))
}
- object Expression {
+ private[sql] object Expression {
def unapply(e: Expression): Option[(Int, Int)] = e.dataType match {
case t: DecimalType => t.precisionInfo.map(p => (p.precision, p.scale))
case _ => None
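Fixed and Expression are extractor objects (they define unapply), so Catalyst code can pattern-match on decimal precision. A hedged sketch of the call-site shape, legal only inside org.apache.spark.sql after this change:

  import org.apache.spark.sql.types.DecimalType

  def describe(dt: DecimalType): String = dt match {
    case DecimalType.Fixed(precision, scale) => s"decimal($precision,$scale)"
    case _ => "decimal(unlimited)"
  }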
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/SQLUserDefinedType.java b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/SQLUserDefinedType.java
index a64d2bb7cd..df64a878b6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/SQLUserDefinedType.java
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/SQLUserDefinedType.java
@@ -24,11 +24,11 @@ import org.apache.spark.annotation.DeveloperApi;
/**
* ::DeveloperApi::
* A user-defined type which can be automatically recognized by a SQLContext and registered.
- *
+ * <p>
* WARNING: This annotation will only work if both Java and Scala reflection return the same class
* names (after erasure) for the UDT. This will NOT be the case when, e.g., the UDT class
* is enclosed in an object (a singleton).
- *
+ * <p>
* WARNING: UDTs are currently only supported from Scala.
*/
// TODO: Should I used @Documented ?
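The substantive fix in this hunk: Javadoc collapses blank comment lines into a single paragraph, so the bare `*` lines are replaced with `<p>` to keep the two warnings visually separate. For reference, a hedged sketch of how the annotation is applied (Point and PointUDT are hypothetical):

  // The UDT class must be top-level, per the first warning above.
  @SQLUserDefinedType(udt = classOf[PointUDT])
  class Point(val x: Double, val y: Double)

  // PointUDT would extend UserDefinedType[Point] and define sqlType,
  // serialize, and deserialize.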
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/GroupedData.scala b/sql/core/src/main/scala/org/apache/spark/sql/GroupedData.scala
index 516ba2ac23..c4ceb0c173 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/GroupedData.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/GroupedData.scala
@@ -40,22 +40,22 @@ private[sql] object GroupedData {
/**
* The Grouping Type
*/
- trait GroupType
+ private[sql] trait GroupType
/**
* To indicate it's the GroupBy
*/
- object GroupByType extends GroupType
+ private[sql] object GroupByType extends GroupType
/**
* To indicate it's the CUBE
*/
- object CubeType extends GroupType
+ private[sql] object CubeType extends GroupType
/**
* To indicate it's the ROLLUP
*/
- object RollupType extends GroupType
+ private[sql] object RollupType extends GroupType
}
/**
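These markers are internal bookkeeping for the public grouping APIs, so nothing user-facing changes. A quick sketch of the DataFrame calls that produce each one (df and its column names are hypothetical):

  import org.apache.spark.sql.functions.sum

  df.groupBy("country").agg(sum("sales"))  // GroupByType
  df.cube("country", "year").count()       // CubeType
  df.rollup("country", "year").count()     // RollupType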
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/expressions/Window.scala b/sql/core/src/main/scala/org/apache/spark/sql/expressions/Window.scala
index d4003b2d9c..e9b60841fc 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/expressions/Window.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/expressions/Window.scala
@@ -79,3 +79,20 @@ object Window {
}
}
+
+/**
+ * :: Experimental ::
+ * Utility functions for defining window in DataFrames.
+ *
+ * {{{
+ * // PARTITION BY country ORDER BY date ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW
+ * Window.partitionBy("country").orderBy("date").rowsBetween(Long.MinValue, 0)
+ *
+ * // PARTITION BY country ORDER BY date ROWS BETWEEN 3 PRECEDING AND 3 FOLLOWING
+ * Window.partitionBy("country").orderBy("date").rowsBetween(-3, 3)
+ * }}}
+ *
+ * @since 1.4.0
+ */
+@Experimental
+class Window private() // So we can see Window in JavaDoc.
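To round out the new doc example: a window spec does nothing on its own; it is paired with a window function through Column.over. A hedged usage sketch (df and its columns are hypothetical):

  import org.apache.spark.sql.expressions.Window
  import org.apache.spark.sql.functions.{avg, col}

  // 7-row moving average (3 preceding, current, 3 following) per country.
  val w = Window.partitionBy("country").orderBy("date").rowsBetween(-3, 3)
  df.select(col("country"), col("date"), avg(col("revenue")).over(w))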
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala b/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala
index c06026e042..b1b997c030 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala
@@ -93,7 +93,7 @@ trait SchemaRelationProvider {
}
/**
- * ::DeveloperApi::
+ * ::Experimental::
* Implemented by objects that produce relations for a specific kind of data source
* with a given schema and partitioned columns. When Spark SQL is given a DDL operation with a
* USING clause specified (to specify the implemented [[HadoopFsRelationProvider]]), a user defined
@@ -115,6 +115,7 @@ trait SchemaRelationProvider {
*
* @since 1.4.0
*/
+@Experimental
trait HadoopFsRelationProvider {
/**
* Returns a new base relation with the given parameters, a user defined schema, and a list of
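For a sense of what the trait asks of a data source, a stub provider follows; the createRelation signature is reconstructed from memory of the 1.4 sources, so treat it as an assumption rather than the definitive API:

  import org.apache.spark.sql.SQLContext
  import org.apache.spark.sql.types.StructType

  class MyFormatProvider extends HadoopFsRelationProvider {
    override def createRelation(
        sqlContext: SQLContext,
        paths: Array[String],
        dataSchema: Option[StructType],
        partitionColumns: Option[StructType],
        parameters: Map[String, String]): HadoopFsRelation = {
      // A real provider returns a HadoopFsRelation that can scan
      // (and optionally write) the files under `paths`.
      ???
    }
  }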
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
index 3915ee8356..253bf11252 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
@@ -57,7 +57,7 @@ private[hive] case object NativePlaceholder extends LogicalPlan {
override def output: Seq[Attribute] = Seq.empty
}
-case class CreateTableAsSelect(
+private[hive] case class CreateTableAsSelect(
tableDesc: HiveTable,
child: LogicalPlan,
allowExisting: Boolean) extends UnaryNode with Command {
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ReflectionMagic.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ReflectionMagic.scala
index c600b158c5..4d053ae42c 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ReflectionMagic.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ReflectionMagic.scala
@@ -30,7 +30,7 @@ private[client] object ReflectionException {
/**
* Provides implicit functions on any object for calling methods reflectively.
*/
-protected trait ReflectionMagic {
+private[client] trait ReflectionMagic {
/** code for InstanceMagic
println(
(1 to 22).map { n =>
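The trait's generated helpers (the truncated template above stamps out arities 1 to 22) wrap plain java.lang.reflect calls. A generic sketch of the underlying pattern, not Spark's exact helper names:

  // Invoke a one-argument method by name on an arbitrary object; this is
  // the raw pattern the implicit helpers make ergonomic.
  def call(target: AnyRef, method: String, arg: AnyRef): AnyRef = {
    val m = target.getClass.getMethod(method, arg.getClass)
    m.invoke(target, arg)
  }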