aboutsummaryrefslogtreecommitdiff
path: root/sql/catalyst
diff options
context:
space:
mode:
authorMichael Armbrust <michael@databricks.com>2015-12-08 15:58:35 -0800
committerMichael Armbrust <michael@databricks.com>2015-12-08 15:58:35 -0800
commit39594894232e0b70c5ca8b0df137da0d61223fd5 (patch)
treed123d9be5c6e58f41b02e5c966a39edbc54e9275 /sql/catalyst
parent9494521695a1f1526aae76c0aea34a3bead96251 (diff)
downloadspark-39594894232e0b70c5ca8b0df137da0d61223fd5.tar.gz
spark-39594894232e0b70c5ca8b0df137da0d61223fd5.tar.bz2
spark-39594894232e0b70c5ca8b0df137da0d61223fd5.zip
[SPARK-12069][SQL] Update documentation with Datasets
Author: Michael Armbrust <michael@databricks.com> Closes #10060 from marmbrus/docs.
Diffstat (limited to 'sql/catalyst')
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala48
1 files changed, 45 insertions, 3 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
index 3ca5ade7f3..bb0fdc4c3d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
@@ -19,20 +19,60 @@ package org.apache.spark.sql
import java.lang.reflect.Modifier
+import scala.annotation.implicitNotFound
import scala.reflect.{ClassTag, classTag}
+import org.apache.spark.annotation.Experimental
import org.apache.spark.sql.catalyst.encoders.{ExpressionEncoder, encoderFor}
import org.apache.spark.sql.catalyst.expressions.{DecodeUsingSerializer, BoundReference, EncodeUsingSerializer}
import org.apache.spark.sql.types._
/**
+ * :: Experimental ::
* Used to convert a JVM object of type `T` to and from the internal Spark SQL representation.
*
- * Encoders are not intended to be thread-safe and thus they are allow to avoid internal locking
- * and reuse internal buffers to improve performance.
+ * == Scala ==
+ * Encoders are generally created automatically through implicits from a `SQLContext`.
+ *
+ * {{{
+ * import sqlContext.implicits._
+ *
+ * val ds = Seq(1, 2, 3).toDS() // implicitly provided (sqlContext.implicits.newIntEncoder)
+ * }}}
+ *
+ * == Java ==
+ * Encoders are specified by calling static methods on [[Encoders]].
+ *
+ * {{{
+ * List<String> data = Arrays.asList("abc", "abc", "xyz");
+ * Dataset<String> ds = context.createDataset(data, Encoders.STRING());
+ * }}}
+ *
+ * Encoders can be composed into tuples:
+ *
+ * {{{
+ * Encoder<Tuple2<Integer, String>> encoder2 = Encoders.tuple(Encoders.INT(), Encoders.STRING());
+ * List<Tuple2<Integer, String>> data2 = Arrays.asList(new scala.Tuple2(1, "a"));
+ * Dataset<Tuple2<Integer, String>> ds2 = context.createDataset(data2, encoder2);
+ * }}}
+ *
+ * Or constructed from Java Beans:
+ *
+ * {{{
+ * Encoders.bean(MyClass.class);
+ * }}}
+ *
+ * == Implementation ==
+ * - Encoders are not required to be thread-safe and thus they do not need to use locks to guard
+ * against concurrent access if they reuse internal buffers to improve performance.
*
* @since 1.6.0
*/
+@Experimental
+@implicitNotFound("Unable to find encoder for type stored in a Dataset. Primitive types " +
+ "(Int, String, etc) and Product types (case classes) are supported by importing " +
+ "sqlContext.implicits._ Support for serializing other types will be added in future " +
+ "releases.")
trait Encoder[T] extends Serializable {
/** Returns the schema of encoding this type of object as a Row. */
@@ -43,10 +83,12 @@ trait Encoder[T] extends Serializable {
}
/**
- * Methods for creating encoders.
+ * :: Experimental ::
+ * Methods for creating an [[Encoder]].
*
* @since 1.6.0
*/
+@Experimental
object Encoders {
/**