author    Liang-Chi Hsieh <simonh@tw.ibm.com>    2016-05-09 11:05:55 -0700
committer Andrew Or <andrew@databricks.com>      2016-05-09 11:06:08 -0700
commit    e083db2e9e87bf8b6c135568c2b4860f772fc277 (patch)
tree      ca485b0b79a6cf4ed9204668fe294bc22e141290 /sql
parent    65b4ab281efd170c9fad7152629f68eaef7f7088 (diff)
download  spark-e083db2e9e87bf8b6c135568c2b4860f772fc277.tar.gz
          spark-e083db2e9e87bf8b6c135568c2b4860f772fc277.tar.bz2
          spark-e083db2e9e87bf8b6c135568c2b4860f772fc277.zip
[SPARK-15225][SQL] Replace SQLContext with SparkSession in Encoder documentation
The `Encoder` Scaladoc still mentions `sqlContext.implicits._`; it should now refer to `sparkSession.implicits._`. This is a documentation-only update. Author: Liang-Chi Hsieh <simonh@tw.ibm.com> Closes #13002 from viirya/encoder-doc.
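For context, a minimal sketch of the idiom the updated documentation points to, assuming Spark 2.0's `SparkSession` API and a locally built session named `spark` (in `spark-shell` this value is predefined); the session and application names here are illustrative only:

{{{
import org.apache.spark.sql.SparkSession

// Create (or reuse) a SparkSession; `spark` is just an illustrative name.
val spark = SparkSession.builder()
  .appName("encoder-doc-example")
  .master("local[*]")
  .getOrCreate()

// Importing the session's implicits brings encoders for primitive and Product types into scope.
import spark.implicits._

val ds = Seq(1, 2, 3).toDS()  // uses the implicit Int encoder (spark.implicits.newIntEncoder)
ds.show()
}}}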
Diffstat (limited to 'sql')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala | 8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
index ffa694fcdc..501c1304db 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
@@ -29,13 +29,13 @@ import org.apache.spark.sql.types._
* Used to convert a JVM object of type `T` to and from the internal Spark SQL representation.
*
* == Scala ==
- * Encoders are generally created automatically through implicits from a `SQLContext`, or can be
+ * Encoders are generally created automatically through implicits from a `SparkSession`, or can be
* explicitly created by calling static methods on [[Encoders]].
*
* {{{
- * import sqlContext.implicits._
+ * import spark.implicits._
*
- * val ds = Seq(1, 2, 3).toDS() // implicitly provided (sqlContext.implicits.newIntEncoder)
+ * val ds = Seq(1, 2, 3).toDS() // implicitly provided (spark.implicits.newIntEncoder)
* }}}
*
* == Java ==
@@ -69,7 +69,7 @@ import org.apache.spark.sql.types._
@Experimental
@implicitNotFound("Unable to find encoder for type stored in a Dataset. Primitive types " +
"(Int, String, etc) and Product types (case classes) are supported by importing " +
- "sqlContext.implicits._ Support for serializing other types will be added in future " +
+ "spark.implicits._ Support for serializing other types will be added in future " +
"releases.")
trait Encoder[T] extends Serializable {
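As the Scaladoc above notes, encoders can also be created explicitly via the static methods on `Encoders` rather than through the session's implicits. A hedged sketch under the same assumptions as before, using a hypothetical `Person` case class:

{{{
import org.apache.spark.sql.{Encoder, Encoders, SparkSession}

// Hypothetical case class used only for illustration.
case class Person(name: String, age: Int)

val spark = SparkSession.builder()
  .appName("explicit-encoder-example")
  .master("local[*]")
  .getOrCreate()

// Explicitly derive an encoder for a Product type instead of importing spark.implicits._.
val personEncoder: Encoder[Person] = Encoders.product[Person]

// Pass the encoder explicitly when building the Dataset.
val people = spark.createDataset(Seq(Person("Ann", 30), Person("Bo", 25)))(personEncoder)
people.show()
}}}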