Author:    Sandeep Singh <sandeep@techaddict.me>  2016-05-11 17:44:00 -0700
Committer: Andrew Or <andrew@databricks.com>  2016-05-11 17:44:00 -0700
Commit:    db573fc743d12446dd0421fb45d00c2f541eaf9a (patch)
Tree:      dbd694d63616becab4f45b8abdd0b583921cc3a5 /sql/hivecontext-compatibility
Parent:    603f4453a16825cc5773cfe24d6ae4cee5ec949a (diff)
[SPARK-15072][SQL][PYSPARK] FollowUp: Remove SparkSession.withHiveSupport in PySpark
## What changes were proposed in this pull request?

This is a follow-up of https://github.com/apache/spark/pull/12851. It removes `SparkSession.withHiveSupport` in PySpark; users should call `SparkSession.builder.enableHiveSupport` instead.

## How was this patch tested?

Existing tests.

Author: Sandeep Singh <sandeep@techaddict.me>

Closes #13063 from techaddict/SPARK-15072-followup.
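For context, a minimal sketch of the migration this change implies for PySpark code (the application name below is illustrative):

```python
from pyspark.sql import SparkSession

# Before this change (now removed):
#   spark = SparkSession.withHiveSupport(sc)

# After: enable Hive support through the builder API.
spark = (SparkSession.builder
         .appName("example")       # illustrative application name
         .enableHiveSupport()      # reads hive-site.xml from the classpath
         .getOrCreate())

# The resulting session can query Hive tables directly.
spark.sql("SHOW TABLES").show()
```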
Diffstat (limited to 'sql/hivecontext-compatibility')
-rw-r--r--  sql/hivecontext-compatibility/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala  |  2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/sql/hivecontext-compatibility/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hivecontext-compatibility/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index aa0485a891..75166f6bea 100644
--- a/sql/hivecontext-compatibility/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hivecontext-compatibility/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -27,7 +27,7 @@ import org.apache.spark.sql.{SparkSession, SQLContext}
* An instance of the Spark SQL execution engine that integrates with data stored in Hive.
* Configuration for Hive is read from hive-site.xml on the classpath.
*/
-@deprecated("Use SparkSession.withHiveSupport instead", "2.0.0")
+@deprecated("Use SparkSession.builder.enableHiveSupport instead", "2.0.0")
class HiveContext private[hive](
_sparkSession: SparkSession,
isRootContext: Boolean)