From db573fc743d12446dd0421fb45d00c2f541eaf9a Mon Sep 17 00:00:00 2001 From: Sandeep Singh Date: Wed, 11 May 2016 17:44:00 -0700 Subject: [SPARK-15072][SQL][PYSPARK] FollowUp: Remove SparkSession.withHiveSupport in PySpark ## What changes were proposed in this pull request? This is a followup of https://github.com/apache/spark/pull/12851 Remove `SparkSession.withHiveSupport` in PySpark and instead use `SparkSession.builder.enableHiveSupport` ## How was this patch tested? Existing tests. Author: Sandeep Singh Closes #13063 from techaddict/SPARK-15072-followup. --- python/pyspark/shell.py | 4 +++- python/pyspark/sql/session.py | 10 ---------- 2 files changed, 3 insertions(+), 11 deletions(-) (limited to 'python') diff --git a/python/pyspark/shell.py b/python/pyspark/shell.py index c6b0eda996..adaa3b5a79 100644 --- a/python/pyspark/shell.py +++ b/python/pyspark/shell.py @@ -41,7 +41,9 @@ atexit.register(lambda: sc.stop()) try: # Try to access HiveConf, it will raise exception if Hive is not added sc._jvm.org.apache.hadoop.hive.conf.HiveConf() - spark = SparkSession.withHiveSupport(sc) + spark = SparkSession.builder\ + .enableHiveSupport()\ + .getOrCreate() except py4j.protocol.Py4JError: spark = SparkSession(sc) except TypeError: diff --git a/python/pyspark/sql/session.py b/python/pyspark/sql/session.py index 04842f6185..4ee9ab8ab2 100644 --- a/python/pyspark/sql/session.py +++ b/python/pyspark/sql/session.py @@ -182,16 +182,6 @@ class SparkSession(object): if SparkSession._instantiatedContext is None: SparkSession._instantiatedContext = self - @classmethod - @since(2.0) - def withHiveSupport(cls, sparkContext): - """Returns a new SparkSession with a catalog backed by Hive. - - :param sparkContext: The underlying :class:`SparkContext`. - """ - jsparkSession = sparkContext._jvm.SparkSession.withHiveSupport(sparkContext._jsc.sc()) - return cls(sparkContext, jsparkSession) - @since(2.0) def newSession(self): """ -- cgit v1.2.3