path: root/python/pyspark/sql/context.py
author     Davies Liu <davies@databricks.com>    2015-10-19 16:18:20 -0700
committer  Andrew Or <andrew@databricks.com>     2015-10-19 16:18:20 -0700
commit     232d7f8d42950431f1d9be2a6bb3591fb6ea20d6 (patch)
tree       38abfac2a3d362fb4f8e62cc91d57791c6370ad3 /python/pyspark/sql/context.py
parent     a1413b3662250dd5e980e8b1f7c3dc4585ab4766 (diff)
download   spark-232d7f8d42950431f1d9be2a6bb3591fb6ea20d6.tar.gz
           spark-232d7f8d42950431f1d9be2a6bb3591fb6ea20d6.tar.bz2
           spark-232d7f8d42950431f1d9be2a6bb3591fb6ea20d6.zip
[SPARK-11114][PYSPARK] add getOrCreate for SparkContext/SQLContext in Python
Also added SQLContext.newSession().

Author: Davies Liu <davies@databricks.com>

Closes #9122 from davies/py_create.
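For reference, a minimal usage sketch of the two new APIs under Spark 1.6; the conf values, app name, and table name below are illustrative and not part of the commit:

    from pyspark import SparkConf, SparkContext
    from pyspark.sql import SQLContext

    # Reuse the running SparkContext if there is one, otherwise create it.
    sc = SparkContext.getOrCreate(SparkConf().setMaster("local[2]").setAppName("demo"))

    # Reuse the first SQLContext instantiated in this process, or create one from sc.
    sqlContext = SQLContext.getOrCreate(sc)

    # A new session shares the SparkContext and table cache, but keeps its own
    # SQLConf, temporary tables, and registered UDFs.
    session = sqlContext.newSession()
    session.registerDataFrameAsTable(session.createDataFrame([(1, "a")], ["id", "v"]), "t")
    # "t" is visible through `session` but not through `sqlContext`.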
Diffstat (limited to 'python/pyspark/sql/context.py')
-rw-r--r--   python/pyspark/sql/context.py   27
1 file changed, 27 insertions, 0 deletions
diff --git a/python/pyspark/sql/context.py b/python/pyspark/sql/context.py
index 89c8c6e0d9..79453658a1 100644
--- a/python/pyspark/sql/context.py
+++ b/python/pyspark/sql/context.py
@@ -75,6 +75,8 @@ class SQLContext(object):
     SQLContext in the JVM, instead we make all calls to this object.
     """
 
+    _instantiatedContext = None
+
     @ignore_unicode_prefix
     def __init__(self, sparkContext, sqlContext=None):
         """Creates a new SQLContext.
@@ -99,6 +101,8 @@ class SQLContext(object):
         self._scala_SQLContext = sqlContext
         _monkey_patch_RDD(self)
         install_exception_handler()
+        if SQLContext._instantiatedContext is None:
+            SQLContext._instantiatedContext = self
 
     @property
     def _ssql_ctx(self):
@@ -111,6 +115,29 @@ class SQLContext(object):
             self._scala_SQLContext = self._jvm.SQLContext(self._jsc.sc())
         return self._scala_SQLContext
 
+    @classmethod
+    @since(1.6)
+    def getOrCreate(cls, sc):
+        """
+        Get the existing SQLContext or create a new one with given SparkContext.
+
+        :param sc: SparkContext
+        """
+        if cls._instantiatedContext is None:
+            jsqlContext = sc._jvm.SQLContext.getOrCreate(sc._jsc.sc())
+            cls(sc, jsqlContext)
+        return cls._instantiatedContext
+
+    @since(1.6)
+    def newSession(self):
+        """
+        Returns a new SQLContext as new session, that has separate SQLConf,
+        registered temporary tables and UDFs, but shared SparkContext and
+        table cache.
+        """
+        jsqlContext = self._ssql_ctx.newSession()
+        return self.__class__(self._sc, jsqlContext)
+
     @since(1.3)
     def setConf(self, key, value):
         """Sets the given Spark SQL configuration property.