path: root/python/pyspark/sql/context.py
author    Andrew Or <andrew@databricks.com>    2016-04-29 09:34:10 -0700
committer Andrew Or <andrew@databricks.com>    2016-04-29 09:34:10 -0700
commit    a7d0fedc940721d09350f2e57ae85591e0a3d90e (patch)
tree      5f96e980e810cd13f36658ed052a1e987c5d261c /python/pyspark/sql/context.py
parent    7feeb82cb7f462e44f7e698c7c3b6ac3a77aade4 (diff)
[SPARK-14988][PYTHON] SparkSession catalog and conf API
## What changes were proposed in this pull request?

The `catalog` and `conf` APIs were exposed in `SparkSession` in #12713 and #12669. This patch adds those to the Python API.

## How was this patch tested?

Python tests.

Author: Andrew Or <andrew@databricks.com>

Closes #12765 from andrewor14/python-spark-session-more.
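For context, a minimal sketch of what the newly exposed Python-side APIs look like in use. It assumes an existing `SparkSession` named `spark`; the catalog method names mirror the ones the diff below delegates to, while the function name `strLen`, the table name, and the config values are purely illustrative:

```python
from pyspark.sql.types import IntegerType

# A SparkSession named `spark` is assumed to exist already.

# Runtime configuration via the conf API.
spark.conf.set("spark.sql.shuffle.partitions", "8")
spark.conf.get("spark.sql.shuffle.partitions")  # returns '8'

# Catalog operations via the catalog API, using the same method
# names this patch forwards to.
spark.catalog.registerFunction("strLen", lambda s: len(s), IntegerType())
df = spark.range(3)
spark.catalog.registerDataFrameAsTable(df, "strings")
spark.sql("SELECT strLen('test')").collect()  # [Row(strLen(test)=4)]
spark.catalog.dropTempTable("strings")
```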
Diffstat (limited to 'python/pyspark/sql/context.py')
-rw-r--r-- python/pyspark/sql/context.py | 11
1 file changed, 6 insertions(+), 5 deletions(-)
diff --git a/python/pyspark/sql/context.py b/python/pyspark/sql/context.py
index a3ea192b28..94856c245b 100644
--- a/python/pyspark/sql/context.py
+++ b/python/pyspark/sql/context.py
@@ -142,7 +142,7 @@ class SQLContext(object):
 
         :return: :class:`UDFRegistration`
         """
-        return UDFRegistration(self.sparkSession)
+        return UDFRegistration(self)
 
     @since(1.4)
     def range(self, start, end=None, step=1, numPartitions=None):
@@ -195,7 +195,7 @@ class SQLContext(object):
         >>> sqlContext.sql("SELECT stringLengthInt('test')").collect()
         [Row(stringLengthInt(test)=4)]
         """
-        self.sparkSession.registerFunction(name, f, returnType)
+        self.sparkSession.catalog.registerFunction(name, f, returnType)
 
     # TODO(andrew): delete this once we refactor things to take in SparkSession
     def _inferSchema(self, rdd, samplingRatio=None):
@@ -301,7 +301,7 @@ class SQLContext(object):
 
         >>> sqlContext.registerDataFrameAsTable(df, "table1")
         """
-        self.sparkSession.registerDataFrameAsTable(df, tableName)
+        self.sparkSession.catalog.registerDataFrameAsTable(df, tableName)
 
     @since(1.6)
     def dropTempTable(self, tableName):
@@ -310,7 +310,7 @@ class SQLContext(object):
         >>> sqlContext.registerDataFrameAsTable(df, "table1")
        >>> sqlContext.dropTempTable("table1")
         """
-        self._ssql_ctx.dropTempTable(tableName)
+        self.sparkSession.catalog.dropTempTable(tableName)
 
     @since(1.3)
     def createExternalTable(self, tableName, path=None, source=None, schema=None, **options):
@@ -327,7 +327,8 @@ class SQLContext(object):
 
         :return: :class:`DataFrame`
         """
-        return self.sparkSession.createExternalTable(tableName, path, source, schema, **options)
+        return self.sparkSession.catalog.createExternalTable(
+            tableName, path, source, schema, **options)
 
     @ignore_unicode_prefix
     @since(1.0)
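Every hunk above has the same shape: `SQLContext` keeps its public surface but forwards the work to the session-scoped catalog instead of carrying the logic (or the JVM-side `_ssql_ctx` handle) itself. A simplified sketch of that delegation pattern, reduced to just the methods this patch touches; the real class carries much more state and documentation:

```python
class SQLContext(object):
    """Compatibility wrapper that delegates catalog operations to an
    underlying SparkSession (simplified sketch, not the full class)."""

    def __init__(self, sparkSession):
        self.sparkSession = sparkSession

    def registerFunction(self, name, f, returnType):
        self.sparkSession.catalog.registerFunction(name, f, returnType)

    def registerDataFrameAsTable(self, df, tableName):
        self.sparkSession.catalog.registerDataFrameAsTable(df, tableName)

    def dropTempTable(self, tableName):
        # Previously went through the JVM handle self._ssql_ctx.
        self.sparkSession.catalog.dropTempTable(tableName)

    def createExternalTable(self, tableName, path=None, source=None,
                            schema=None, **options):
        return self.sparkSession.catalog.createExternalTable(
            tableName, path, source, schema, **options)
```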