about summary refs log tree commit diff
path: root/python/pyspark/sql/context.py
diff options
context:
space:
mode:
Diffstat (limited to 'python/pyspark/sql/context.py')
-rw-r--r--  python/pyspark/sql/context.py  |  11
1 file changed, 6 insertions(+), 5 deletions(-)
diff --git a/python/pyspark/sql/context.py b/python/pyspark/sql/context.py
index a3ea192b28..94856c245b 100644
--- a/python/pyspark/sql/context.py
+++ b/python/pyspark/sql/context.py
@@ -142,7 +142,7 @@ class SQLContext(object):
:return: :class:`UDFRegistration`
"""
- return UDFRegistration(self.sparkSession)
+ return UDFRegistration(self)
@since(1.4)
def range(self, start, end=None, step=1, numPartitions=None):
@@ -195,7 +195,7 @@ class SQLContext(object):
>>> sqlContext.sql("SELECT stringLengthInt('test')").collect()
[Row(stringLengthInt(test)=4)]
"""
- self.sparkSession.registerFunction(name, f, returnType)
+ self.sparkSession.catalog.registerFunction(name, f, returnType)
# TODO(andrew): delete this once we refactor things to take in SparkSession
def _inferSchema(self, rdd, samplingRatio=None):
@@ -301,7 +301,7 @@ class SQLContext(object):
>>> sqlContext.registerDataFrameAsTable(df, "table1")
"""
- self.sparkSession.registerDataFrameAsTable(df, tableName)
+ self.sparkSession.catalog.registerDataFrameAsTable(df, tableName)
@since(1.6)
def dropTempTable(self, tableName):
@@ -310,7 +310,7 @@ class SQLContext(object):
>>> sqlContext.registerDataFrameAsTable(df, "table1")
>>> sqlContext.dropTempTable("table1")
"""
- self._ssql_ctx.dropTempTable(tableName)
+ self.sparkSession.catalog.dropTempTable(tableName)
@since(1.3)
def createExternalTable(self, tableName, path=None, source=None, schema=None, **options):
@@ -327,7 +327,8 @@ class SQLContext(object):
:return: :class:`DataFrame`
"""
- return self.sparkSession.createExternalTable(tableName, path, source, schema, **options)
+ return self.sparkSession.catalog.createExternalTable(
+ tableName, path, source, schema, **options)
@ignore_unicode_prefix
@since(1.0)