about summary refs log tree commit diff
path: root/python
diff options
context:
space:
mode:
authorMichael Armbrust <michael@databricks.com>2014-05-13 21:23:51 -0700
committerReynold Xin <rxin@apache.org>2014-05-13 21:23:51 -0700
commit44233865cf8020741d862d33cc660c88e9315dea (patch)
tree09f5181583b21ebda534bfa0b50c2efbcea8505d /python
parent753b04dea4b04ba9d0dd0011f00e9d70367e76fc (diff)
downloadspark-44233865cf8020741d862d33cc660c88e9315dea.tar.gz
spark-44233865cf8020741d862d33cc660c88e9315dea.tar.bz2
spark-44233865cf8020741d862d33cc660c88e9315dea.zip
[SQL] Make it possible to create Java/Python SQLContexts from an existing Scala SQLContext.
Author: Michael Armbrust <michael@databricks.com>. Closes #761 from marmbrus/existingContext, and squashes the following commits: 4651051 [Michael Armbrust] Make it possible to create Java/Python SQLContexts from an existing Scala SQLContext.
Diffstat (limited to 'python')
-rw-r--r--python/pyspark/sql.py7
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/python/pyspark/sql.py b/python/pyspark/sql.py
index 6789d7002b..bbe69e7d8f 100644
--- a/python/pyspark/sql.py
+++ b/python/pyspark/sql.py
@@ -28,7 +28,7 @@ class SQLContext:
register L{SchemaRDD}s as tables, execute sql over tables, cache tables, and read parquet files.
"""
- def __init__(self, sparkContext):
+ def __init__(self, sparkContext, sqlContext = None):
"""
Create a new SQLContext.
@@ -58,10 +58,13 @@ class SQLContext:
self._jvm = self._sc._jvm
self._pythonToJavaMap = self._jvm.PythonRDD.pythonToJavaMap
+ if sqlContext:
+ self._scala_SQLContext = sqlContext
+
@property
def _ssql_ctx(self):
"""
- Accessor for the JVM SparkSQL context. Subclasses can overrite this property to provide
+ Accessor for the JVM SparkSQL context. Subclasses can override this property to provide
their own JVM Contexts.
"""
if not hasattr(self, '_scala_SQLContext'):