aboutsummaryrefslogtreecommitdiff
path: root/python
diff options
context:
space:
mode:
authorMichael Armbrust <michael@databricks.com>2014-05-13 21:23:51 -0700
committerReynold Xin <rxin@apache.org>2014-05-13 21:24:01 -0700
commit618b3e6e7d0bb826ed333b803fe0a7214e1b14ad (patch)
treefc805aa227f761776e8ba9f10d6e4a33fa045854 /python
parentef5e9d70fafe9b819a6351fd041d0466e5c1d42d (diff)
downloadspark-618b3e6e7d0bb826ed333b803fe0a7214e1b14ad.tar.gz
spark-618b3e6e7d0bb826ed333b803fe0a7214e1b14ad.tar.bz2
spark-618b3e6e7d0bb826ed333b803fe0a7214e1b14ad.zip
[SQL] Make it possible to create Java/Python SQLContexts from an existing Scala SQLContext.
Author: Michael Armbrust <michael@databricks.com> Closes #761 from marmbrus/existingContext and squashes the following commits: 4651051 [Michael Armbrust] Make it possible to create Java/Python SQLContexts from an existing Scala SQLContext. (cherry picked from commit 44233865cf8020741d862d33cc660c88e9315dea) Signed-off-by: Reynold Xin <rxin@apache.org>
Diffstat (limited to 'python')
-rw-r--r--python/pyspark/sql.py7
1 file changed, 5 insertions, 2 deletions
diff --git a/python/pyspark/sql.py b/python/pyspark/sql.py
index 6789d7002b..bbe69e7d8f 100644
--- a/python/pyspark/sql.py
+++ b/python/pyspark/sql.py
@@ -28,7 +28,7 @@ class SQLContext:
register L{SchemaRDD}s as tables, execute sql over tables, cache tables, and read parquet files.
"""
- def __init__(self, sparkContext):
+ def __init__(self, sparkContext, sqlContext = None):
"""
Create a new SQLContext.
@@ -58,10 +58,13 @@ class SQLContext:
self._jvm = self._sc._jvm
self._pythonToJavaMap = self._jvm.PythonRDD.pythonToJavaMap
+ if sqlContext:
+ self._scala_SQLContext = sqlContext
+
@property
def _ssql_ctx(self):
"""
- Accessor for the JVM SparkSQL context. Subclasses can overrite this property to provide
+ Accessor for the JVM SparkSQL context. Subclasses can override this property to provide
their own JVM Contexts.
"""
if not hasattr(self, '_scala_SQLContext'):