author     Davies Liu <davies@databricks.com>   2015-12-16 15:48:11 -0800
committer  Davies Liu <davies.liu@gmail.com>    2015-12-16 15:48:11 -0800
commit  27b98e99d21a0cc34955337f82a71a18f9220ab2 (patch)
tree    4eb3a116264543d9fc0e1121bc93ccaa5b3113f7 /python/pyspark/mllib/common.py
parent  3a44aebd0c5331f6ff00734fa44ef63f8d18cfbb (diff)
[SPARK-12380] [PYSPARK] use SQLContext.getOrCreate in mllib
MLlib should use SQLContext.getOrCreate() instead of creating a new SQLContext.

Author: Davies Liu <davies@databricks.com>

Closes #10338 from davies/create_context.
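Illustrative sketch (not part of the patch): a minimal example of the behaviour this commit relies on, assuming a local PySpark build contemporary with it (1.6+). SQLContext.getOrCreate(sc) hands back the context already registered for the active SparkContext instead of constructing a fresh SQLContext on every call, which is what the old DataFrame(r, SQLContext(sc)) path did.

# Minimal sketch, not part of the patch; assumes a plain local PySpark 1.6+ install.
from pyspark import SparkContext
from pyspark.sql import SQLContext

sc = SparkContext.getOrCreate()

ctx_a = SQLContext.getOrCreate(sc)   # reuses the context tied to the active SparkContext
ctx_b = SQLContext.getOrCreate(sc)
assert ctx_a is ctx_b                # the same instance is returned

ctx_c = SQLContext(sc)               # old pattern: a brand-new SQLContext on every call
assert ctx_c is not ctx_a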
Diffstat (limited to 'python/pyspark/mllib/common.py')
-rw-r--r--  python/pyspark/mllib/common.py  |  6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/python/pyspark/mllib/common.py b/python/pyspark/mllib/common.py
index a439a488de..9fda1b1682 100644
--- a/python/pyspark/mllib/common.py
+++ b/python/pyspark/mllib/common.py
@@ -102,7 +102,7 @@ def _java2py(sc, r, encoding="bytes"):
             return RDD(jrdd, sc)
 
         if clsName == 'DataFrame':
-            return DataFrame(r, SQLContext(sc))
+            return DataFrame(r, SQLContext.getOrCreate(sc))
 
         if clsName in _picklable_classes:
             r = sc._jvm.SerDe.dumps(r)
@@ -125,7 +125,7 @@ def callJavaFunc(sc, func, *args):
 
 def callMLlibFunc(name, *args):
     """ Call API in PythonMLLibAPI """
-    sc = SparkContext._active_spark_context
+    sc = SparkContext.getOrCreate()
     api = getattr(sc._jvm.PythonMLLibAPI(), name)
     return callJavaFunc(sc, api, *args)
 
@@ -135,7 +135,7 @@ class JavaModelWrapper(object):
     Wrapper for the model in JVM
     """
     def __init__(self, java_model):
-        self._sc = SparkContext._active_spark_context
+        self._sc = SparkContext.getOrCreate()
         self._java_model = java_model
 
     def __del__(self):
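A similar hedged sketch for the SparkContext side of the change: SparkContext._active_spark_context is a private attribute that stays None until a context has been started elsewhere, while SparkContext.getOrCreate() returns the running context or starts one on demand, so callMLlibFunc and JavaModelWrapper no longer depend on a caller having created the context first.

# Minimal sketch, not part of the patch; assumes a plain local PySpark install.
from pyspark import SparkContext

sc = SparkContext.getOrCreate()               # reuses the running context, or starts one
assert SparkContext._active_spark_context is sc

sc_again = SparkContext.getOrCreate()         # a second call does not start another context
assert sc_again is sc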