diff options
author | Reynold Xin <rxin@databricks.com> | 2016-02-13 21:06:31 -0800 |
---|---|---|
committer | Reynold Xin <rxin@databricks.com> | 2016-02-13 21:06:31 -0800 |
commit | 354d4c24be892271bd9a9eab6ceedfbc5d671c9c (patch) | |
tree | c0503ad0c303e6db4882bdbfa356fb78a8dd32fb /python | |
parent | 388cd9ea8db2e438ebef9dfb894298f843438c43 (diff) | |
download | spark-354d4c24be892271bd9a9eab6ceedfbc5d671c9c.tar.gz spark-354d4c24be892271bd9a9eab6ceedfbc5d671c9c.tar.bz2 spark-354d4c24be892271bd9a9eab6ceedfbc5d671c9c.zip |
[SPARK-13296][SQL] Move UserDefinedFunction into sql.expressions.
This pull request has the following changes:
1. Moved UserDefinedFunction into expressions package. This is more consistent with how we structure the packages for window functions and UDAFs.
2. Moved UserDefinedPythonFunction into execution.python package, so we don't have a random private class in the top level sql package.
3. Moved everything in execution/python.scala into the newly created execution.python package.
Most of the diffs are just straight copy-paste.
Author: Reynold Xin <rxin@databricks.com>
Closes #11181 from rxin/SPARK-13296.
Diffstat (limited to 'python')
-rw-r--r-- | python/pyspark/sql/dataframe.py | 2 | ||||
-rw-r--r-- | python/pyspark/sql/functions.py | 6 |
2 files changed, 4 insertions, 4 deletions
diff --git a/python/pyspark/sql/dataframe.py b/python/pyspark/sql/dataframe.py index 3104e41407..83b034fe77 100644 --- a/python/pyspark/sql/dataframe.py +++ b/python/pyspark/sql/dataframe.py @@ -262,7 +262,7 @@ class DataFrame(object): [Row(age=2, name=u'Alice'), Row(age=5, name=u'Bob')] """ with SCCallSiteSync(self._sc) as css: - port = self._sc._jvm.org.apache.spark.sql.execution.EvaluatePython.takeAndServe( + port = self._sc._jvm.org.apache.spark.sql.execution.python.EvaluatePython.takeAndServe( self._jdf, num) return list(_load_from_socket(port, BatchedSerializer(PickleSerializer()))) diff --git a/python/pyspark/sql/functions.py b/python/pyspark/sql/functions.py index 416d722bba..5fc1cc2cae 100644 --- a/python/pyspark/sql/functions.py +++ b/python/pyspark/sql/functions.py @@ -1652,9 +1652,9 @@ class UserDefinedFunction(object): jdt = ctx._ssql_ctx.parseDataType(self.returnType.json()) if name is None: name = f.__name__ if hasattr(f, '__name__') else f.__class__.__name__ - judf = sc._jvm.UserDefinedPythonFunction(name, bytearray(pickled_command), env, includes, - sc.pythonExec, sc.pythonVer, broadcast_vars, - sc._javaAccumulator, jdt) + judf = sc._jvm.org.apache.spark.sql.execution.python.UserDefinedPythonFunction( + name, bytearray(pickled_command), env, includes, sc.pythonExec, sc.pythonVer, + broadcast_vars, sc._javaAccumulator, jdt) return judf def __del__(self): |