about summary refs log tree commit diff
path: root/python
diff options
context:
space:
mode:
author: Davies Liu <davies@databricks.com> 2015-09-08 20:56:22 -0700
committer: Xiangrui Meng <meng@databricks.com> 2015-09-08 20:56:22 -0700
commit3a11e50e21ececbec9708eb487b08196f195cd87 (patch)
tree7928676aebbb4bd486accf446fc51d121f2fc516 /python
parenta1573489a37def97b7c26b798898ffbbdc4defa8 (diff)
downloadspark-3a11e50e21ececbec9708eb487b08196f195cd87.tar.gz
spark-3a11e50e21ececbec9708eb487b08196f195cd87.tar.bz2
spark-3a11e50e21ececbec9708eb487b08196f195cd87.zip
[SPARK-10373] [PYSPARK] move @since into pyspark from sql
cc mengxr

Author: Davies Liu <davies@databricks.com>

Closes #8657 from davies/move_since.
Diffstat (limited to 'python')
-rw-r--r-- python/pyspark/__init__.py      | 16
-rw-r--r-- python/pyspark/sql/__init__.py  | 15
-rw-r--r-- python/pyspark/sql/column.py    |  2
-rw-r--r-- python/pyspark/sql/context.py   |  2
-rw-r--r-- python/pyspark/sql/dataframe.py |  2
-rw-r--r-- python/pyspark/sql/functions.py |  3
-rw-r--r-- python/pyspark/sql/group.py     |  2
-rw-r--r-- python/pyspark/sql/readwriter.py |  3
-rw-r--r-- python/pyspark/sql/window.py    |  3
9 files changed, 23 insertions(+), 25 deletions(-)
diff --git a/python/pyspark/__init__.py b/python/pyspark/__init__.py
index 5f70ac6ed8..8475dfb1c6 100644
--- a/python/pyspark/__init__.py
+++ b/python/pyspark/__init__.py
@@ -48,6 +48,22 @@ from pyspark.serializers import MarshalSerializer, PickleSerializer
from pyspark.status import *
from pyspark.profiler import Profiler, BasicProfiler
+
+def since(version):
+ """
+ A decorator that annotates a function to append the version of Spark the function was added.
+ """
+ import re
+ indent_p = re.compile(r'\n( +)')
+
+ def deco(f):
+ indents = indent_p.findall(f.__doc__)
+ indent = ' ' * (min(len(m) for m in indents) if indents else 0)
+ f.__doc__ = f.__doc__.rstrip() + "\n\n%s.. versionadded:: %s" % (indent, version)
+ return f
+ return deco
+
+
# for back compatibility
from pyspark.sql import SQLContext, HiveContext, SchemaRDD, Row
diff --git a/python/pyspark/sql/__init__.py b/python/pyspark/sql/__init__.py
index ad9c891ba1..98eaf52866 100644
--- a/python/pyspark/sql/__init__.py
+++ b/python/pyspark/sql/__init__.py
@@ -44,21 +44,6 @@ Important classes of Spark SQL and DataFrames:
from __future__ import absolute_import
-def since(version):
- """
- A decorator that annotates a function to append the version of Spark the function was added.
- """
- import re
- indent_p = re.compile(r'\n( +)')
-
- def deco(f):
- indents = indent_p.findall(f.__doc__)
- indent = ' ' * (min(len(m) for m in indents) if indents else 0)
- f.__doc__ = f.__doc__.rstrip() + "\n\n%s.. versionadded:: %s" % (indent, version)
- return f
- return deco
-
-
from pyspark.sql.types import Row
from pyspark.sql.context import SQLContext, HiveContext
from pyspark.sql.column import Column
diff --git a/python/pyspark/sql/column.py b/python/pyspark/sql/column.py
index 56e75e8cae..573f65f5bf 100644
--- a/python/pyspark/sql/column.py
+++ b/python/pyspark/sql/column.py
@@ -22,9 +22,9 @@ if sys.version >= '3':
basestring = str
long = int
+from pyspark import since
from pyspark.context import SparkContext
from pyspark.rdd import ignore_unicode_prefix
-from pyspark.sql import since
from pyspark.sql.types import *
__all__ = ["DataFrame", "Column", "SchemaRDD", "DataFrameNaFunctions",
diff --git a/python/pyspark/sql/context.py b/python/pyspark/sql/context.py
index 0ef46c4464..89c8c6e0d9 100644
--- a/python/pyspark/sql/context.py
+++ b/python/pyspark/sql/context.py
@@ -26,9 +26,9 @@ else:
from py4j.protocol import Py4JError
+from pyspark import since
from pyspark.rdd import RDD, _prepare_for_python_RDD, ignore_unicode_prefix
from pyspark.serializers import AutoBatchedSerializer, PickleSerializer
-from pyspark.sql import since
from pyspark.sql.types import Row, StringType, StructType, _verify_type, \
_infer_schema, _has_nulltype, _merge_type, _create_converter
from pyspark.sql.dataframe import DataFrame
diff --git a/python/pyspark/sql/dataframe.py b/python/pyspark/sql/dataframe.py
index e269ef4304..c5bf557912 100644
--- a/python/pyspark/sql/dataframe.py
+++ b/python/pyspark/sql/dataframe.py
@@ -26,11 +26,11 @@ if sys.version >= '3':
else:
from itertools import imap as map
+from pyspark import since
from pyspark.rdd import RDD, _load_from_socket, ignore_unicode_prefix
from pyspark.serializers import BatchedSerializer, PickleSerializer, UTF8Deserializer
from pyspark.storagelevel import StorageLevel
from pyspark.traceback_utils import SCCallSiteSync
-from pyspark.sql import since
from pyspark.sql.types import _parse_datatype_json_string
from pyspark.sql.column import Column, _to_seq, _to_list, _to_java_column
from pyspark.sql.readwriter import DataFrameWriter
diff --git a/python/pyspark/sql/functions.py b/python/pyspark/sql/functions.py
index 4b74a50152..26b8662718 100644
--- a/python/pyspark/sql/functions.py
+++ b/python/pyspark/sql/functions.py
@@ -24,10 +24,9 @@ import sys
if sys.version < "3":
from itertools import imap as map
-from pyspark import SparkContext
+from pyspark import since, SparkContext
from pyspark.rdd import _prepare_for_python_RDD, ignore_unicode_prefix
from pyspark.serializers import PickleSerializer, AutoBatchedSerializer
-from pyspark.sql import since
from pyspark.sql.types import StringType
from pyspark.sql.column import Column, _to_java_column, _to_seq
diff --git a/python/pyspark/sql/group.py b/python/pyspark/sql/group.py
index 04594d5a83..71c0bccc5e 100644
--- a/python/pyspark/sql/group.py
+++ b/python/pyspark/sql/group.py
@@ -15,8 +15,8 @@
# limitations under the License.
#
+from pyspark import since
from pyspark.rdd import ignore_unicode_prefix
-from pyspark.sql import since
from pyspark.sql.column import Column, _to_seq
from pyspark.sql.dataframe import DataFrame
from pyspark.sql.types import *
diff --git a/python/pyspark/sql/readwriter.py b/python/pyspark/sql/readwriter.py
index 3fa6895880..f43d8bf646 100644
--- a/python/pyspark/sql/readwriter.py
+++ b/python/pyspark/sql/readwriter.py
@@ -22,8 +22,7 @@ if sys.version >= '3':
from py4j.java_gateway import JavaClass
-from pyspark import RDD
-from pyspark.sql import since
+from pyspark import RDD, since
from pyspark.sql.column import _to_seq
from pyspark.sql.types import *
diff --git a/python/pyspark/sql/window.py b/python/pyspark/sql/window.py
index eaf4d7e986..57bbe340bb 100644
--- a/python/pyspark/sql/window.py
+++ b/python/pyspark/sql/window.py
@@ -17,8 +17,7 @@
import sys
-from pyspark import SparkContext
-from pyspark.sql import since
+from pyspark import since, SparkContext
from pyspark.sql.column import _to_seq, _to_java_column
__all__ = ["Window", "WindowSpec"]