Diffstat (limited to 'python/pyspark/sql/readwriter.py')
-rw-r--r--  python/pyspark/sql/readwriter.py  15
1 file changed, 15 insertions(+), 0 deletions(-)
diff --git a/python/pyspark/sql/readwriter.py b/python/pyspark/sql/readwriter.py
index e2b27fb587..02b3aab2b1 100644
--- a/python/pyspark/sql/readwriter.py
+++ b/python/pyspark/sql/readwriter.py
@@ -17,6 +17,7 @@
from py4j.java_gateway import JavaClass
+from pyspark.sql import since
from pyspark.sql.column import _to_seq
from pyspark.sql.types import *
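
The hunk above imports the new `since` decorator from `pyspark.sql`; its implementation is not part of this diff. As an assumption, a minimal sketch of such a decorator, which appends a Sphinx `.. versionadded::` note to the wrapped method's docstring, could look like this:

    # Illustrative sketch only (assumption): the real `since` lives in
    # pyspark.sql and is imported, not defined, by this patch.
    def since(version):
        def deco(f):
            # Append a Sphinx version note to the existing docstring.
            f.__doc__ = (f.__doc__ or "").rstrip() + "\n\n.. versionadded:: %s" % version
            return f
        return deco

With something like that in place, `@since(1.4)` on each method below renders a version note in the generated API docs without repeating the directive by hand.
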
@@ -30,6 +31,8 @@ class DataFrameReader(object):
to access this.
::Note: Experimental
+
+ .. versionadded:: 1.4
"""
def __init__(self, sqlContext):
@@ -40,6 +43,7 @@ class DataFrameReader(object):
from pyspark.sql.dataframe import DataFrame
return DataFrame(jdf, self._sqlContext)
+ @since(1.4)
def load(self, path=None, format=None, schema=None, **options):
"""Loads data from a data source and returns it as a :class`DataFrame`.
@@ -63,6 +67,7 @@ class DataFrameReader(object):
else:
return self._df(jreader.load())
+ @since(1.4)
def json(self, path, schema=None):
"""
Loads a JSON file (one object per line) and returns the result as
@@ -107,6 +112,7 @@ class DataFrameReader(object):
jdf = self._jreader.schema(jschema).json(path)
return self._df(jdf)
+ @since(1.4)
def table(self, tableName):
"""Returns the specified table as a :class:`DataFrame`.
@@ -117,6 +123,7 @@ class DataFrameReader(object):
"""
return self._df(self._jreader.table(tableName))
+ @since(1.4)
def parquet(self, *path):
"""Loads a Parquet file, returning the result as a :class:`DataFrame`.
@@ -130,6 +137,7 @@ class DataFrameReader(object):
"""
return self._df(self._jreader.parquet(_to_seq(self._sqlContext._sc, path)))
+ @since(1.4)
def jdbc(self, url, table, column=None, lowerBound=None, upperBound=None, numPartitions=None,
predicates=None, properties={}):
"""
@@ -178,12 +186,15 @@ class DataFrameWriter(object):
to access this.
::Note: Experimental
+
+ .. versionadded:: 1.4
"""
def __init__(self, df):
self._df = df
self._sqlContext = df.sql_ctx
self._jwrite = df._jdf.write()
+ @since(1.4)
def save(self, path=None, format=None, mode="error", **options):
"""
Saves the contents of the :class:`DataFrame` to a data source.
@@ -215,6 +226,7 @@ class DataFrameWriter(object):
else:
jwrite.save(path)
+ @since(1.4)
def saveAsTable(self, name, format=None, mode="error", **options):
"""
Saves the contents of this :class:`DataFrame` to a data source as a table.
@@ -243,6 +255,7 @@ class DataFrameWriter(object):
jwrite = jwrite.option(k, options[k])
return jwrite.saveAsTable(name)
+ @since(1.4)
def json(self, path, mode="error"):
"""
Saves the content of the :class:`DataFrame` in JSON format at the
@@ -261,6 +274,7 @@ class DataFrameWriter(object):
"""
return self._jwrite.mode(mode).json(path)
+ @since(1.4)
def parquet(self, path, mode="error"):
"""
Saves the content of the :class:`DataFrame` in Parquet format at the
@@ -279,6 +293,7 @@ class DataFrameWriter(object):
"""
return self._jwrite.mode(mode).parquet(path)
+ @since(1.4)
def jdbc(self, url, table, mode="error", properties={}):
"""
Saves the content of the :class:`DataFrame` to an external database table
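
The writer side mirrors the reader. A hedged usage sketch (assuming an existing :class:`DataFrame` `df`; output locations and the table name are hypothetical) of the `DataFrameWriter` methods annotated above:

    # Usage sketch of the writer methods touched above; outputs are hypothetical.
    df.write.parquet("out/people_parquet", mode="overwrite")
    df.write.json("out/people_json", mode="error")
    df.write.saveAsTable("people_table", format="parquet")
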