diff options
author | Holden Karau <holden@pigscanfly.ca> | 2015-05-21 14:08:57 -0700 |
---|---|---|
committer | Josh Rosen <joshrosen@databricks.com> | 2015-05-21 14:08:57 -0700 |
commit | 6b18cdc1b1284b1d48d637d06a1e64829aeb6202 (patch) | |
tree | f058692f7e69162e48725eece288e4d65336e839 /python/pyspark | |
parent | 3d0cccc85850ca9c79f3e5ff7395bd04d212b063 (diff) | |
download | spark-6b18cdc1b1284b1d48d637d06a1e64829aeb6202.tar.gz spark-6b18cdc1b1284b1d48d637d06a1e64829aeb6202.tar.bz2 spark-6b18cdc1b1284b1d48d637d06a1e64829aeb6202.zip |
[SPARK-7711] Add a startTime property to match the corresponding one in Scala
Author: Holden Karau <holden@pigscanfly.ca>
Closes #6275 from holdenk/SPARK-771-startTime-is-missing-from-pyspark and squashes the following commits:
06662dc [Holden Karau] add missing blank line for style checks
7a87410 [Holden Karau] add back missing newline
7a7876b [Holden Karau] Add a startTime property to match the corresponding one in the Scala SparkContext
Diffstat (limited to 'python/pyspark')
-rw-r--r-- | python/pyspark/context.py | 5 | ||||
-rw-r--r-- | python/pyspark/tests.py | 4 |
2 files changed, 9 insertions, 0 deletions
diff --git a/python/pyspark/context.py b/python/pyspark/context.py index 1f2b40b29f..aeb7ad4f2f 100644 --- a/python/pyspark/context.py +++ b/python/pyspark/context.py @@ -292,6 +292,11 @@ class SparkContext(object): return self._jsc.version() @property + def startTime(self): + """Return the epoch time when the Spark Context was started.""" + return self._jsc.startTime() + + @property def defaultParallelism(self): """ Default level of parallelism to use when not given by user (e.g. for diff --git a/python/pyspark/tests.py b/python/pyspark/tests.py index d8e319994c..f9fb37f7fc 100644 --- a/python/pyspark/tests.py +++ b/python/pyspark/tests.py @@ -1809,6 +1809,10 @@ class ContextTests(unittest.TestCase): sc.stop() + def test_startTime(self): + with SparkContext() as sc: + self.assertGreater(sc.startTime, 0) + @unittest.skipIf(not _have_scipy, "SciPy not installed") class SciPyTests(PySparkTestCase): |