author    Patrick Wendell <pwendell@gmail.com>  2014-01-01 16:10:51 -0800
committer Patrick Wendell <pwendell@gmail.com>  2014-01-01 16:10:51 -0800
commit    f8d245bdfc703eefa4fd34795739a1a851031f5b (patch)
tree      0c9fa7f5f2b4ec581ac608ad3ae96b78470bff05 /python/pyspark/context.py
parent    37c43c9dd1947e7cc99a310214b323707948f087 (diff)
parent    9a0ff721c9e4c8f52aadfdde6ac2764d3cba9797 (diff)
Merge remote-tracking branch 'apache-github/master' into log4j-fix-2
Conflicts:
	streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobGenerator.scala
Diffstat (limited to 'python/pyspark/context.py')
-rw-r--r--  python/pyspark/context.py | 9 ++-------
1 file changed, 2 insertions(+), 7 deletions(-)
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index 0604f6836c..108f36576a 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -320,17 +320,12 @@ class SparkContext(object):
             self._python_includes.append(filename)
             sys.path.append(os.path.join(SparkFiles.getRootDirectory(), filename)) # for tests in local mode
 
-    def setCheckpointDir(self, dirName, useExisting=False):
+    def setCheckpointDir(self, dirName):
         """
         Set the directory under which RDDs are going to be checkpointed. The
         directory must be a HDFS path if running on a cluster.
-
-        If the directory does not exist, it will be created. If the directory
-        exists and C{useExisting} is set to true, then the exisiting directory
-        will be used. Otherwise an exception will be thrown to prevent
-        accidental overriding of checkpoint files in the existing directory.
         """
-        self._jsc.sc().setCheckpointDir(dirName, useExisting)
+        self._jsc.sc().setCheckpointDir(dirName)
 
     def _getJavaStorageLevel(self, storageLevel):
         """