author    Josh Rosen <joshrosen@apache.org>    2014-01-23 18:10:16 -0800
committer Josh Rosen <joshrosen@apache.org>    2014-01-23 18:24:51 -0800
commit    f83068497ba42c5ea5c636efebca81f684e96177 (patch)
tree      69e3115c1df0c34be51bf18836f5c5ef666d19ec
parent    61569906ccafe4f1d10a61882d564e4bb16665ef (diff)
Fix for SPARK-1025: PySpark hang on missing files.
-rw-r--r--  core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala   9
-rw-r--r--  python/pyspark/tests.py                                          11
2 files changed, 20 insertions, 0 deletions
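Note on the failure mode, as far as the diff shows it: FileNotFoundException is a subclass of IOException, so a missing input file was previously caught by the generic IOException handler in the stdin-writer thread and merely logged, presumably leaving the reader side blocked on output from a Python worker that would never produce any, which matches the hang reported in SPARK-1025. The patch records the exception in a @volatile field, shuts the worker's input down, and re-throws the exception from read(). A standalone sketch of this hand-off pattern follows the Scala diff below.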
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
index 57bde8d85f..70516bde8b 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
@@ -52,6 +52,8 @@ private[spark] class PythonRDD[T: ClassTag](
     val env = SparkEnv.get
     val worker = env.createPythonWorker(pythonExec, envVars.toMap)
 
+    @volatile var readerException: Exception = null
+
     // Start a thread to feed the process input from our parent's iterator
     new Thread("stdin writer for " + pythonExec) {
       override def run() {
@@ -82,6 +84,10 @@ private[spark] class PythonRDD[T: ClassTag](
           dataOut.flush()
           worker.shutdownOutput()
         } catch {
+          case e: java.io.FileNotFoundException =>
+            readerException = e
+            // Kill the Python worker process:
+            worker.shutdownOutput()
           case e: IOException =>
             // This can happen for legitimate reasons if the Python code stops returning data before we are done
             // passing elements through, e.g., for take(). Just log a message to say it happened.
@@ -106,6 +112,9 @@ private[spark] class PythonRDD[T: ClassTag](
       }
 
       private def read(): Array[Byte] = {
+        if (readerException != null) {
+          throw readerException
+        }
         try {
           stream.readInt() match {
             case length if length > 0 =>
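The essence of the change above is handing an exception from one thread to another. Below is a minimal, standalone sketch of that pattern under assumed names (ExceptionHandoffSketch, writerException, output are hypothetical, not Spark's actual classes): a writer thread that fails records the error in a @volatile field, and the reader checks that field before waiting on data, so it fails fast instead of hanging.

import java.io.FileNotFoundException
import java.util.concurrent.{LinkedBlockingQueue, TimeUnit}

object ExceptionHandoffSketch {
  // Written by the writer thread, read by the main (reader) thread.
  // @volatile guarantees the reader observes the write without extra locking.
  @volatile private var writerException: Exception = null
  private val output = new LinkedBlockingQueue[String]()

  def main(args: Array[String]): Unit = {
    val writer = new Thread("stdin writer (sketch)") {
      override def run(): Unit = {
        try {
          // Simulate the SPARK-1025 failure mode: the input file is gone.
          throw new FileNotFoundException("input file was deleted")
        } catch {
          case e: FileNotFoundException =>
            writerException = e // make the failure visible to the reader
        }
      }
    }
    writer.start()
    writer.join() // join only keeps the sketch deterministic; it is not part of the pattern

    // Reader side, mirroring the check added to read() above: surface the
    // writer's failure instead of blocking on output that will never arrive.
    if (writerException != null) {
      throw writerException
    }
    println(output.poll(1, TimeUnit.SECONDS))
  }
}

Without @volatile the reader thread would be free to keep a stale view of the field and never see the writer's assignment; calling worker.shutdownOutput() in the same handler presumably also unblocks a reader that is already waiting, so the check in read() gets a chance to run.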
diff --git a/python/pyspark/tests.py b/python/pyspark/tests.py
index acd1ca5676..527104587f 100644
--- a/python/pyspark/tests.py
+++ b/python/pyspark/tests.py
@@ -168,6 +168,17 @@ class TestRDDFunctions(PySparkTestCase):
         self.assertEqual("Hello World!", x.strip())
         self.assertEqual("Hello World!", y.strip())
 
+    def test_deleting_input_files(self):
+        # Regression test for SPARK-1025
+        tempFile = NamedTemporaryFile(delete=False)
+        tempFile.write("Hello World!")
+        tempFile.close()
+        data = self.sc.textFile(tempFile.name)
+        filtered_data = data.filter(lambda x: True)
+        self.assertEqual(1, filtered_data.count())
+        os.unlink(tempFile.name)
+        self.assertRaises(Exception, lambda: filtered_data.count())
+
 class TestIO(PySparkTestCase):