author     Bogdan Raducanu <bogdan@databricks.com>        2017-03-16 15:25:45 +0100
committer  Herman van Hovell <hvanhovell@databricks.com>  2017-03-16 15:25:45 +0100
commit     ee91a0decc389572099ea7c038149cc50375a2ef (patch)
tree       623b09f89e5f6ab1a4135cbddf5caeac88e7731c /core
parent     d647aae278ef31a07fc64715eb07e48294d94bb8 (diff)
[SPARK-19946][TESTING] DebugFilesystem.assertNoOpenStreams should report the open streams to help debugging
## What changes were proposed in this pull request?

DebugFilesystem.assertNoOpenStreams now throws an exception whose cause is the exception captured when the leaked stream was opened, so the stack trace points at the code line that leaked the stream.

## How was this patch tested?

New test in SparkContextSuite that checks the thrown exception carries a cause.

Author: Bogdan Raducanu <bogdan@databricks.com>

Closes #17292 from bogdanrdc/SPARK-19946.
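For readers of this patch: before this change, a leaked stream failed the suite with a bare RuntimeException, and the only hint about the leak's origin was a per-stream warning in the logs; with the cause attached, the failure itself carries the open-site stack trace. A minimal sketch of how a suite's teardown might surface it (this mix-in and its name are illustrative, not part of this patch):

    import org.scalatest.{BeforeAndAfterEach, Suite}
    import org.apache.spark.DebugFilesystem

    // Hypothetical mix-in for suites that run on DebugFilesystem.
    trait FailOnLeakedStreams extends BeforeAndAfterEach { self: Suite =>
      override def afterEach(): Unit = {
        try {
          // After this patch, the IllegalStateException's cause is the
          // Throwable captured when the leaked stream was opened.
          DebugFilesystem.assertNoOpenStreams()
        } catch {
          case e: IllegalStateException =>
            e.getCause.printStackTrace() // stack trace of the leaking open() call
            throw e
        } finally {
          super.afterEach()
        }
      }
    }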
Diffstat (limited to 'core')
-rw-r--r--  core/src/test/scala/org/apache/spark/DebugFilesystem.scala    |  3 ++-
-rw-r--r--  core/src/test/scala/org/apache/spark/SparkContextSuite.scala  | 20 +++++++++++++++++++-
2 files changed, 21 insertions(+), 2 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/DebugFilesystem.scala b/core/src/test/scala/org/apache/spark/DebugFilesystem.scala
index fb8d701ebd..72aea84111 100644
--- a/core/src/test/scala/org/apache/spark/DebugFilesystem.scala
+++ b/core/src/test/scala/org/apache/spark/DebugFilesystem.scala
@@ -44,7 +44,8 @@ object DebugFilesystem extends Logging {
         logWarning("Leaked filesystem connection created at:")
         exc.printStackTrace()
       }
-      throw new RuntimeException(s"There are $numOpen possibly leaked file streams.")
+      throw new IllegalStateException(s"There are $numOpen possibly leaked file streams.",
+        openStreams.values().asScala.head)
     }
   }
 }
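The cause passed to the new IllegalStateException is one of the Throwables that DebugFilesystem records per open stream; rethrowing it as the cause is what makes the leak's origin visible in the test failure. A self-contained sketch of that bookkeeping pattern (the object and method declarations here are assumptions for illustration; only the throw mirrors the diff above):

    import java.util.concurrent.ConcurrentHashMap
    import scala.collection.JavaConverters._

    object StreamLeakRegistry {
      // Each open stream maps to a Throwable created at open() time, whose
      // stack trace records who opened the stream.
      private val openStreams = new ConcurrentHashMap[AnyRef, Throwable]()

      def addOpenStream(stream: AnyRef): Unit =
        openStreams.put(stream, new Throwable("open stream tracking"))

      def removeOpenStream(stream: AnyRef): Unit =
        openStreams.remove(stream)

      def assertNoOpenStreams(): Unit = {
        val numOpen = openStreams.size()
        if (numOpen > 0) {
          // Attach the first recorded open-site as the cause, as in the patch.
          throw new IllegalStateException(
            s"There are $numOpen possibly leaked file streams.",
            openStreams.values().asScala.head)
        }
      }
    }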
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index f97a112ec1..d08a162fed 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -18,7 +18,7 @@
 package org.apache.spark
 
 import java.io.File
-import java.net.MalformedURLException
+import java.net.{MalformedURLException, URI}
 import java.nio.charset.StandardCharsets
 import java.util.concurrent.TimeUnit
@@ -26,6 +26,8 @@ import scala.concurrent.duration._
 import scala.concurrent.Await
 
 import com.google.common.io.Files
+import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.fs.{FileSystem, Path}
 import org.apache.hadoop.io.{BytesWritable, LongWritable, Text}
 import org.apache.hadoop.mapred.TextInputFormat
 import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat}
@@ -538,6 +540,22 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventually {
     }
   }
 
+  test("SPARK-19946: DebugFilesystem.assertNoOpenStreams should report " +
+      "open streams to help debugging") {
+    val fs = new DebugFilesystem()
+    fs.initialize(new URI("file:///"), new Configuration())
+    val file = File.createTempFile("SPARK19946", "temp")
+    Files.write(Array.ofDim[Byte](1000), file)
+    val path = new Path("file:///" + file.getCanonicalPath)
+    val stream = fs.open(path)
+    val exc = intercept[RuntimeException] {
+      DebugFilesystem.assertNoOpenStreams()
+    }
+    assert(exc != null)
+    assert(exc.getCause() != null)
+    stream.close()
+  }
+
 }
 
 object SparkContextSuite {
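Usage note: any suite that swaps DebugFilesystem in for the local filesystem now gets an actionable failure on a leak. A standalone sketch mirroring the new test (the temp-file name and byte count are arbitrary):

    import java.io.File
    import java.net.URI
    import com.google.common.io.Files
    import org.apache.hadoop.conf.Configuration
    import org.apache.hadoop.fs.Path
    import org.apache.spark.DebugFilesystem

    val fs = new DebugFilesystem()
    fs.initialize(new URI("file:///"), new Configuration())
    val file = File.createTempFile("leak-demo", "tmp")
    Files.write(Array.ofDim[Byte](16), file)
    val stream = fs.open(new Path("file:///" + file.getCanonicalPath))
    try {
      DebugFilesystem.assertNoOpenStreams() // throws: one stream is still open
    } catch {
      case e: IllegalStateException =>
        e.getCause.printStackTrace() // points at the fs.open call above
    } finally {
      stream.close()
    }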