aboutsummaryrefslogtreecommitdiff
path: root/core/src/main/scala
diff options
context:
space:
mode:
authorWilliam Benton <willb@redhat.com>2014-07-14 23:09:13 -0700
committerPatrick Wendell <pwendell@gmail.com>2014-07-14 23:09:13 -0700
commit1f99fea53b5ff994dd4a12b44625d35186e269ff (patch)
tree2f2b1bdc7c89edc7992d6321eff7b77f48d6d9ac /core/src/main/scala
parente2255e4b2c404f31ac9f7af9ed445141af980973 (diff)
downloadspark-1f99fea53b5ff994dd4a12b44625d35186e269ff.tar.gz
spark-1f99fea53b5ff994dd4a12b44625d35186e269ff.tar.bz2
spark-1f99fea53b5ff994dd4a12b44625d35186e269ff.zip
SPARK-2486: Utils.getCallSite is now resilient to bogus frames
When running Spark under certain instrumenting profilers, Utils.getCallSite could crash with an NPE. This commit makes it more resilient to failures occurring while inspecting stack frames. Author: William Benton <willb@redhat.com> Closes #1413 from willb/spark-2486 and squashes the following commits: b7c0274 [William Benton] Use explicit null checks instead of Try() 0f0c1ae [William Benton] Utils.getCallSite is now resilient to bogus frames
Diffstat (limited to 'core/src/main/scala')
-rw-r--r--core/src/main/scala/org/apache/spark/util/Utils.scala6
1 file changed, 5 insertions, 1 deletion
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index a2454e120a..d72c97bbe8 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -809,7 +809,11 @@ private[spark] object Utils extends Logging {
*/
def getCallSite: CallSite = {
val trace = Thread.currentThread.getStackTrace()
- .filterNot(_.getMethodName.contains("getStackTrace"))
+ .filterNot((ste:StackTraceElement) =>
+ // When running under some profilers, the current stack trace might contain some bogus
+ // frames. This is intended to ensure that we don't crash in these situations by
+ // ignoring any frames that we can't examine.
+ (ste == null || ste.getMethodName == null || ste.getMethodName.contains("getStackTrace")))
// Keep crawling up the stack trace until we find the first function not inside of the spark
// package. We track the last (shallowest) contiguous Spark method. This might be an RDD