From 017cdf2be67776978a940609d610afea79856b17 Mon Sep 17 00:00:00 2001
From: Shixiong Zhu
Date: Mon, 7 Mar 2016 20:56:08 -0800
Subject: [SPARK-13711][CORE] Don't call SparkUncaughtExceptionHandler in
 AppClient as it's in driver

## What changes were proposed in this pull request?

AppClient runs in the driver side. It should not call `Utils.tryOrExit` as it
will send exception to SparkUncaughtExceptionHandler and call `System.exit`.

This PR just removed `Utils.tryOrExit`.

## How was this patch tested?

manual tests.

Author: Shixiong Zhu

Closes #11566 from zsxwing/SPARK-13711.
---
 .../org/apache/spark/deploy/client/AppClient.scala | 18 ++++++++----------
 1 file changed, 8 insertions(+), 10 deletions(-)

(limited to 'core')

diff --git a/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala b/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala
index a7a0a78f14..b9dec62abc 100644
--- a/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala
@@ -125,16 +125,14 @@ private[spark] class AppClient(
     registerMasterFutures.set(tryRegisterAllMasters())
     registrationRetryTimer.set(registrationRetryThread.schedule(new Runnable {
       override def run(): Unit = {
-        Utils.tryOrExit {
-          if (registered.get) {
-            registerMasterFutures.get.foreach(_.cancel(true))
-            registerMasterThreadPool.shutdownNow()
-          } else if (nthRetry >= REGISTRATION_RETRIES) {
-            markDead("All masters are unresponsive! Giving up.")
-          } else {
-            registerMasterFutures.get.foreach(_.cancel(true))
-            registerWithMaster(nthRetry + 1)
-          }
+        if (registered.get) {
+          registerMasterFutures.get.foreach(_.cancel(true))
+          registerMasterThreadPool.shutdownNow()
+        } else if (nthRetry >= REGISTRATION_RETRIES) {
+          markDead("All masters are unresponsive! Giving up.")
+        } else {
+          registerMasterFutures.get.foreach(_.cancel(true))
+          registerWithMaster(nthRetry + 1)
         }
       }
     }, REGISTRATION_TIMEOUT_SECONDS, TimeUnit.SECONDS))
--
cgit v1.2.3