From 398859cf12df28a38b1fbf0d740eb14a1af20e63 Mon Sep 17 00:00:00 2001
From: Shixiong Zhu
Date: Mon, 7 Mar 2016 15:29:10 -0800
Subject: [PATCH] Don't call SparkUncaughtExceptionHandler in AppClient as it's in driver

---
 .../apache/spark/deploy/client/AppClient.scala     | 18 ++++++++----------
 1 file changed, 8 insertions(+), 10 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala b/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala
index a7a0a78f1456..b9dec62abcca 100644
--- a/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala
@@ -125,16 +125,14 @@ private[spark] class AppClient(
     registerMasterFutures.set(tryRegisterAllMasters())
     registrationRetryTimer.set(registrationRetryThread.schedule(new Runnable {
       override def run(): Unit = {
-        Utils.tryOrExit {
-          if (registered.get) {
-            registerMasterFutures.get.foreach(_.cancel(true))
-            registerMasterThreadPool.shutdownNow()
-          } else if (nthRetry >= REGISTRATION_RETRIES) {
-            markDead("All masters are unresponsive! Giving up.")
-          } else {
-            registerMasterFutures.get.foreach(_.cancel(true))
-            registerWithMaster(nthRetry + 1)
-          }
+        if (registered.get) {
+          registerMasterFutures.get.foreach(_.cancel(true))
+          registerMasterThreadPool.shutdownNow()
+        } else if (nthRetry >= REGISTRATION_RETRIES) {
+          markDead("All masters are unresponsive! Giving up.")
+        } else {
+          registerMasterFutures.get.foreach(_.cancel(true))
+          registerWithMaster(nthRetry + 1)
         }
       }
     }, REGISTRATION_TIMEOUT_SECONDS, TimeUnit.SECONDS))