File tree — 5 files changed: +38 −33 lines changed
core/src/main/scala/org/apache/spark
sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver
5 files changed: +38 −33 lines changed
Original file line number / Diff line number / Diff line change @@ -28,7 +28,7 @@ import org.apache.spark.network.sasl.SaslServerBootstrap
28
28
import org.apache.spark.network.server.{TransportServerBootstrap, TransportServer}
29
29
import org.apache.spark.network.shuffle.ExternalShuffleBlockHandler
30
30
import org.apache.spark.network.util.TransportConf
31
- import org.apache.spark.util.Utils
31
+ import org.apache.spark.util.{ShutdownHookManager, Utils}
32
32
33
33
/**
34
34
* Provides a server from which Executors can read shuffle files (rather than reading directly from
@@ -118,19 +118,13 @@ object ExternalShuffleService extends Logging {
118
118
server = newShuffleService(sparkConf, securityManager)
119
119
server.start()
120
120
121
- installShutdownHook()
121
+ ShutdownHookManager.addShutdownHook { () =>
122
+ logInfo("Shutting down shuffle service.")
123
+ server.stop()
124
+ barrier.countDown()
125
+ }
122
126
123
127
// keep running until the process is terminated
124
128
barrier.await()
125
129
}
126
-
127
- private def installShutdownHook (): Unit = {
128
- Runtime.getRuntime.addShutdownHook(new Thread("External Shuffle Service shutdown thread") {
129
- override def run () {
130
- logInfo(" Shutting down shuffle service." )
131
- server.stop()
132
- barrier.countDown()
133
- }
134
- })
135
- }
136
130
}
Original file line number Diff line number Diff line change @@ -22,7 +22,7 @@ import java.util.concurrent.CountDownLatch
22
22
import org .apache .spark .deploy .mesos .ui .MesosClusterUI
23
23
import org .apache .spark .deploy .rest .mesos .MesosRestServer
24
24
import org .apache .spark .scheduler .cluster .mesos ._
25
- import org.apache.spark.util.SignalLogger
25
+ import org.apache.spark.util.{ShutdownHookManager, SignalLogger}
26
26
import org .apache .spark .{Logging , SecurityManager , SparkConf }
27
27
28
28
/*
@@ -103,14 +103,11 @@ private[mesos] object MesosClusterDispatcher extends Logging {
103
103
}
104
104
val dispatcher = new MesosClusterDispatcher (dispatcherArgs, conf)
105
105
dispatcher.start()
106
- val shutdownHook = new Thread () {
107
- override def run () {
108
- logInfo(" Shutdown hook is shutting down dispatcher" )
109
- dispatcher.stop()
110
- dispatcher.awaitShutdown()
111
- }
106
+ ShutdownHookManager.addShutdownHook { () =>
107
+ logInfo(" Shutdown hook is shutting down dispatcher" )
108
+ dispatcher.stop()
109
+ dispatcher.awaitShutdown()
112
110
}
113
- Runtime .getRuntime.addShutdownHook(shutdownHook)
114
111
dispatcher.awaitShutdown()
115
112
}
116
113
}
Original file line number Diff line number Diff line change @@ -162,7 +162,9 @@ private[spark] object ShutdownHookManager extends Logging {
162
162
val hook = new Thread {
163
163
override def run () {}
164
164
}
165
+ // scalastyle:off runtimeaddshutdownhook
165
166
Runtime .getRuntime.addShutdownHook(hook)
167
+ // scalastyle:on runtimeaddshutdownhook
166
168
Runtime .getRuntime.removeShutdownHook(hook)
167
169
} catch {
168
170
case ise : IllegalStateException => return true
@@ -228,7 +230,9 @@ private [util] class SparkShutdownHookManager {
228
230
.invoke(shm, hookTask, Integer .valueOf(fsPriority + 30 ))
229
231
230
232
case Failure (_) =>
233
+ // scalastyle:off runtimeaddshutdownhook
231
234
Runtime .getRuntime.addShutdownHook(new Thread (hookTask, " Spark Shutdown Hook" ));
235
+ // scalastyle:on runtimeaddshutdownhook
232
236
}
233
237
}
234
238
Original file line number Diff line number Diff line change @@ -157,6 +157,18 @@ This file is divided into 3 sections:
157
157
]]> </customMessage >
158
158
</check >
159
159
160
+ <check customId="runtimeaddshutdownhook" level="error" class="org.scalastyle.file.RegexChecker" enabled="true">
161
+ <parameters><parameter name="regex">Runtime\.getRuntime\.addShutdownHook</parameter></parameters>
162
+ <customMessage ><![CDATA[
163
+ Are you sure that you want to use Runtime.getRuntime.addShutdownHook? In most cases, you should use
164
+ ShutdownHookManager.addShutdownHook instead.
165
+ If you must use Runtime.getRuntime.addShutdownHook, wrap the code block with
166
+ // scalastyle:off runtimeaddshutdownhook
167
+ Runtime.getRuntime.addShutdownHook(...)
168
+ // scalastyle:on runtimeaddshutdownhook
169
+ ]]> </customMessage >
170
+ </check >
171
+
160
172
<check customId =" classforname" level =" error" class =" org.scalastyle.file.RegexChecker" enabled =" true" >
161
173
<parameters ><parameter name =" regex" >Class\.forName</parameter ></parameters >
162
174
<customMessage ><![CDATA[
Original file line number Diff line number Diff line change @@ -195,20 +195,18 @@ private[hive] object SparkSQLCLIDriver extends Logging {
195
195
}
196
196
197
197
// add shutdown hook to flush the history to history file
198
- Runtime.getRuntime.addShutdownHook(new Thread(new Runnable() {
199
- override def run () = {
200
- reader.getHistory match {
201
- case h : FileHistory =>
202
- try {
203
- h.flush()
204
- } catch {
205
- case e : IOException =>
206
- logWarning(" WARNING: Failed to write command history file: " + e.getMessage)
207
- }
208
- case _ =>
209
- }
198
+ ShutdownHookManager.addShutdownHook { () =>
199
+ reader.getHistory match {
200
+ case h : FileHistory =>
201
+ try {
202
+ h.flush()
203
+ } catch {
204
+ case e : IOException =>
205
+ logWarning(" WARNING: Failed to write command history file: " + e.getMessage)
206
+ }
207
+ case _ =>
210
208
}
211
- }))
209
+ }
212
210
213
211
// TODO: missing
214
212
/*
You can’t perform that action at this time.
0 commit comments