[SPARK-15703] [Scheduler][Core][WebUI] Make ListenerBus event queue size configurable #14269
Changes from 2 commits
Commits in this pull request: 9c0cb44, 76d9af8, 09e855e, 41dc57c, 889fe66, 82feec4
core/src/main/scala/org/apache/spark/internal/config/package.scala
@@ -19,6 +19,7 @@ package org.apache.spark.internal

import java.util.concurrent.TimeUnit

import org.apache.spark.SparkException
import org.apache.spark.launcher.SparkLauncher
import org.apache.spark.network.util.ByteUnit

@@ -103,4 +104,16 @@ package object config {
    .stringConf
    .checkValues(Set("hive", "in-memory"))
    .createWithDefault("in-memory")

  private[spark] val LISTENER_BUS_EVENT_QUEUE_SIZE =
    ConfigBuilder("spark.scheduler.listenerbus.eventqueue.size")
Contributor comment: Another post-hoc review/complaint: instead of `size`, I think it might have been better to call it `capacity`.

Contributor (author) reply: I agree. Capacity would have been a better choice.
      .intConf
      .transform((x: Int) => {
        if (x <= 0) {
          throw new SparkException("spark.scheduler.listenerbus.eventqueue.size must be > 0!")
        } else {
          x
        }
      })
      .createWithDefault(10000)
}
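As a usage sketch (not part of the diff): an application could raise the limit through its SparkConf before the SparkContext is created. The app name, master, and the value 20000 below are placeholders; only the key `spark.scheduler.listenerbus.eventqueue.size` and its default of 10000 come from this change.

```scala
import org.apache.spark.{SparkConf, SparkContext}

// The capacity is read from the SparkConf when the LiveListenerBus is created,
// so the setting must be in place before the SparkContext is constructed.
val conf = new SparkConf()
  .setAppName("listenerbus-queue-example") // placeholder app name
  .setMaster("local[2]")                   // placeholder master
  .set("spark.scheduler.listenerbus.eventqueue.size", "20000") // default: 10000

val sc = new SparkContext(conf)
// A value <= 0 is rejected with a SparkException by the transform above.
```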
core/src/main/scala/org/apache/spark/scheduler/LiveListenerBus.scala
@@ -22,6 +22,7 @@ import java.util.concurrent.atomic.AtomicBoolean

import scala.util.DynamicVariable

import org.apache.spark.internal.config._
import org.apache.spark.SparkContext
import org.apache.spark.util.Utils

@@ -32,18 +33,16 @@ import org.apache.spark.util.Utils
 * has started will events be actually propagated to all attached listeners. This listener bus
 * is stopped when `stop()` is called, and it will drop further events after stopping.
 */
-private[spark] class LiveListenerBus extends SparkListenerBus {
+private[spark] class LiveListenerBus(val sparkContext: SparkContext) extends SparkListenerBus {
Contributor comment: I'm modifying LiveListenerBus now and noticed that we're passing in the SparkContext here.

Contributor comment: I guess we also use this to tear down the SparkContext in case the listener thread dies.

Contributor (author) reply: From what I recall and as you mentioned, a ref to the SparkContext was needed to stop it in case the listener thread dies.
  self =>

  import LiveListenerBus._

-  private var sparkContext: SparkContext = null
-
  // Cap the capacity of the event queue so we get an explicit error (rather than
  // an OOM exception) if it's perpetually being added to more quickly than it's being drained.
-  private val EVENT_QUEUE_CAPACITY = 10000
-  private val eventQueue = new LinkedBlockingQueue[SparkListenerEvent](EVENT_QUEUE_CAPACITY)
+  private lazy val EVENT_QUEUE_CAPACITY = sparkContext.conf.get(LISTENER_BUS_EVENT_QUEUE_SIZE)
+  private lazy val eventQueue = new LinkedBlockingQueue[SparkListenerEvent](EVENT_QUEUE_CAPACITY)

  // Indicate if `start()` is called
  private val started = new AtomicBoolean(false)
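To illustrate the comment above about capping the queue: a bounded LinkedBlockingQueue rejects offers once it is full, so a producer that outpaces the consumer gets an explicit signal instead of an eventual OutOfMemoryError. A minimal standalone sketch (not Spark code):

```scala
import java.util.concurrent.LinkedBlockingQueue

// Capacity of 2: offer() returns false once the queue is full instead of
// letting the queue (and the heap) grow without bound.
val queue = new LinkedBlockingQueue[String](2)

println(queue.offer("event-1")) // true
println(queue.offer("event-2")) // true
println(queue.offer("event-3")) // false: at capacity, the element is not enqueued
```

How the bus reacts to a rejected event (for example, dropping and logging it) is handled elsewhere and is not part of this hunk.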
@@ -96,11 +95,9 @@ private[spark] class LiveListenerBus extends SparkListenerBus {
   * listens for any additional events asynchronously while the listener bus is still running.
   * This should only be called once.
   *
-  * @param sc Used to stop the SparkContext in case the listener thread dies.
   */
-  def start(sc: SparkContext): Unit = {
+  def start(): Unit = {
    if (started.compareAndSet(false, true)) {
-      sparkContext = sc
      listenerThread.start()
    } else {
      throw new IllegalStateException(s"$name already started!")
Reviewer comment: remove — I think this is unused now.
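A sketch of how the call site changes with this signature: the bus now receives the SparkContext at construction time, so `start()` takes no argument. The actual wiring lives in SparkContext and may differ in detail; `ListenerBusWiringSketch` and `setUp` are hypothetical names used only for illustration.

```scala
package org.apache.spark.scheduler

import org.apache.spark.SparkContext

// Hypothetical helper showing the before/after shape of the API.
private[spark] object ListenerBusWiringSketch {
  def setUp(sc: SparkContext): LiveListenerBus = {
    val bus = new LiveListenerBus(sc) // previously: new LiveListenerBus()
    bus.start()                       // previously: bus.start(sc)
    bus
  }
}
```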