Commit 29c9b5b (parent: ed1980f)

Disable SparkUI for tests
11 files changed: 83 additions, 31 deletions


core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 9 additions & 3 deletions

@@ -220,8 +220,14 @@ class SparkContext(config: SparkConf) extends Logging {
     new MetadataCleaner(MetadataCleanerType.SPARK_CONTEXT, this.cleanup, conf)

   // Initialize the Spark UI, registering all associated listeners
-  private[spark] val ui = new SparkUI(this)
-  ui.bind()
+  private[spark] val ui: Option[SparkUI] =
+    if (conf.getBoolean("spark.ui.enabled", true)) {
+      Some(new SparkUI(this))
+    } else {
+      // For tests, do not enable the UI
+      None
+    }
+  ui.foreach(_.bind())

   /** A default Hadoop Configuration for the Hadoop code (e.g. file systems) that we reuse. */
   val hadoopConfiguration = SparkHadoopUtil.get.newConfiguration(conf)
@@ -990,7 +996,7 @@ class SparkContext(config: SparkConf) extends Logging {
   /** Shut down the SparkContext. */
   def stop() {
     postApplicationEnd()
-    ui.stop()
+    ui.foreach(_.stop())
     // Do this only if not stopped already - best case effort.
     // prevent NPE if stopped more than once.
     val dagSchedulerCopy = dagScheduler
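
Since ui is now an Option[SparkUI], call sites can no longer dereference it directly. A minimal sketch of the two access patterns this commit relies on (the setup below is illustrative, not taken from the commit; note that ui is private[spark], so it is only reachable from Spark's own packages, as in the test suites further down):

    import org.apache.spark.{SparkConf, SparkContext}

    // Tests turn the UI off explicitly; the default stays true for applications.
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("example")
      .set("spark.ui.enabled", "false")
    val sc = new SparkContext(conf)

    // Pattern 1: run a side effect only when the UI exists, as bind() and stop() now do.
    sc.ui.foreach(ui => println("UI bound at " + ui.appUIAddress))

    // Pattern 2: extract a value with a fallback when the UI is absent; the
    // scheduler backends below fall back to an empty string this way.
    val appUIAddress = sc.ui.map(_.appUIAddress).getOrElse("")

    sc.stop()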

core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala

Lines changed: 1 addition & 1 deletion

@@ -292,7 +292,7 @@ class CoarseGrainedSchedulerBackend(scheduler: TaskSchedulerImpl, actorSystem: A
       logInfo(s"Add WebUI Filter. $filterName, $filterParams, $proxyBase")
       conf.set("spark.ui.filters", filterName)
       conf.set(s"spark.$filterName.params", filterParams)
-      JettyUtils.addFilters(scheduler.sc.ui.getHandlers, conf)
+      scheduler.sc.ui.foreach { ui => JettyUtils.addFilters(ui.getHandlers, conf) }
     }
   }
 }

core/src/main/scala/org/apache/spark/scheduler/cluster/SimrSchedulerBackend.scala

Lines changed: 3 additions & 3 deletions

@@ -17,7 +17,6 @@

 package org.apache.spark.scheduler.cluster

-import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{Path, FileSystem}

 import org.apache.spark.{Logging, SparkContext, SparkEnv}
@@ -47,16 +46,17 @@ private[spark] class SimrSchedulerBackend(

     val conf = SparkHadoopUtil.get.newConfiguration(sc.conf)
     val fs = FileSystem.get(conf)
+    val appUIAddress = sc.ui.map(_.appUIAddress).getOrElse("")

     logInfo("Writing to HDFS file: " + driverFilePath)
     logInfo("Writing Akka address: " + driverUrl)
-    logInfo("Writing Spark UI Address: " + sc.ui.appUIAddress)
+    logInfo("Writing Spark UI Address: " + appUIAddress)

     // Create temporary file to prevent race condition where executors get empty driverUrl file
     val temp = fs.create(tmpPath, true)
     temp.writeUTF(driverUrl)
     temp.writeInt(maxCores)
-    temp.writeUTF(sc.ui.appUIAddress)
+    temp.writeUTF(appUIAddress)
     temp.close()

     // "Atomic" rename

core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala

Lines changed: 3 additions & 1 deletion

@@ -67,8 +67,10 @@ private[spark] class SparkDeploySchedulerBackend(
     val javaOpts = sparkJavaOpts ++ extraJavaOpts
     val command = Command("org.apache.spark.executor.CoarseGrainedExecutorBackend",
       args, sc.executorEnvs, classPathEntries, libraryPathEntries, javaOpts)
+    val appUIAddress = sc.ui.map(_.appUIAddress).getOrElse("")
+    val eventLogDir = sc.eventLogger.map(_.logDir)
     val appDesc = new ApplicationDescription(sc.appName, maxCores, sc.executorMemory, command,
-      sc.ui.appUIAddress, sc.eventLogger.map(_.logDir))
+      appUIAddress, eventLogDir)

     client = new AppClient(sc.env.actorSystem, masters, appDesc, this, conf)
     client.start()

core/src/test/scala/org/apache/spark/ui/UISuite.scala

Lines changed: 31 additions & 13 deletions

@@ -36,11 +36,25 @@ import scala.xml.Node

 class UISuite extends FunSuite {

+  /**
+   * Create a test SparkContext with the SparkUI enabled.
+   * It is safe to `get` the SparkUI directly from the SparkContext returned here.
+   */
+  private def newSparkContext(): SparkContext = {
+    val conf = new SparkConf()
+      .setMaster("local")
+      .setAppName("test")
+      .set("spark.ui.enabled", "true")
+    val sc = new SparkContext(conf)
+    assert(sc.ui.isDefined)
+    sc
+  }
+
   ignore("basic ui visibility") {
-    withSpark(new SparkContext("local", "test")) { sc =>
+    withSpark(newSparkContext()) { sc =>
       // test if the ui is visible, and all the expected tabs are visible
       eventually(timeout(10 seconds), interval(50 milliseconds)) {
-        val html = Source.fromURL(sc.ui.appUIAddress).mkString
+        val html = Source.fromURL(sc.ui.get.appUIAddress).mkString
         assert(!html.contains("random data that should not be present"))
         assert(html.toLowerCase.contains("stages"))
         assert(html.toLowerCase.contains("storage"))
@@ -51,7 +65,7 @@ class UISuite extends FunSuite {
   }

   ignore("visibility at localhost:4040") {
-    withSpark(new SparkContext("local", "test")) { sc =>
+    withSpark(newSparkContext()) { sc =>
       // test if visible from http://localhost:4040
       eventually(timeout(10 seconds), interval(50 milliseconds)) {
         val html = Source.fromURL("http://localhost:4040").mkString
@@ -61,8 +75,8 @@
   }

   ignore("attaching a new tab") {
-    withSpark(new SparkContext("local", "test")) { sc =>
-      val sparkUI = sc.ui
+    withSpark(newSparkContext()) { sc =>
+      val sparkUI = sc.ui.get

       val newTab = new WebUITab(sparkUI, "foo") {
         attachPage(new WebUIPage("") {
@@ -73,7 +87,7 @@
       }
       sparkUI.attachTab(newTab)
       eventually(timeout(10 seconds), interval(50 milliseconds)) {
-        val html = Source.fromURL(sc.ui.appUIAddress).mkString
+        val html = Source.fromURL(sparkUI.appUIAddress).mkString
         assert(!html.contains("random data that should not be present"))

         // check whether new page exists
@@ -87,7 +101,7 @@
       }

       eventually(timeout(10 seconds), interval(50 milliseconds)) {
-        val html = Source.fromURL(sc.ui.appUIAddress.stripSuffix("/") + "/foo").mkString
+        val html = Source.fromURL(sparkUI.appUIAddress.stripSuffix("/") + "/foo").mkString
         // check whether new page exists
         assert(html.contains("magic"))
       }
@@ -129,16 +143,20 @@
   }

   test("verify appUIAddress contains the scheme") {
-    withSpark(new SparkContext("local", "test")) { sc =>
-      val uiAddress = sc.ui.appUIAddress
-      assert(uiAddress.equals("http://" + sc.ui.appUIHostPort))
+    withSpark(newSparkContext()) { sc =>
+      val ui = sc.ui.get
+      val uiAddress = ui.appUIAddress
+      val uiHostPort = ui.appUIHostPort
+      assert(uiAddress.equals("http://" + uiHostPort))
     }
   }

   test("verify appUIAddress contains the port") {
-    withSpark(new SparkContext("local", "test")) { sc =>
-      val splitUIAddress = sc.ui.appUIAddress.split(':')
-      assert(splitUIAddress(2).toInt == sc.ui.boundPort)
+    withSpark(newSparkContext()) { sc =>
+      val ui = sc.ui.get
+      val splitUIAddress = ui.appUIAddress.split(':')
+      val boundPort = ui.boundPort
+      assert(splitUIAddress(2).toInt == boundPort)
     }
   }
 }
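
The withSpark helper the suite uses is defined elsewhere in the test sources and is not part of this diff; a minimal loan-pattern sketch of what such a helper presumably looks like:

    private def withSpark[T](sc: SparkContext)(f: SparkContext => T): T = {
      try {
        f(sc)
      } finally {
        sc.stop() // always release the context, even when the test body throws
      }
    }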

pom.xml

Lines changed: 1 addition & 0 deletions

@@ -900,6 +900,7 @@
         <spark.test.home>${session.executionRootDirectory}</spark.test.home>
         <spark.testing>1</spark.testing>
         <spark.ui.port>0</spark.ui.port>
+        <spark.ui.enabled>false</spark.ui.enabled>
       </systemProperties>
     </configuration>
     <executions>

project/SparkBuild.scala

Lines changed: 1 addition & 0 deletions

@@ -338,6 +338,7 @@ object TestSettings {
     javaOptions in Test += "-Dspark.testing=1",
     javaOptions in Test += "-Dspark.ports.maxRetries=100",
     javaOptions in Test += "-Dspark.ui.port=0",
+    javaOptions in Test += "-Dspark.ui.enabled=false",
     javaOptions in Test += "-Dsun.io.serialization.extendedDebugInfo=true",
     javaOptions in Test ++= System.getProperties.filter(_._1 startsWith "spark")
       .map { case (k,v) => s"-D$k=$v" }.toSeq,
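
Both builds deliver the flag as a JVM system property rather than through code. That is enough because SparkConf, when constructed with its default loadDefaults = true, copies every spark.* system property into the configuration, so SparkContext sees spark.ui.enabled=false with no further plumbing. A small illustration, assuming a test JVM launched with the flag:

    // JVM launched with: -Dspark.ui.enabled=false
    import org.apache.spark.SparkConf

    val conf = new SparkConf() // loadDefaults = true picks up spark.* system properties
    assert(!conf.getBoolean("spark.ui.enabled", true))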

streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingTab.scala

Lines changed: 18 additions & 5 deletions

@@ -17,18 +17,31 @@

 package org.apache.spark.streaming.ui

-import org.apache.spark.Logging
+import org.apache.spark.{Logging, SparkException}
 import org.apache.spark.streaming.StreamingContext
-import org.apache.spark.ui.SparkUITab
+import org.apache.spark.ui.{SparkUI, SparkUITab}

-/** Spark Web UI tab that shows statistics of a streaming job */
+import StreamingTab._
+
+/**
+ * Spark Web UI tab that shows statistics of a streaming job.
+ * This assumes the given SparkContext has enabled its SparkUI.
+ */
 private[spark] class StreamingTab(ssc: StreamingContext)
-  extends SparkUITab(ssc.sc.ui, "streaming") with Logging {
+  extends SparkUITab(getSparkUI(ssc), "streaming") with Logging {

-  val parent = ssc.sc.ui
+  val parent = getSparkUI(ssc)
   val listener = new StreamingJobProgressListener(ssc)

   ssc.addStreamingListener(listener)
   attachPage(new StreamingPage(this))
   parent.attachTab(this)
 }
+
+private object StreamingTab {
+  def getSparkUI(ssc: StreamingContext): SparkUI = {
+    ssc.sc.ui.getOrElse {
+      throw new SparkException("Parent SparkUI to attach this tab to not found!")
+    }
+  }
+}
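
With this change, constructing a StreamingTab against a context whose UI is disabled fails fast with an explicit SparkException instead of silently attaching to nothing. A hypothetical caller (StreamingTab is private[spark], so this sketch only compiles inside Spark's own packages):

    import org.apache.spark.streaming.{Seconds, StreamingContext}
    import org.apache.spark.streaming.ui.StreamingTab

    // Assume spark.ui.enabled=false, so ssc.sc.ui is None.
    val ssc = new StreamingContext("local", "test", Seconds(1))
    // Throws SparkException: "Parent SparkUI to attach this tab to not found!"
    val tab = new StreamingTab(ssc)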

streaming/src/test/scala/org/apache/spark/streaming/UISuite.scala

Lines changed: 14 additions & 3 deletions

@@ -28,9 +28,18 @@ class UISuite extends FunSuite {

   // Ignored: See SPARK-1530
   ignore("streaming tab in spark UI") {
+
+    // For this test, we have to manually set the system property to enable the SparkUI
+    // here because there is no appropriate StreamingContext constructor. We just have
+    // to make sure we remember to restore the original value after the test.
+    val oldSparkUIEnabled = sys.props.get("spark.ui.enabled").getOrElse("false")
+    sys.props("spark.ui.enabled") = "true"
     val ssc = new StreamingContext("local", "test", Seconds(1))
+    assert(ssc.sc.ui.isDefined, "Spark UI is not started!")
+    val ui = ssc.sc.ui.get
+
     eventually(timeout(10 seconds), interval(50 milliseconds)) {
-      val html = Source.fromURL(ssc.sparkContext.ui.appUIAddress).mkString
+      val html = Source.fromURL(ui.appUIAddress).mkString
       assert(!html.contains("random data that should not be present"))
       // test if streaming tab exist
       assert(html.toLowerCase.contains("streaming"))
@@ -39,10 +48,12 @@
     }

     eventually(timeout(10 seconds), interval(50 milliseconds)) {
-      val html = Source.fromURL(
-        ssc.sparkContext.ui.appUIAddress.stripSuffix("/") + "/streaming").mkString
+      val html = Source.fromURL(ui.appUIAddress.stripSuffix("/") + "/streaming").mkString
       assert(html.toLowerCase.contains("batch"))
       assert(html.toLowerCase.contains("network"))
     }
+
+    // Restore the original setting for enabling the SparkUI
+    sys.props("spark.ui.enabled") = oldSparkUIEnabled
   }
 }
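
One caveat worth noting: the property is restored only on the test's success path, and a property that was originally unset is restored to an explicit "false" rather than removed. A more defensive variant, sketched here rather than taken from the commit, would wrap the body in try/finally:

    val oldSparkUIEnabled = sys.props.get("spark.ui.enabled")
    sys.props("spark.ui.enabled") = "true"
    try {
      // ... body of the test ...
    } finally {
      oldSparkUIEnabled match {
        case Some(value) => sys.props("spark.ui.enabled") = value
        case None => sys.props -= "spark.ui.enabled"
      }
    }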

yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala

Lines changed: 1 addition & 1 deletion

@@ -189,7 +189,7 @@ private[spark] class ApplicationMaster(args: ApplicationMasterArguments,
     if (sc == null) {
       finish(FinalApplicationStatus.FAILED, "Timed out waiting for SparkContext.")
     } else {
-      registerAM(sc.ui.appUIAddress, securityMgr)
+      registerAM(sc.ui.map(_.appUIAddress).getOrElse(""), securityMgr)
       try {
         userThread.join()
       } finally {
