Skip to content

Commit ae571fa

Browse files
author
Marcelo Vanzin
committed
Fix end-to-end event logger test.
Checking that events have been written to the log file while the logger is still running is brittle. Instead, check that the expected events show up in the file after the job is done, since that is the functionality we actually care about. Also add another name-parsing test, just for completeness.
1 parent 9db0efd commit ae571fa

File tree

2 files changed

+15
-29
lines changed

2 files changed

+15
-29
lines changed

core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -222,7 +222,7 @@ private[spark] object EventLoggingListener extends Logging {
222222
EventLoggingInfo(log, version, codec, inprogress == null)
223223
} catch {
224224
case e: Exception =>
225-
logError("Exception in parsing logging info from file %s".format(log), e)
225+
logWarning("Exception in parsing logging info from file %s".format(log), e)
226226
null
227227
}
228228
}

core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala

Lines changed: 14 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -117,6 +117,7 @@ class EventLoggingListenerSuite extends FunSuite with BeforeAndAfter with Loggin
117117
// Log file name, Spark version, Compression codec, in progress
118118
("app1-1234-1.0.inprogress", "1.0", None, true),
119119
("app2-1234-0.9.1", "0.9.1", None, false),
120+
("app-with-dashes-in-name-1234-1.0.1.inprogress", "1.0.1", None, true),
120121
("app3-1234-0.9-org.apache.spark.io.LZFCompressionCodec", "0.9",
121122
Some(classOf[LZFCompressionCodec]), false),
122123
("app-123456-1234-0.8-org.apache.spark.io.SnappyCompressionCodec.inprogress", "0.8",
@@ -254,20 +255,21 @@ class EventLoggingListenerSuite extends FunSuite with BeforeAndAfter with Loggin
254255

255256
// Ensure all asserts have actually been triggered
256257
eventExistenceListener.assertAllCallbacksInvoked()
257-
}
258258

259-
/**
260-
* Assert that all of the specified events are logged by the given EventLoggingListener.
261-
*
262-
* This is done while the application is still running, so the log file contains the
263-
* IN_PROGRESS suffix.
264-
*/
265-
private def assertEventsExist(eventLogger: EventLoggingListener, events: Seq[String]) {
266-
val eventLoggingInfo = EventLoggingListener.parseLoggingInfo(
267-
new Path(eventLogger.logPath + EventLoggingListener.IN_PROGRESS))
268-
assert(eventLoggingInfo != null)
259+
// Make sure expected events exist in the log file.
260+
val eventLoggingInfo = EventLoggingListener.parseLoggingInfo(new Path(eventLogger.logPath))
269261
val lines = readFileLines(eventLoggingInfo.path, eventLoggingInfo.compressionCodec)
270-
val eventSet = mutable.Set(events: _*)
262+
val eventSet = mutable.Set(
263+
Utils.getFormattedClassName(SparkListenerApplicationStart),
264+
Utils.getFormattedClassName(SparkListenerBlockManagerAdded),
265+
Utils.getFormattedClassName(SparkListenerEnvironmentUpdate),
266+
Utils.getFormattedClassName(SparkListenerJobStart),
267+
Utils.getFormattedClassName(SparkListenerJobEnd),
268+
Utils.getFormattedClassName(SparkListenerStageSubmitted),
269+
Utils.getFormattedClassName(SparkListenerStageCompleted),
270+
Utils.getFormattedClassName(SparkListenerTaskStart),
271+
Utils.getFormattedClassName(SparkListenerTaskEnd),
272+
Utils.getFormattedClassName(SparkListenerApplicationEnd))
271273
lines.foreach { line =>
272274
eventSet.foreach { event =>
273275
if (line.contains(event)) {
@@ -307,30 +309,14 @@ class EventLoggingListenerSuite extends FunSuite with BeforeAndAfter with Loggin
307309
var appEnded = false
308310

309311
override def onJobStart(jobStart: SparkListenerJobStart) {
310-
assertEventsExist(eventLogger, Seq[String](
311-
Utils.getFormattedClassName(SparkListenerApplicationStart),
312-
Utils.getFormattedClassName(SparkListenerBlockManagerAdded),
313-
Utils.getFormattedClassName(SparkListenerEnvironmentUpdate)
314-
))
315312
jobStarted = true
316313
}
317314

318315
override def onJobEnd(jobEnd: SparkListenerJobEnd) {
319-
assertEventsExist(eventLogger, Seq[String](
320-
Utils.getFormattedClassName(SparkListenerJobStart),
321-
Utils.getFormattedClassName(SparkListenerJobEnd),
322-
Utils.getFormattedClassName(SparkListenerStageSubmitted),
323-
Utils.getFormattedClassName(SparkListenerStageCompleted),
324-
Utils.getFormattedClassName(SparkListenerTaskStart),
325-
Utils.getFormattedClassName(SparkListenerTaskEnd)
326-
))
327316
jobEnded = true
328317
}
329318

330319
override def onApplicationEnd(applicationEnd: SparkListenerApplicationEnd) {
331-
assertEventsExist(eventLogger, Seq[String](
332-
Utils.getFormattedClassName(SparkListenerApplicationEnd)
333-
))
334320
appEnded = true
335321
}
336322

0 commit comments

Comments (0)