core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
@@ -693,6 +693,11 @@ private[spark] class DAGScheduler(

     val jobId = nextJobId.getAndIncrement()
     if (partitions.size == 0) {
+      val time = clock.getTimeMillis()

Member:
This is looking OK to me, though I wouldn't mind, say, @cloud-fan taking a quick look.

Contributor (Author):
@srowen Thank you! @cloud-fan Could you give me some suggestions?

(srowen marked this conversation as resolved.)

+      listenerBus.post(
+        SparkListenerJobStart(jobId, time, Seq[StageInfo](), properties))
+      listenerBus.post(
+        SparkListenerJobEnd(jobId, time, JobSucceeded))
       // Return immediately if the job is running 0 tasks
       return new JobWaiter[U](this, jobId, 0, resultHandler)
     }
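
For context, a hedged sketch of how a zero-partition job arises in the first place: sc.emptyRDD yields an RDD with no partitions, so any action on it reaches the partitions.size == 0 branch patched above. The snippet below is illustrative only (the object name and local master are made up for the sketch) and not part of the patch.

import org.apache.spark.sql.SparkSession

object ZeroPartitionJobExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("zero-partition-job")
      .master("local[2]")
      .getOrCreate()
    val sc = spark.sparkContext

    // emptyRDD has zero partitions, so count() submits a job with no tasks
    // and DAGScheduler.submitJob takes the partitions.size == 0 branch.
    val rdd = sc.emptyRDD[Int]
    assert(rdd.getNumPartitions == 0)
    println(rdd.count()) // 0; with this patch the job still appears in the UI

    spark.stop()
  }
}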
core/src/main/scala/org/apache/spark/status/AppStatusListener.scala
@@ -319,15 +319,15 @@ private[spark] class AppStatusListener(
     }
 
     val lastStageInfo = event.stageInfos.sortBy(_.stageId).lastOption
-    val lastStageName = lastStageInfo.map(_.name).getOrElse("(Unknown Stage Name)")
+    val jobName = lastStageInfo.map(_.name).getOrElse("")
     val jobGroup = Option(event.properties)
       .flatMap { p => Option(p.getProperty(SparkContext.SPARK_JOB_GROUP_ID)) }
     val sqlExecutionId = Option(event.properties)
       .flatMap(p => Option(p.getProperty(SQL_EXECUTION_ID_KEY)).map(_.toLong))
 
     val job = new LiveJob(
       event.jobId,
-      lastStageName,
+      jobName,
       if (event.time > 0) Some(new Date(event.time)) else None,
       event.stageIds,
       jobGroup,
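
A minimal sketch of the changed fallback, using a hypothetical stand-in for StageInfo (the real class takes many constructor arguments): for a job with zero stages, lastOption is None, so the job name now falls back to the empty string rather than the old "(Unknown Stage Name)" placeholder.

// FakeStageInfo is a made-up stand-in, only to illustrate the Option chain.
case class FakeStageInfo(stageId: Int, name: String)

val stageInfos = Seq.empty[FakeStageInfo]
val jobName = stageInfos.sortBy(_.stageId).lastOption
  .map(_.name)
  .getOrElse("") // previously "(Unknown Stage Name)"
assert(jobName == "")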
core/src/main/scala/org/apache/spark/ui/UIUtils.scala (3 changes: 2 additions & 1 deletion)
@@ -363,7 +363,8 @@ private[spark] object UIUtils extends Logging {
       skipped: Int,
       reasonToNumKilled: Map[String, Int],
       total: Int): Seq[Node] = {
-    val completeWidth = "width: %s%%".format((completed.toDouble/total)*100)
+    val ratio = if (total == 0) 100.0 else (completed.toDouble/total)*100
+    val completeWidth = "width: %s%%".format(ratio)
     // started + completed can be > total when there are speculative tasks
     val boundedStarted = math.min(started, total - completed)
     val startWidth = "width: %s%%".format((boundedStarted.toDouble/total)*100)
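
Why the guard matters, as a small sketch: when a job has zero tasks, total is 0, and floating-point division by it in Scala produces Double.NaN rather than an exception, so the old code rendered a broken "width: NaN%" style for the progress bar.

val completed = 0
val total = 0
val oldWidth = "width: %s%%".format((completed.toDouble / total) * 100)
assert(oldWidth == "width: NaN%") // 0.0 / 0 is Double.NaN in Scala

val ratio = if (total == 0) 100.0 else (completed.toDouble / total) * 100
assert("width: %s%%".format(ratio) == "width: 100.0%")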
core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala (9 changes: 7 additions & 2 deletions)
@@ -857,8 +857,13 @@ private[spark] object ApiHelper {
   }
 
   def lastStageNameAndDescription(store: AppStatusStore, job: JobData): (String, String) = {
-    val stage = store.asOption(store.stageAttempt(job.stageIds.max, 0)._1)
-    (stage.map(_.name).getOrElse(""), stage.flatMap(_.description).getOrElse(job.name))
+    // Some jobs have only 0 partitions.
+    if (job.stageIds.isEmpty) {
+      ("", job.name)
+    } else {
+      val stage = store.asOption(store.stageAttempt(job.stageIds.max, 0)._1)
+      (stage.map(_.name).getOrElse(""), stage.flatMap(_.description).getOrElse(job.name))
+    }
   }
 
 }
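
The isEmpty guard exists because max on an empty collection throws rather than returning a sentinel, so a zero-stage job used to crash this helper. A quick illustration:

import scala.util.Try

val stageIds = Seq.empty[Int]
// Throws java.lang.UnsupportedOperationException: empty.max
assert(Try(stageIds.max).isFailure)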