+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.streaming.ui
+
+import javax.servlet.http.HttpServletRequest
+
+import scala.xml.{Node, NodeSeq}
+
+import org.apache.commons.lang3.StringEscapeUtils
+
+import org.apache.spark.streaming.Time
+import org.apache.spark.streaming.ui.StreamingJobProgressListener.{JobId, OutputOpId}
+import org.apache.spark.ui.{UIUtils, WebUIPage}
+import org.apache.spark.ui.jobs.UIData.JobUIData
+
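+/**
+ * A page showing the details (output operations and their Spark jobs) of a single
+ * streaming batch in the Streaming tab.
+ */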
+class BatchPage(parent: StreamingTab) extends WebUIPage("batch") {
+  private val streamingListener = parent.listener
+  private val sparkListener = parent.ssc.sc.jobProgressListener
+
+  // Note: there are two "Duration" columns; the first is the total duration of the output op,
+  // the second is the duration of the individual Spark job in that row.
+  private def columns: Seq[Node] = {
+    <th>Output Op Id</th>
+    <th>Description</th>
+    <th>Duration</th>
+    <th>Job Id</th>
+    <th>Duration</th>
+    <th class="sorttable_nosort">Stages: Succeeded/Total</th>
+    <th class="sorttable_nosort">Tasks (for all stages): Succeeded/Total</th>
+    <th>Last Error</th>
+  }
+
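+  /**
+   * Generate the rows for one output operation and all the Spark jobs submitted by it. The
+   * first job row also renders the shared output op cells, which span all of the op's rows.
+   */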
+  private def makeOutputOpIdRow(outputOpId: OutputOpId, jobs: Seq[JobUIData]): Seq[Node] = {
+    val jobDurations = jobs.map { job =>
+      job.submissionTime.map { start =>
+        val end = job.completionTime.getOrElse(System.currentTimeMillis())
+        end - start
+      }
+    }
+    val formattedOutputOpDuration =
+      if (jobDurations.exists(_.isEmpty)) {
+        // If any job has not finished, show "-" instead of a partial duration
+        "-"
+      } else {
+        UIUtils.formatDuration(jobDurations.flatten.sum)
+      }
+
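+    /** Generate one table row for a Spark job; only the first row includes the output op cells. */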
+    def makeJobRow(job: JobUIData, isFirstRow: Boolean): Seq[Node] = {
+      val lastStageInfo = Option(job.stageIds)
+        .filter(_.nonEmpty)
+        .flatMap { ids => sparkListener.stageIdToInfo.get(ids.max) }
+      val lastStageData = lastStageInfo.flatMap { s =>
+        sparkListener.stageIdToData.get((s.stageId, s.attemptId))
+      }
+
+      val lastStageName = lastStageInfo.map(_.name).getOrElse("(Unknown Stage Name)")
+      val lastStageDescription = lastStageData.flatMap(_.description).getOrElse("")
+      val duration: Option[Long] = {
+        job.submissionTime.map { start =>
+          val end = job.completionTime.getOrElse(System.currentTimeMillis())
+          end - start
+        }
+      }
+      // Walk the stages from the most recent one and keep the first failure reason found
+      val lastFailureReason = job.stageIds.sorted.reverse
+        .flatMap(sparkListener.stageIdToInfo.get)
+        .dropWhile(_.failureReason.isEmpty)
+        .take(1)
+        .flatMap(_.failureReason)
+        .headOption
+        .getOrElse("")
+      val formattedDuration = duration.map(d => UIUtils.formatDuration(d)).getOrElse("-")
+      val detailUrl = s"${UIUtils.prependBaseUri(parent.basePath)}/jobs/job?id=${job.jobId}"
+      <tr>
+        {if (isFirstRow) {
+          <td rowspan={jobs.size.toString}>{outputOpId}</td>
+          <td rowspan={jobs.size.toString}>
+            <span class="description-input" title={lastStageDescription}>
+              {lastStageDescription}
+            </span>{lastStageName}
+          </td>
+          <td rowspan={jobs.size.toString}>{formattedOutputOpDuration}</td>
+        }}
+        <td sorttable_customkey={job.jobId.toString}>
+          <a href={detailUrl}>
+            {job.jobId}{job.jobGroup.map(id => s"($id)").getOrElse("")}
+          </a>
+        </td>
+        <td sorttable_customkey={duration.getOrElse(Long.MaxValue).toString}>
+          {formattedDuration}
+        </td>
+        <td class="stage-progress-cell">
+          {job.completedStageIndices.size}/{job.stageIds.size - job.numSkippedStages}
+          {if (job.numFailedStages > 0) s"(${job.numFailedStages} failed)"}
+          {if (job.numSkippedStages > 0) s"(${job.numSkippedStages} skipped)"}
+        </td>
+        <td class="progress-cell">
+          {UIUtils.makeProgressBar(started = job.numActiveTasks, completed = job.numCompletedTasks,
+            failed = job.numFailedTasks, skipped = job.numSkippedTasks,
+            total = job.numTasks - job.numSkippedTasks)}
+        </td>
+        {failureReasonCell(lastFailureReason)}
+      </tr>
+    }
+
+    if (jobs.isEmpty) {
+      // All of this output op's JobIds were filtered out by getJobData, so there is nothing
+      // to render; calling jobs.head below would throw
+      Nil
+    } else {
+      makeJobRow(jobs.head, isFirstRow = true) ++
+        jobs.tail.flatMap(makeJobRow(_, isFirstRow = false))
+    }
+  }
+
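+  /**
+   * Render a cell for a failure reason: only the first line is shown by default, and a
+   * multi-line reason can be expanded via the "+details" toggle.
+   */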
+  private def failureReasonCell(failureReason: String): Seq[Node] = {
+    val isMultiline = failureReason.indexOf('\n') >= 0
+    // Display the first line by default
+    val failureReasonSummary = StringEscapeUtils.escapeHtml4(
+      if (isMultiline) {
+        failureReason.substring(0, failureReason.indexOf('\n'))
+      } else {
+        failureReason
+      })
+    val details = if (isMultiline) {
+      // scalastyle:off
+      <span onclick="this.parentNode.querySelector('.stacktrace-details').classList.toggle('collapsed')"
+            class="expand-details">
+        +details
+      </span> ++
+      <div class="stacktrace-details collapsed">
+        <pre>{failureReason}</pre>
+      </div>
+      // scalastyle:on
+    } else {
+      ""
+    }
+    <td valign="middle">{failureReasonSummary}{details}</td>
+  }
+
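+  /**
+   * Generate the table that lists this batch's Spark jobs, one row per job, grouped by the
+   * output operation that submitted them.
+   */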
+  private def jobsTable(jobInfos: Seq[(OutputOpId, JobId)]): Seq[Node] = {
+    def getJobData(jobId: JobId): Option[JobUIData] = {
+      sparkListener.activeJobs.get(jobId).orElse {
+        sparkListener.completedJobs.find(_.jobId == jobId).orElse {
+          sparkListener.failedJobs.find(_.jobId == jobId)
+        }
+      }
+    }
+
+    // Group jobInfos by OutputOpId first, then sort them.
+    // E.g., [(0, 1), (1, 3), (0, 2), (1, 4)] => [(0, [1, 2]), (1, [3, 4])]
+    val outputOpIdWithJobIds: Seq[(OutputOpId, Seq[JobId])] =
+      jobInfos.groupBy(_._1).toSeq
+        .sortBy(_._1) // sorted by OutputOpId
+        .map { case (outputOpId, jobs) =>
+          (outputOpId, jobs.map(_._2).sorted) // sort JobIds for each OutputOpId
+        }
+    sparkListener.synchronized {
+      val outputOpIdWithJobs: Seq[(OutputOpId, Seq[JobUIData])] = outputOpIdWithJobIds.map {
+        case (outputOpId, jobIds) =>
+          // Filter out JobIds that don't exist in sparkListener
+          (outputOpId, jobIds.flatMap(getJobData))
+      }
+
+      <table id="batch-job-table" class="table table-bordered table-striped table-condensed">
+        <thead>
+          {columns}
+        </thead>
+        <tbody>
+          {outputOpIdWithJobs.map { case (outputOpId, jobs) => makeOutputOpIdRow(outputOpId, jobs) }}
+        </tbody>
+      </table>
+    }
+  }
+
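+  /** Render the batch details page; the "id" request parameter is the batch time in ms. */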
+  def render(request: HttpServletRequest): Seq[Node] = {
+    val batchTime = Option(request.getParameter("id")).map(id => Time(id.toLong)).getOrElse {
+      throw new IllegalArgumentException("Missing id parameter")
+    }
+    val formattedBatchTime = UIUtils.formatDate(batchTime.milliseconds)
+    val (batchInfo, jobInfos) = streamingListener.synchronized {
+      val _batchInfo = streamingListener.getBatchInfo(batchTime).getOrElse {
+        throw new IllegalArgumentException(s"Batch $formattedBatchTime does not exist")
+      }
+      val _jobInfos = streamingListener.getJobInfos(batchTime)
+      (_batchInfo, _jobInfos)
+    }
+
+    val formattedSchedulingDelay =
+      batchInfo.schedulingDelay.map(UIUtils.formatDuration).getOrElse("-")
+    val formattedProcessingTime =
+      batchInfo.processingDelay.map(UIUtils.formatDuration).getOrElse("-")
+    val formattedTotalDelay = batchInfo.totalDelay.map(UIUtils.formatDuration).getOrElse("-")
+
+    val summary: NodeSeq =
+      <div>
+        <ul class="unstyled">
+          <li>
+            <strong>Batch Duration: </strong>
+            {UIUtils.formatDuration(streamingListener.batchDuration)}
+          </li>
+          <li>
+            <strong>Input data size: </strong>
+            {batchInfo.numRecords} records
+          </li>
+          <li>
+            <strong>Scheduling delay: </strong>
+            {formattedSchedulingDelay}
+          </li>
+          <li>
+            <strong>Processing time: </strong>
+            {formattedProcessingTime}
+          </li>
+          <li>
+            <strong>Total delay: </strong>
+            {formattedTotalDelay}
+          </li>
+        </ul>
+      </div>
+
+    val content = summary ++ jobInfos.map(jobsTable).getOrElse {
+      <div>Cannot find any job for Batch {formattedBatchTime}</div>
+    }
+    UIUtils.headerSparkPage(s"Details of batch at $formattedBatchTime", content, parent)
+  }
+}