
Commit e031719

fixes from review
1 parent 1f53a66 commit e031719

15 files changed, +91 -99 lines changed


core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala

Lines changed: 1 addition & 6 deletions
@@ -140,17 +140,12 @@ private[history] class FsHistoryProvider(conf: SparkConf) extends ApplicationHis
   }
 
   override def getListing(refresh: Boolean): Iterable[FsApplicationHistoryInfo] = {
-    if (refresh) checkForLogs()
     applications.values
   }
 
   override def getAppUI(appId: String): Option[SparkUI] = {
     try {
-      val appOpt = applications.get(appId).orElse {
-        getListing(true)
-        applications.get(appId)
-      }
-      appOpt.map { info =>
+      applications.get(appId).map { info =>
         val replayBus = new ReplayListenerBus()
         val ui = {
           val conf = this.conf.clone()

core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala

Lines changed: 1 addition & 1 deletion
@@ -34,7 +34,7 @@ private[history] class HistoryPage(parent: HistoryServer) extends WebUIPage("")
     val requestedIncomplete =
       Option(request.getParameter("showIncomplete")).getOrElse("false").toBoolean
 
-    val allApps = parent.getApplicationList(true).filter(_.completed != requestedIncomplete)
+    val allApps = parent.getApplicationList(false).filter(_.completed != requestedIncomplete)
     val actualFirst = if (requestedFirst < allApps.size) requestedFirst else 0
     val apps = allApps.slice(actualFirst, Math.min(actualFirst + pageSize, allApps.size))
 

core/src/main/scala/org/apache/spark/status/api/v1/AllRDDResource.scala

Lines changed: 2 additions & 2 deletions
@@ -67,8 +67,8 @@ private[spark] object AllRDDResource {
       .flatMap { _.rddBlocksById(rddId) }
       .sortWith { _._1.name < _._1.name }
       .map { case (blockId, status) =>
-      (blockId, status, blockLocations.get(blockId).getOrElse(Seq[String]("Unknown")))
-    }
+        (blockId, status, blockLocations.get(blockId).getOrElse(Seq[String]("Unknown")))
+      }
 
 
     val dataDistribution = if (includeDetails) {

core/src/main/scala/org/apache/spark/status/api/v1/AllStagesResource.scala

Lines changed: 4 additions & 7 deletions
@@ -65,12 +65,12 @@ private[v1] object AllStagesResource {
       includeDetails: Boolean
   ): StageData = {
 
-    val taskData = if(includeDetails) {
+    val taskData = if (includeDetails) {
       Some(stageUiData.taskData.map { case (k, v) => k -> convertTaskData(v) } )
     } else {
       None
     }
-    val executorSummary = if(includeDetails) {
+    val executorSummary = if (includeDetails) {
       Some(stageUiData.executorSummary.map { case (k, summary) =>
         k -> new ExecutorStageSummary(
           taskTime = summary.taskTime,
@@ -129,7 +129,6 @@ private[v1] object AllStagesResource {
     }
   }
 
-
   def convertTaskData(uiData: TaskUIData): TaskData = {
     new TaskData(
       taskId = uiData.taskInfo.taskId,
@@ -153,10 +152,10 @@ private[v1] object AllStagesResource {
     val rawMetrics = allTaskData.flatMap{_.taskMetrics}.toSeq
 
     def getMetric[T](data: Seq[T], f: T => Double): IndexedSeq[Double] =
-      Distribution(data.map{d=> f(d)}).get.getQuantiles(quantiles)
+      Distribution(data.map { d => f(d) }).get.getQuantiles(quantiles)
 
     abstract class MetricHelper[I,O](f: InternalTaskMetrics => Option[I]) {
-      val data: Seq[I] = rawMetrics.flatMap{x => f(x)}
+      val data: Seq[I] = rawMetrics.flatMap { x => f(x) } // expanded to keep the compiler happy
       def build: O
       def m(f: I => Double): IndexedSeq[Double] = getMetric(data, f)
       def metricOption: Option[O] = {
@@ -211,7 +210,6 @@ private[v1] object AllStagesResource {
       )
     }.metricOption
 
-
     new TaskMetricDistributions(
       quantiles = quantiles,
       executorDeserializeTime = m(_.executorDeserializeTime),
@@ -232,7 +230,6 @@ private[v1] object AllStagesResource {
       new AccumulableInfo(acc.id, acc.name, acc.update, acc.value)
     }
 
-
   def convertUiTaskMetrics(internal: InternalTaskMetrics): TaskMetrics = {
     new TaskMetrics(
       executorDeserializeTime = internal.executorDeserializeTime,
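A note on the "// expanded to keep the compiler happy" comment above: this is most likely the usual Scala 2.x collections issue, where flatMap over a Seq wants a function returning a GenTraversableOnce, and the implicit Option-to-Iterable conversion applies to each call's result inside an explicit closure but cannot retype a function value. A minimal sketch under that assumption (names are illustrative, not from Spark):

object FlatMapExpansionSketch {
  val xs: Seq[Int] = Seq(1, 2, 3)
  val f: Int => Option[Int] = x => if (x % 2 == 1) Some(x) else None

  // Compiles: option2Iterable converts each Option result inside the closure.
  val expanded: Seq[Int] = xs.flatMap { x => f(x) }

  // Would not compile on Scala 2.10/2.11: nothing converts the function
  // value `f` itself into an Int => GenTraversableOnce[Int].
  // val direct: Seq[Int] = xs.flatMap(f)
}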

core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala

Lines changed: 3 additions & 2 deletions
@@ -17,6 +17,7 @@
 package org.apache.spark.status.api.v1
 
 import java.util.Date
+import java.util.{Arrays, List => JList}
 import javax.ws.rs.{DefaultValue, GET, Produces, QueryParam}
 import javax.ws.rs.core.MediaType
 
@@ -28,14 +29,14 @@ private[v1] class ApplicationListResource(uiRoot: UIRoot) {
 
   @GET
   def appList(
-      @QueryParam("status") status: java.util.List[ApplicationStatus],
+      @QueryParam("status") status: JList[ApplicationStatus],
       @DefaultValue("2010-01-01") @QueryParam("minDate") minDate: SimpleDateParam,
      @DefaultValue("3000-01-01") @QueryParam("maxDate") maxDate: SimpleDateParam
   ): Seq[ApplicationInfo] = {
     val allApps = uiRoot.getApplicationInfoList
     val adjStatus = {
       if (status.isEmpty) {
-        java.util.Arrays.asList(ApplicationStatus.values: _*)
+        Arrays.asList(ApplicationStatus.values: _*)
       } else {
         status
       }
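Two idioms in this hunk are worth spelling out: the `List => JList` rename keeps java.util.List usable without shadowing Scala's own List, and `: _*` expands a Scala array into a Java varargs parameter, which is how Arrays.asList(ApplicationStatus.values: _*) works. A self-contained sketch (object and value names are illustrative):

import java.util.{Arrays, List => JList}

object VarargsSketch {
  // `: _*` spreads the array into Arrays.asList's T... parameter.
  val letters: JList[String] = Arrays.asList(Array("a", "b", "c"): _*)

  // scala.List stays available unqualified thanks to the rename.
  val numbers: List[Int] = List(1, 2, 3)
}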

core/src/main/scala/org/apache/spark/status/api/v1/JsonRootResource.scala

Lines changed: 2 additions & 3 deletions
@@ -139,7 +139,7 @@ private[v1] trait UIRootFromServletContext {
 }
 
 private[v1] class NotFoundException(msg: String) extends WebApplicationException(
-  new IllegalArgumentException(msg),
+  new NoSuchElementException(msg),
   Response
     .status(Response.Status.NOT_FOUND)
     .entity(msg)
@@ -154,7 +154,6 @@ private[v1] class BadParameterException(msg: String) extends WebApplicationExcep
     .build()
 ) {
   def this(param: String, exp: String, actual: String) = {
-    this("Bad value for parameter \"" + param + "\". Expected a " + exp + ", got \"" +
-      actual + "\"")
+    this(raw"""Bad value for parameter "$param". Expected a $exp, got "$actual"""")
   }
 }
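The rewritten BadParameterException constructor relies on two Scala string features: triple-quoted literals may contain unescaped double quotes, and the `raw` interpolator substitutes $-references without processing backslash escapes. A runnable sketch showing that the old and new forms produce the same message (the values are illustrative):

object RawInterpolatorSketch extends App {
  val param = "quantiles"
  val expected = "double"
  val actual = "abc"

  // Old form: manual concatenation with escaped quotes.
  val concatenated = "Bad value for parameter \"" + param + "\". Expected a " +
    expected + ", got \"" + actual + "\""

  // New form: bare quotes inside triple quotes, values interpolated.
  val interpolated =
    raw"""Bad value for parameter "$param". Expected a $expected, got "$actual""""

  assert(concatenated == interpolated) // both render the same message
}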

core/src/main/scala/org/apache/spark/status/api/v1/OneRDDResource.scala

Lines changed: 1 addition & 1 deletion
@@ -29,7 +29,7 @@ private[v1] class OneRDDResource(uiRoot: UIRoot) {
   ): RDDStorageInfo = {
     uiRoot.withSparkUI(appId) { ui =>
       AllRDDResource.getRDDStorageInfo(rddId, ui.storageListener, true).getOrElse(
-        throw new IllegalArgumentException("no rdd found w/ id " + rddId)
+        throw new NotFoundException(s"no rdd found w/ id $rddId")
       )
     }
   }
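getOrElse(throw ...) typechecks because Option.getOrElse takes its default by name and `throw` has type Nothing, a subtype of every type, so the exception is raised only when the option is empty. A minimal illustration (the map and ids are hypothetical):

object GetOrElseThrowSketch extends App {
  val rdds: Map[Int, String] = Map(1 -> "cached-rdd")

  def lookup(rddId: Int): String =
    // The by-name default is evaluated, and throws, only on a miss.
    rdds.get(rddId).getOrElse(throw new NoSuchElementException(s"no rdd found w/ id $rddId"))

  println(lookup(1)) // prints "cached-rdd"
  // lookup(2)       // would throw NoSuchElementException
}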

core/src/main/scala/org/apache/spark/status/api/v1/OneStageResource.scala

Lines changed: 49 additions & 46 deletions
@@ -24,6 +24,7 @@ import org.apache.spark.scheduler.StageInfo
 import org.apache.spark.ui.jobs.JobProgressListener
 import org.apache.spark.ui.jobs.UIData.StageUIData
 import org.apache.spark.util.SparkEnum
+import org.apache.spark.status.api.v1.StageStatus._
 
 @Produces(Array(MediaType.APPLICATION_JSON))
 private[v1] class OneStageResource(uiRoot: UIRoot) {
@@ -34,15 +35,9 @@ private[v1] class OneStageResource(uiRoot: UIRoot) {
       @PathParam("appId") appId: String,
       @PathParam("stageId") stageId: Int
   ): Seq[StageData] = {
-    forStage(appId, stageId){ (listener,stageAttempts) =>
-      stageAttempts.map { case (status, stageInfo) =>
-        val stageUiData = listener.synchronized {
-          listener.stageIdToData.get((stageInfo.stageId, stageInfo.attemptId)).
-            getOrElse(throw new SparkException("failed to get full stage data for stage: " +
-              stageInfo.stageId + ":" + stageInfo.attemptId)
-          )
-        }
-        AllStagesResource.stageUiToStageData(status, stageInfo, stageUiData,
+    withStage(appId, stageId){ stageAttempts =>
+      stageAttempts.map { stage =>
+        AllStagesResource.stageUiToStageData(stage.status, stage.info, stage.ui,
           includeDetails = true)
       }
     }
@@ -55,8 +50,8 @@ private[v1] class OneStageResource(uiRoot: UIRoot) {
       @PathParam("stageId") stageId: Int,
       @PathParam("attemptId") attemptId: Int
   ): StageData = {
-    forStageAttempt(appId, stageId, attemptId) { case (status, stageInfo, stageUiData) =>
-      AllStagesResource.stageUiToStageData(status, stageInfo, stageUiData,
+    withStageAttempt(appId, stageId, attemptId) { stage =>
+      AllStagesResource.stageUiToStageData(stage.status, stage.info, stage.ui,
         includeDetails = true)
     }
   }
@@ -69,16 +64,16 @@ private[v1] class OneStageResource(uiRoot: UIRoot) {
       @PathParam("attemptId") attemptId: Int,
       @DefaultValue("0.05,0.25,0.5,0.75,0.95") @QueryParam("quantiles") quantileString: String
   ): TaskMetricDistributions = {
-    forStageAttempt(appId, stageId, attemptId) { case (status, stageInfo, stageUiData) =>
-      val quantiles = quantileString.split(",").map{s =>
+    withStageAttempt(appId, stageId, attemptId) { stage =>
+      val quantiles = quantileString.split(",").map { s =>
       try {
          s.toDouble
        } catch {
          case nfe: NumberFormatException =>
            throw new BadParameterException("quantiles", "double", s)
        }
      }
-      AllStagesResource.taskMetricDistributions(stageUiData.taskData.values, quantiles)
+      AllStagesResource.taskMetricDistributions(stage.ui.taskData.values, quantiles)
     }
   }
 
@@ -92,49 +87,57 @@ private[v1] class OneStageResource(uiRoot: UIRoot) {
       @DefaultValue("20") @QueryParam("length") length: Int,
       @DefaultValue("ID") @QueryParam("sortBy") sortBy: TaskSorting
   ): Seq[TaskData] = {
-    forStageAttempt(appId, stageId, attemptId) { case (status, stageInfo, stageUiData) =>
-      val tasks = stageUiData.taskData.values.map{AllStagesResource.convertTaskData}.toIndexedSeq
+    withStageAttempt(appId, stageId, attemptId) { stage =>
+      val tasks = stage.ui.taskData.values.map{AllStagesResource.convertTaskData}.toIndexedSeq
        .sorted(sortBy.ordering)
      tasks.slice(offset, offset + length)
    }
  }
 
+  private case class StageStatusInfoUi(status: StageStatus, info: StageInfo, ui: StageUIData)
 
-  def forStage[T](appId: String, stageId: Int)
-      (f: (JobProgressListener, Seq[(StageStatus, StageInfo)]) => T): T = {
+  private def withStage[T](appId: String, stageId: Int)
+      (f: Seq[StageStatusInfoUi] => T): T = {
     uiRoot.withSparkUI(appId) { ui =>
-      val stageAndStatus = AllStagesResource.stagesAndStatus(ui)
-      val stageAttempts = stageAndStatus.flatMap { case (status, stages) =>
-        val matched = stages.filter { stage => stage.stageId == stageId}
-        matched.map {
-          status -> _
-        }
-      }
+      val stageAttempts = findStageStatusUIData(ui.jobProgressListener, stageId)
       if (stageAttempts.isEmpty) {
         throw new NotFoundException("unknown stage: " + stageId)
       } else {
-        f(ui.jobProgressListener, stageAttempts)
+        f(stageAttempts)
+      }
+    }
+  }
+
+  private def findStageStatusUIData(
+      listener: JobProgressListener,
+      stageId: Int): Seq[StageStatusInfoUi] = {
+    listener.synchronized {
+      def getStatusInfoUi(status: StageStatus, infos: Seq[StageInfo]): Seq[StageStatusInfoUi] = {
+        infos.filter { _.stageId == stageId }.map { info =>
+          val ui = listener.stageIdToData.getOrElse((info.stageId, info.attemptId),
+            //this is an internal error -- we should always have uiData
+            throw new SparkException(
+              s"no stage ui data found for stage: ${info.stageId}:${info.attemptId}")
+          )
+          StageStatusInfoUi(status, info, ui)
+        }
       }
+      getStatusInfoUi(Active, listener.activeStages.values.toSeq) ++
+        getStatusInfoUi(Complete, listener.completedStages) ++
+        getStatusInfoUi(Failed, listener.failedStages) ++
+        getStatusInfoUi(Pending, listener.pendingStages.values.toSeq)
     }
   }
 
-  def forStageAttempt[T](appId: String, stageId: Int, attemptId: Int)
-      (f: (StageStatus, StageInfo, StageUIData) => T): T = {
-    forStage(appId, stageId) { case (listener, attempts) =>
-      val oneAttempt = attempts.filter{ case (status, stage) =>
-        stage.attemptId == attemptId
-      }.headOption
+  private def withStageAttempt[T](appId: String, stageId: Int, attemptId: Int)
+      (f: StageStatusInfoUi => T): T = {
+    withStage(appId, stageId) { attempts =>
+      val oneAttempt = attempts.find { stage => stage.info.attemptId == attemptId }
       oneAttempt match {
-        case Some((status, stageInfo)) =>
-          val stageUiData = listener.synchronized {
-            listener.stageIdToData.get((stageInfo.stageId, stageInfo.attemptId)).
-              getOrElse(throw new SparkException("failed to get full stage data for stage: " +
-                stageInfo.stageId + ":" + stageInfo.attemptId)
-            )
-          }
-          f(status, stageInfo, stageUiData)
+        case Some(stage) =>
+          f(stage)
         case None =>
-          val stageAttempts = attempts.map { _._2.attemptId}
+          val stageAttempts = attempts.map { _.info.attemptId }
           throw new NotFoundException(s"unknown attempt for stage $stageId. " +
             s"Found attempts: ${stageAttempts.mkString("[", ",", "]")}")
       }
@@ -146,17 +149,18 @@ sealed abstract class TaskSorting extends SparkEnum {
   def ordering: Ordering[TaskData]
   def alternateNames: Seq[String] = Seq()
 }
+
 object TaskSorting extends JerseyEnum[TaskSorting] {
   final val ID = {
     case object ID extends TaskSorting {
-      def ordering = Ordering.by{td: TaskData => td.taskId}
+      def ordering = Ordering.by { td: TaskData => td.taskId }
     }
     ID
   }
 
   final val IncreasingRuntime = {
     case object IncreasingRuntime extends TaskSorting {
-      def ordering = Ordering.by{td: TaskData =>
+      def ordering = Ordering.by { td: TaskData =>
         td.taskMetrics.map{_.executorRunTime}.getOrElse(-1L)
       }
       override def alternateNames = Seq("runtime", "+runtime")
@@ -179,14 +183,13 @@ object TaskSorting extends JerseyEnum[TaskSorting] {
   )
 
   val alternateNames: Map[String, TaskSorting] =
-    values.flatMap{x => x.alternateNames.map{_ -> x}}.toMap
+    values.flatMap { x => x.alternateNames.map { _ -> x } }.toMap
 
   override def fromString(s: String): TaskSorting = {
     alternateNames.find { case (k, v) =>
       k.toLowerCase() == s.toLowerCase()
-    }.map{_._2}.getOrElse{
+    }.map { _._2 }.getOrElse{
       super.fromString(s)
     }
   }
 }
-
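The forStage/forStageAttempt to withStage/withStageAttempt refactor is an instance of the loan pattern: a private helper performs the lookup and the uniform not-found handling, then hands the validated value to a caller-supplied function. A stripped-down sketch of the shape, using simplified stand-in types rather than the Spark classes:

object LoanPatternSketch {
  case class StageSnapshot(stageId: Int, attemptId: Int)

  private val stages = Seq(StageSnapshot(1, 0), StageSnapshot(1, 1))

  // Look up all attempts for a stage and loan them to `f`,
  // failing in one place when the stage is unknown.
  def withStage[T](stageId: Int)(f: Seq[StageSnapshot] => T): T = {
    val attempts = stages.filter(_.stageId == stageId)
    if (attempts.isEmpty) throw new NoSuchElementException(s"unknown stage: $stageId")
    else f(attempts)
  }

  // Narrow to a single attempt by composing the stage-level helper.
  def withStageAttempt[T](stageId: Int, attemptId: Int)(f: StageSnapshot => T): T =
    withStage(stageId) { attempts =>
      attempts.find(_.attemptId == attemptId) match {
        case Some(stage) => f(stage)
        case None =>
          throw new NoSuchElementException(s"unknown attempt $attemptId for stage $stageId")
      }
    }

  // Usage: callers only describe what to do with a found attempt.
  def describe(stageId: Int, attemptId: Int): String =
    withStageAttempt(stageId, attemptId)(s => s"stage ${s.stageId}.${s.attemptId}")
}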

core/src/main/scala/org/apache/spark/status/api/v1/SecurityFilter.scala

Lines changed: 2 additions & 2 deletions
@@ -29,8 +29,8 @@ private[v1] class SecurityFilter extends ContainerRequestFilter with UIRootFromS
     } else {
       throw new WebApplicationException(
         Response
-          .status(Response.Status.UNAUTHORIZED)
-          .entity("user \"" + user + "\"is not authorized")
+          .status(Response.Status.FORBIDDEN)
+          .entity(raw"""user "$user"is not authorized""")
           .build()
       )
     }
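The status change is about HTTP semantics: 401 Unauthorized asks the client to authenticate, while 403 Forbidden tells an already-identified user that access is denied, which matches this filter since the user is known but not authorized. A minimal JAX-RS sketch of the two responses (object name is illustrative):

import javax.ws.rs.core.Response

object HttpStatusSketch {
  // 401: credentials missing or invalid; authenticate and retry.
  val unauthorized: Response = Response.status(Response.Status.UNAUTHORIZED).build()

  // 403: identity known, permission denied; retrying as the same user won't help.
  val forbidden: Response = Response.status(Response.Status.FORBIDDEN).build()
}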

core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala

Lines changed: 1 addition & 2 deletions
@@ -20,10 +20,9 @@ package org.apache.spark.ui.exec
 import java.net.URLEncoder
 import javax.servlet.http.HttpServletRequest
 
-import org.apache.spark.status.api.v1.ExecutorSummary
-
 import scala.xml.Node
 
+import org.apache.spark.status.api.v1.ExecutorSummary
 import org.apache.spark.ui.{ToolTips, UIUtils, WebUIPage}
 import org.apache.spark.util.Utils
 