Commit b86e2b0

style

1 parent 18a8c45 commit b86e2b0

19 files changed: +73 -112 lines

core/src/main/java/org/apache/spark/JobExecutionStatus.java

Lines changed: 2 additions & 18 deletions

@@ -17,31 +17,15 @@
 
 package org.apache.spark;
 
-import com.google.common.base.Joiner;
-
-import java.util.Arrays;
+import org.apache.spark.status.api.EnumUtil;
 
 public enum JobExecutionStatus {
   RUNNING,
   SUCCEEDED,
   FAILED,
   UNKNOWN;
 
-
-  private static String VALID_VALUES = Joiner.on(", ").join(
-    Arrays.asList(JobExecutionStatus.values()));
-
   public static JobExecutionStatus fromString(String str) {
-    if (str == null) {
-      return null;
-    }
-    try {
-      JobExecutionStatus res = valueOf(str.toUpperCase());
-      return res;
-    } catch (IllegalArgumentException iae) {
-      throw new IllegalArgumentException(
-        String.format("Illegal type='%s'. Supported type values: %s",
-          str, VALID_VALUES));
-    }
+    return EnumUtil.parseIgnoreCase(JobExecutionStatus.class, str);
   }
 }

core/src/main/java/org/apache/spark/status/api/ApplicationStatus.java

Lines changed: 5 additions & 9 deletions

@@ -17,16 +17,12 @@
 
 package org.apache.spark.status.api;
 
-import com.google.common.base.Joiner;
-
-import java.util.Arrays;
-
 public enum ApplicationStatus {
-    COMPLETED,
-    RUNNING;
+  COMPLETED,
+  RUNNING;
 
-    public static ApplicationStatus fromString(String str) {
-        return EnumUtil.parseIgnoreCase(ApplicationStatus.class, str);
-    }
+  public static ApplicationStatus fromString(String str) {
+    return EnumUtil.parseIgnoreCase(ApplicationStatus.class, str);
+  }
 
 }

core/src/main/java/org/apache/spark/status/api/EnumUtil.java

Lines changed: 13 additions & 13 deletions

@@ -21,18 +21,18 @@
 import java.util.Arrays;
 
 public class EnumUtil {
-    public static <E extends Enum<E>> E parseIgnoreCase(Class<E> clz, String str) {
-        E[] constants = clz.getEnumConstants();
-        if (str == null) {
-            return null;
-        }
-        for (E e: constants) {
-            if (e.name().equalsIgnoreCase(str))
-                return e;
-        }
-        throw new IllegalArgumentException(
-            String.format("Illegal type='%s'. Supported type values: %s",
-                str, Joiner.on(", ").join(
-                    Arrays.asList(constants))));
+  public static <E extends Enum<E>> E parseIgnoreCase(Class<E> clz, String str) {
+    E[] constants = clz.getEnumConstants();
+    if (str == null) {
+      return null;
     }
+    for (E e : constants) {
+      if (e.name().equalsIgnoreCase(str))
+        return e;
+    }
+    throw new IllegalArgumentException(
+      String.format("Illegal type='%s'. Supported type values: %s",
+        str, Joiner.on(", ").join(
+          Arrays.asList(constants))));
+  }
 }
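
For context, a minimal usage sketch (the demo class is hypothetical, not part of this commit, and assumes JobExecutionStatus and EnumUtil as shown above are on the classpath). It illustrates the behavior the status enums now share through EnumUtil.parseIgnoreCase: case-insensitive matching, null passed through as null, and an IllegalArgumentException listing the supported constants for unrecognized input.

import org.apache.spark.JobExecutionStatus;

public class EnumUtilDemo {
  public static void main(String[] args) {
    // Matching is case-insensitive against the enum constant names.
    System.out.println(JobExecutionStatus.fromString("running"));  // RUNNING

    // A null input is passed through as null rather than throwing
    // (see the null check in parseIgnoreCase above).
    System.out.println(JobExecutionStatus.fromString(null));  // null

    // An unrecognized value raises IllegalArgumentException naming the
    // supported constants, per the String.format in parseIgnoreCase:
    // Illegal type='bogus'. Supported type values: RUNNING, SUCCEEDED, FAILED, UNKNOWN
    try {
      JobExecutionStatus.fromString("bogus");
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());
    }
  }
}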

core/src/main/java/org/apache/spark/status/api/StageStatus.java

Lines changed: 7 additions & 7 deletions

@@ -18,12 +18,12 @@
 package org.apache.spark.status.api;
 
 public enum StageStatus {
-    Active,
-    Complete,
-    Failed,
-    Pending;
+  Active,
+  Complete,
+  Failed,
+  Pending;
 
-    public static StageStatus fromString(String str) {
-        return EnumUtil.parseIgnoreCase(StageStatus.class, str);
-    }
+  public static StageStatus fromString(String str) {
+    return EnumUtil.parseIgnoreCase(StageStatus.class, str);
+  }
 }

core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala

Lines changed: 1 addition & 2 deletions

@@ -73,7 +73,6 @@ class HistoryServer(
   protected override def doGet(req: HttpServletRequest, res: HttpServletResponse): Unit = {
     val parts = Option(req.getPathInfo()).getOrElse("").split("/")
     if (parts.length < 2) {
-      logError("bad path info!")
       res.sendError(HttpServletResponse.SC_BAD_REQUEST,
         s"Unexpected path info in request (URI = ${req.getRequestURI()}")
       return
@@ -163,7 +162,7 @@ class HistoryServer(
   def getApplicationList(refresh: Boolean) = provider.getListing(refresh)
 
   def getApplicationInfoList: Seq[ApplicationInfo] = {
-    getApplicationList(true).map{ApplicationsListResource.appHistoryInfoToPublicAppInfo}.toSeq
+    getApplicationList(true).map { ApplicationsListResource.appHistoryInfoToPublicAppInfo }.toSeq
   }
 
   /**
core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala

Lines changed: 3 additions & 3 deletions

@@ -70,15 +70,15 @@ class MasterWebUI(val master: Master, requestedPort: Int)
     val state = masterPage.getMasterState
     val activeApps = state.activeApps.sortBy(_.startTime).reverse
     val completedApps = state.completedApps.sortBy(_.endTime).reverse
-    activeApps.map{ApplicationsListResource.convertApplicationInfo(_, false)} ++
-      completedApps.map{ApplicationsListResource.convertApplicationInfo(_, true)}
+    activeApps.map { ApplicationsListResource.convertApplicationInfo(_, false) } ++
+      completedApps.map { ApplicationsListResource.convertApplicationInfo(_, true) }
   }
 
   def getSparkUI(appId: String): Option[SparkUI] = {
     val state = masterPage.getMasterState
     val activeApps = state.activeApps.sortBy(_.startTime).reverse
     val completedApps = state.completedApps.sortBy(_.endTime).reverse
-    (activeApps ++ completedApps).find{_.id == appId}.flatMap{
+    (activeApps ++ completedApps).find { _.id == appId }.flatMap {
       master.rebuildSparkUI
     }
   }

core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerJsonRoute.scala

Lines changed: 0 additions & 18 deletions
This file was deleted.

core/src/main/scala/org/apache/spark/status/api/v1/AllJobsResource.scala

Lines changed: 4 additions & 4 deletions

@@ -32,8 +32,8 @@ class AllJobsResource(uiRoot: UIRoot) {
       @PathParam("appId") appId: String,
       @QueryParam("status") statuses: java.util.List[JobExecutionStatus]
   ): Seq[JobData] = {
-    uiRoot.withSparkUI(appId){ui =>
-      val statusToJobs = ui.jobProgressListener.synchronized{
+    uiRoot.withSparkUI(appId) { ui =>
+      val statusToJobs = ui.jobProgressListener.synchronized {
         Seq(
           JobExecutionStatus.RUNNING -> ui.jobProgressListener.activeJobs.values.toSeq,
           JobExecutionStatus.SUCCEEDED -> ui.jobProgressListener.completedJobs.toSeq,
@@ -70,8 +70,8 @@ object AllJobsResource {
     val lastStageData = lastStageInfo.flatMap { s =>
       listener.stageIdToData.get((s.stageId, s.attemptId))
     }
-    val lastStageName = lastStageInfo.map(_.name).getOrElse("(Unknown Stage Name)")
-    val lastStageDescription = lastStageData.flatMap(_.description)
+    val lastStageName = lastStageInfo.map { _.name }.getOrElse("(Unknown Stage Name)")
+    val lastStageDescription = lastStageData.flatMap { _.description }
     JobData(
       jobId = job.jobId,
       name = lastStageName,

core/src/main/scala/org/apache/spark/status/api/v1/AllRDDResource.scala

Lines changed: 8 additions & 8 deletions

@@ -50,7 +50,7 @@ object AllRDDResource {
       listener: StorageListener,
       includeDetails: Boolean): Option[RDDStorageInfo] = {
     val storageStatusList = listener.storageStatusList
-    listener.rddInfoList.find(_.id == rddId).map{rddInfo =>
+    listener.rddInfoList.find { _.id == rddId }.map { rddInfo =>
       getRDDStorageInfo(rddId, rddInfo, storageStatusList, includeDetails)
     }
   }
@@ -61,37 +61,37 @@ object AllRDDResource {
       storageStatusList: Seq[StorageStatus],
       includeDetails: Boolean
   ): RDDStorageInfo = {
-    val workers = storageStatusList.map((rddId, _))
+    val workers = storageStatusList.map { (rddId, _) }
     val blockLocations = StorageUtils.getRddBlockLocations(rddId, storageStatusList)
     val blocks = storageStatusList
-      .flatMap(_.rddBlocksById(rddId))
-      .sortWith(_._1.name < _._1.name)
+      .flatMap { _.rddBlocksById(rddId) }
+      .sortWith { _._1.name < _._1.name }
       .map { case (blockId, status) =>
         (blockId, status, blockLocations.get(blockId).getOrElse(Seq[String]("Unknown")))
       }
 
 
     val dataDistribution = if (includeDetails) {
-      Some(storageStatusList.map{status =>
+      Some(storageStatusList.map { status =>
        RDDDataDistribution(
          address = status.blockManagerId.hostPort,
          memoryUsed = status.memUsedByRdd(rddId),
          memoryRemaining = status.memRemaining,
          diskUsed = status.diskUsedByRdd(rddId)
-        )})
+        ) } )
    } else {
      None
    }
    val partitions = if (includeDetails) {
-      Some(blocks.map{ case(id, block, locations) =>
+      Some(blocks.map { case(id, block, locations) =>
        RDDPartitionInfo(
          blockName = id.name,
          storageLevel = block.storageLevel.description,
          memoryUsed = block.memSize,
          diskUsed = block.diskSize,
          executors = locations
        )
-      })
+      } )
    } else {
      None
    }

core/src/main/scala/org/apache/spark/status/api/v1/AllStagesResource.scala

Lines changed: 9 additions & 9 deletions

@@ -66,13 +66,13 @@ object AllStagesResource {
   ): StageData = {
 
     val taskData = if(includeDetails) {
-      Some(stageUiData.taskData.map{case(k,v) => k -> convertTaskData(v)})
+      Some(stageUiData.taskData.map { case(k,v) => k -> convertTaskData(v) } )
     } else {
       None
     }
     val executorSummary = if(includeDetails) {
-      Some(stageUiData.executorSummary.map{case(k,summary) => k ->
-        ExecutorStageSummary(
+      Some(stageUiData.executorSummary.map { case(k,summary) =>
+        k -> ExecutorStageSummary(
           taskTime = summary.taskTime,
           failedTasks = summary.failedTasks,
           succeededTasks = summary.succeededTasks,
@@ -114,7 +114,7 @@ object AllStagesResource {
 
   def stagesAndStatus(ui: SparkUI): Seq[(StageStatus, Seq[StageInfo])] = {
     val listener = ui.stagesTab.listener
-    listener.synchronized{
+    listener.synchronized {
       Seq(
         StageStatus.Active -> listener.activeStages.values.toSeq,
         StageStatus.Complete -> listener.completedStages.reverse.toSeq,
@@ -136,7 +136,7 @@ object AllStagesResource {
       taskLocality = uiData.taskInfo.taskLocality.toString(),
       speculative = uiData.taskInfo.speculative,
       errorMessage = uiData.errorMessage,
-      taskMetrics = uiData.taskMetrics.map{convertUiTaskMetrics}
+      taskMetrics = uiData.taskMetrics.map { convertUiTaskMetrics }
     )
   }
 
@@ -149,10 +149,10 @@ object AllStagesResource {
       resultSerializationTime = internal.resultSerializationTime,
       memoryBytesSpilled = internal.memoryBytesSpilled,
       diskBytesSpilled = internal.diskBytesSpilled,
-      inputMetrics = internal.inputMetrics.map{convertInputMetrics},
-      outputMetrics = Option(internal.outputMetrics).flatten.map{convertOutputMetrics},
-      shuffleReadMetrics = internal.shuffleReadMetrics.map{convertShuffleReadMetrics},
-      shuffleWriteMetrics = internal.shuffleWriteMetrics.map{convertShuffleWriteMetrics}
+      inputMetrics = internal.inputMetrics.map { convertInputMetrics },
+      outputMetrics = Option(internal.outputMetrics).flatten.map { convertOutputMetrics },
+      shuffleReadMetrics = internal.shuffleReadMetrics.map { convertShuffleReadMetrics },
+      shuffleWriteMetrics = internal.shuffleWriteMetrics.map { convertShuffleWriteMetrics }
     )
   }
 
