Commit a325563 (1 parent: a9c5cf1)

Commit message: style

9 files changed: +43, -59 lines

core/src/main/scala/org/apache/spark/deploy/master/Master.scala

Lines changed: 0 additions & 1 deletion
@@ -768,7 +768,6 @@ private[master] class Master(
     if (inProgressExists) {
       // Event logging is enabled for this application, but the application is still in progress
       logWarning(s"Application $appName is still in progress, it may be terminated abnormally.")
-      return None
     }
 
     val (eventLogFile, status) = if (inProgressExists) {

core/src/main/scala/org/apache/spark/status/api/v1/AllJobsResource.scala

Lines changed: 5 additions & 7 deletions
@@ -16,8 +16,7 @@
  */
 package org.apache.spark.status.api.v1
 
-import java.util
-import java.util.Date
+import java.util.{Arrays, Date, List => JList}
 import javax.ws.rs._
 import javax.ws.rs.core.MediaType
 
@@ -32,16 +31,15 @@ private[v1] class AllJobsResource(uiRoot: UIRoot) {
   @GET
   def jobsList(
       @PathParam("appId") appId: String,
-      @QueryParam("status") statuses: java.util.List[JobExecutionStatus]
+      @QueryParam("status") statuses: JList[JobExecutionStatus]
   ): Seq[JobData] = {
     uiRoot.withSparkUI(appId) { ui =>
       val statusToJobs: Seq[(JobExecutionStatus, Seq[JobUIData])] =
         AllJobsResource.getStatusToJobs(ui)
-      val adjStatuses: util.List[JobExecutionStatus] = {
+      val adjStatuses: JList[JobExecutionStatus] = {
         if (statuses.isEmpty) {
-          java.util.Arrays.asList(JobExecutionStatus.values(): _*)
-        }
-        else {
+          Arrays.asList(JobExecutionStatus.values(): _*)
+        } else {
           statuses
         }
       }
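The import change repeated across these REST resource files relies on Scala's rename-on-import syntax: `List => JList` binds `java.util.List` to a local alias so it cannot collide with `scala.List`. A minimal standalone sketch of the idiom (the values here are hypothetical, not from the commit):

    import java.util.{Arrays, List => JList}

    // JList is now an alias for java.util.List; scala.List is untouched.
    val statuses: JList[String] = Arrays.asList("RUNNING", "SUCCEEDED")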

core/src/main/scala/org/apache/spark/status/api/v1/AllStagesResource.scala

Lines changed: 3 additions & 4 deletions
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.status.api.v1
 
-import java.util.Date
+import java.util.{Arrays, Date, List => JList}
 import javax.ws.rs.{GET, PathParam, Produces, QueryParam}
 import javax.ws.rs.core.MediaType
 
@@ -33,14 +33,14 @@ private[v1] class AllStagesResource(uiRoot: UIRoot) {
   @GET
   def stageList(
       @PathParam("appId") appId: String,
-      @QueryParam("status") statuses: java.util.List[StageStatus]
+      @QueryParam("status") statuses: JList[StageStatus]
   ): Seq[StageData] = {
     uiRoot.withSparkUI(appId) { ui =>
       val listener = ui.jobProgressListener
       val stageAndStatus = AllStagesResource.stagesAndStatus(ui)
       val adjStatuses = {
         if (statuses.isEmpty()) {
-          java.util.Arrays.asList(StageStatus.values(): _*)
+          Arrays.asList(StageStatus.values(): _*)
         } else {
           statuses
         }
@@ -279,5 +279,4 @@ private[v1] object AllStagesResource {
     recordsWritten = internal.shuffleRecordsWritten
   )
 }
-
 }

core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala

Lines changed: 1 addition & 2 deletions
@@ -16,8 +16,7 @@
  */
 package org.apache.spark.status.api.v1
 
-import java.util.Date
-import java.util.{Arrays, List => JList}
+import java.util.{Arrays, Date, List => JList}
 import javax.ws.rs.{DefaultValue, GET, Produces, QueryParam}
 import javax.ws.rs.core.MediaType
 

core/src/main/scala/org/apache/spark/status/api/v1/SimpleDateParam.scala

Lines changed: 1 addition & 1 deletion
@@ -27,7 +27,7 @@ import scala.util.Try
 private[v1] class SimpleDateParam(val originalValue: String) {
   val timestamp: Long = {
     SimpleDateParam.formats.collectFirst {
-      case fmt if Try{ fmt.parse(originalValue) }.isSuccess =>
+      case fmt if Try(fmt.parse(originalValue)).isSuccess =>
         fmt.parse(originalValue).getTime()
     }.getOrElse(
       throw new WebApplicationException(
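The `SimpleDateParam` change is purely stylistic: for a single expression, `Try(expr)` and `Try { expr }` compile to the same call, and parentheses are the conventional choice. A small sketch, assuming a hypothetical date format not taken from the commit:

    import java.text.SimpleDateFormat
    import scala.util.Try

    val fmt = new SimpleDateFormat("yyyy-MM-dd")  // hypothetical format
    Try(fmt.parse("2015-05-08")).isSuccess        // preferred: single expression
    Try { fmt.parse("2015-05-08") }.isSuccess     // equivalent; braces suggest a block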

core/src/main/scala/org/apache/spark/status/api/v1/api.scala

Lines changed: 1 addition & 11 deletions
@@ -220,7 +220,6 @@ class OutputMetricDistributions(
     val recordsWritten: IndexedSeq[Double]
 )
 
-
 class ShuffleReadMetricDistributions(
     val readBytes: IndexedSeq[Double],
     val readRecords: IndexedSeq[Double],
@@ -241,13 +240,4 @@ class AccumulableInfo (
     val id: Long,
     val name: String,
     val update: Option[String],
-    val value: String) {
-
-  override def equals(other: Any): Boolean = other match {
-    case acc: AccumulableInfo =>
-      this.id == acc.id && this.name == acc.name &&
-        this.value == acc.value
-    case _ => false
-  }
-}
-
+    val value: String)
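Deleting the handwritten `equals` also removes a latent hazard: the override was never paired with a matching `hashCode`, so `AccumulableInfo` instances that compared equal could still hash differently in hash-based collections. A minimal sketch of the usual contract on a hypothetical class (not code from this commit):

    class Point(val x: Int, val y: Int) {
      override def equals(other: Any): Boolean = other match {
        case p: Point => x == p.x && y == p.y
        case _ => false
      }
      // equals and hashCode must agree: equal instances return equal hashes
      override def hashCode: Int = (x, y).##
    }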

core/src/main/scala/org/apache/spark/ui/SparkUI.scala

Lines changed: 1 addition & 2 deletions
@@ -19,8 +19,7 @@ package org.apache.spark.ui
 
 import java.util.Date
 
-import org.apache.spark.status.api.v1.{ApplicationInfo, JsonRootResource}
-import org.apache.spark.status.api.v1.UIRoot
+import org.apache.spark.status.api.v1.{ApplicationInfo, JsonRootResource, UIRoot}
 import org.apache.spark.{Logging, SecurityManager, SparkConf, SparkContext}
 import org.apache.spark.scheduler._
 import org.apache.spark.storage.StorageStatusListener

core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala

Lines changed: 15 additions & 15 deletions
@@ -28,21 +28,21 @@ import org.apache.spark.util.Utils
 
 // This isn't even used anymore -- but we need to keep it b/c of a MiMa false positive
 private[ui] case class ExecutorSummaryInfo(
-  id: String,
-  hostPort: String,
-  rddBlocks: Int,
-  memoryUsed: Long,
-  diskUsed: Long,
-  activeTasks: Int,
-  failedTasks: Int,
-  completedTasks: Int,
-  totalTasks: Int,
-  totalDuration: Long,
-  totalInputBytes: Long,
-  totalShuffleRead: Long,
-  totalShuffleWrite: Long,
-  maxMemory: Long,
-  executorLogs: Map[String, String])
+    id: String,
+    hostPort: String,
+    rddBlocks: Int,
+    memoryUsed: Long,
+    diskUsed: Long,
+    activeTasks: Int,
+    failedTasks: Int,
+    completedTasks: Int,
+    totalTasks: Int,
+    totalDuration: Long,
+    totalInputBytes: Long,
+    totalShuffleRead: Long,
+    totalShuffleWrite: Long,
+    maxMemory: Long,
+    executorLogs: Map[String, String])
 
 
 private[ui] class ExecutorsPage(

core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala

Lines changed: 16 additions & 16 deletions
@@ -52,6 +52,7 @@ class HistoryServerSuite extends FunSuite with BeforeAndAfter with Matchers with
     server.bind()
     port = server.boundPort
   }
+
   def stop(): Unit = {
     server.stop()
   }
@@ -102,27 +103,26 @@ class HistoryServerSuite extends FunSuite with BeforeAndAfter with Matchers with
     "stage with accumulable json" -> "applications/local-1426533911241/stages/0/0",
     "rdd list storage json" -> "applications/local-1422981780767/storage/rdd",
     "one rdd storage json" -> "applications/local-1422981780767/storage/rdd/0"
-    // TODO multi-attempt stages
   )
 
   // run a bunch of characterization tests -- just verify the behavior is the same as what is saved
   // in the test resource folder
   cases.foreach { case (name, path) =>
-      test(name) {
-        val (code, jsonOpt, errOpt) = getContentAndCode(path)
-        code should be (HttpServletResponse.SC_OK)
-        jsonOpt should be ('defined)
-        errOpt should be (None)
-        val json = jsonOpt.get
-        val exp = IOUtils.toString(new FileInputStream(
-          new File(expRoot, path + "/json_expectation")))
-        // compare the ASTs so formatting differences don't cause failures
-        import org.json4s._
-        import org.json4s.jackson.JsonMethods._
-        val jsonAst = parse(json)
-        val expAst = parse(exp)
-        assertValidDataInJson(jsonAst, expAst)
-      }
+    test(name) {
+      val (code, jsonOpt, errOpt) = getContentAndCode(path)
+      code should be (HttpServletResponse.SC_OK)
+      jsonOpt should be ('defined)
+      errOpt should be (None)
+      val json = jsonOpt.get
+      val exp = IOUtils.toString(new FileInputStream(
+        new File(expRoot, path + "/json_expectation")))
+      // compare the ASTs so formatting differences don't cause failures
+      import org.json4s._
+      import org.json4s.jackson.JsonMethods._
+      val jsonAst = parse(json)
+      val expAst = parse(exp)
+      assertValidDataInJson(jsonAst, expAst)
+    }
   }
 
   test("security") {
