
Commit d05f7a9

Don't use case classes for status API POJOs, since they have binary compatibility issues
1 parent 654cecf commit d05f7a9
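
Why the change: for a Scala case class the compiler synthesizes companion apply, copy, and unapply methods whose signatures enumerate every field, so appending even one field to a public case class changes those signatures and breaks already-compiled callers at link time. A minimal sketch of the failure mode, with illustrative v1/v2 wrappers standing in for two releases (not Spark code):

    object v1 {
      // The public API type as shipped in release N.
      case class JobData(jobId: Int, name: String)
    }

    object v2 {
      // The same type after release N + 1 appends a field. Sources still
      // compile, but the synthetic members change shape:
      //   apply(Int, String)             -> apply(Int, String, Option[String])
      //   copy(Int, String)              -> copy(Int, String, Option[String])
      //   unapply: Option[(Int, String)] -> Option[(Int, String, Option[String])]
      // Bytecode compiled against v1 hits NoSuchMethodError when run on v2.
      case class JobData(jobId: Int, name: String, description: Option[String] = None)
    }

    object BinaryCompatSketch extends App {
      val before = v1.JobData(1, "count")  // shape of a release-N call site
      val after = v2.JobData(1, "count")   // same source recompiles on N + 1...
      println(s"$before / $after")         // ...but jars built against v1 no longer link
    }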

File tree

6 files changed (+152, -152 lines)


core/src/main/scala/org/apache/spark/status/api/v1/AllJobsResource.scala

Lines changed: 1 addition & 1 deletion
@@ -72,7 +72,7 @@ object AllJobsResource {
     }
     val lastStageName = lastStageInfo.map { _.name }.getOrElse("(Unknown Stage Name)")
     val lastStageDescription = lastStageData.flatMap { _.description }
-    JobData(
+    new JobData(
       jobId = job.jobId,
       name = lastStageName,
       description = lastStageDescription,
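
At the call sites the only change is the `new` keyword, since named constructor arguments read the same as the case-class apply calls they replace. On the type side, the pattern is a plain class, roughly as sketched below (illustrative and abbreviated, not the actual class body from this commit):

    package org.apache.spark.status.api.v1

    // A plain class narrows the public binary surface to the getters: there is
    // no synthetic apply/copy/unapply to drift between releases, and with the
    // constructor kept private to Spark, fields can be appended later without
    // breaking external code that only reads the getters.
    class JobData private[spark](
        val jobId: Int,
        val name: String,
        val description: Option[String])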

core/src/main/scala/org/apache/spark/status/api/v1/AllRDDResource.scala

Lines changed: 3 additions & 3 deletions
@@ -73,7 +73,7 @@ object AllRDDResource {

     val dataDistribution = if (includeDetails) {
       Some(storageStatusList.map { status =>
-        RDDDataDistribution(
+        new RDDDataDistribution(
           address = status.blockManagerId.hostPort,
           memoryUsed = status.memUsedByRdd(rddId),
           memoryRemaining = status.memRemaining,
@@ -84,7 +84,7 @@ object AllRDDResource {
     }
     val partitions = if (includeDetails) {
       Some(blocks.map { case(id, block, locations) =>
-        RDDPartitionInfo(
+        new RDDPartitionInfo(
           blockName = id.name,
           storageLevel = block.storageLevel.description,
           memoryUsed = block.memSize,
@@ -96,7 +96,7 @@ object AllRDDResource {
       None
     }

-    RDDStorageInfo(
+    new RDDStorageInfo(
       id = rddId,
       name = rddInfo.name,
       numPartitions = rddInfo.numPartitions,

core/src/main/scala/org/apache/spark/status/api/v1/AllStagesResource.scala

Lines changed: 8 additions & 8 deletions
@@ -72,7 +72,7 @@ object AllStagesResource {
     }
     val executorSummary = if(includeDetails) {
       Some(stageUiData.executorSummary.map { case(k,summary) =>
-        k -> ExecutorStageSummary(
+        k -> new ExecutorStageSummary(
           taskTime = summary.taskTime,
           failedTasks = summary.failedTasks,
           succeededTasks = summary.succeededTasks,
@@ -87,7 +87,7 @@ object AllStagesResource {
     } else {
       None
     }
-    StageData(
+    new StageData(
       status = status,
       stageId = stageInfo.stageId,
       numActiveTasks = stageUiData.numActiveTasks,
@@ -126,7 +126,7 @@ object AllStagesResource {


   def convertTaskData(uiData: TaskUIData): TaskData = {
-    TaskData(
+    new TaskData(
       taskId = uiData.taskInfo.taskId,
       index = uiData.taskInfo.index,
       attempt = uiData.taskInfo.attempt,
@@ -141,7 +141,7 @@ object AllStagesResource {
   }

   def convertUiTaskMetrics(internal: InternalTaskMetrics): TaskMetrics = {
-    TaskMetrics(
+    new TaskMetrics(
       executorDeserializeTime = internal.executorDeserializeTime,
       executorRunTime = internal.executorRunTime,
       resultSize = internal.resultSize,
@@ -157,21 +157,21 @@ object AllStagesResource {
   }

   def convertInputMetrics(internal: InternalInputMetrics): InputMetrics = {
-    InputMetrics(
+    new InputMetrics(
       bytesRead = internal.bytesRead,
       recordsRead = internal.recordsRead
     )
   }

   def convertOutputMetrics(internal: InternalOutputMetrics): OutputMetrics = {
-    OutputMetrics(
+    new OutputMetrics(
       bytesWritten = internal.bytesWritten,
       recordsWritten = internal.recordsWritten
     )
   }

   def convertShuffleReadMetrics(internal: InternalShuffleReadMetrics): ShuffleReadMetrics = {
-    ShuffleReadMetrics(
+    new ShuffleReadMetrics(
       remoteBlocksFetched = internal.remoteBlocksFetched,
       localBlocksFetched = internal.localBlocksFetched,
       fetchWaitTime = internal.fetchWaitTime,
@@ -182,7 +182,7 @@ object AllStagesResource {
   }

   def convertShuffleWriteMetrics(internal: InternalShuffleWriteMetrics): ShuffleWriteMetrics = {
-    ShuffleWriteMetrics(
+    new ShuffleWriteMetrics(
       bytesWritten = internal.shuffleBytesWritten,
       writeTime = internal.shuffleWriteTime,
       recordsWritten = internal.shuffleRecordsWritten

core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala

Lines changed: 2 additions & 2 deletions
@@ -55,7 +55,7 @@ class ApplicationListResource(uiRoot: UIRoot) {

 object ApplicationsListResource {
   def appHistoryInfoToPublicAppInfo(app: ApplicationHistoryInfo): ApplicationInfo = {
-    ApplicationInfo(
+    new ApplicationInfo(
       id = app.id,
       name = app.name,
       startTime = new Date(app.startTime),
@@ -68,7 +68,7 @@ object ApplicationsListResource {
   def convertApplicationInfo(
       internal: InternalApplicationInfo,
       completed: Boolean): ApplicationInfo = {
-    ApplicationInfo(
+    new ApplicationInfo(
       id = internal.id,
       name = internal.desc.name,
       startTime = new Date(internal.startTime),
