Commit dd00b7c

Move CommitDeniedException to executors package; remove @DeveloperApi annotation.
1 parent c79df98

3 files changed (+12, -9 lines)

core/src/main/scala/org/apache/spark/SparkEnv.scala

Lines changed: 1 addition & 2 deletions
@@ -20,8 +20,6 @@ package org.apache.spark
 import java.io.File
 import java.net.Socket
 
-import org.apache.spark.scheduler.OutputCommitCoordinator.OutputCommitCoordinatorActor
-
 import scala.collection.JavaConversions._
 import scala.collection.mutable
 import scala.util.Properties
@@ -37,6 +35,7 @@ import org.apache.spark.network.BlockTransferService
 import org.apache.spark.network.netty.NettyBlockTransferService
 import org.apache.spark.network.nio.NioBlockTransferService
 import org.apache.spark.scheduler.{OutputCommitCoordinator, LiveListenerBus}
+import org.apache.spark.scheduler.OutputCommitCoordinator.OutputCommitCoordinatorActor
 import org.apache.spark.serializer.Serializer
 import org.apache.spark.shuffle.{ShuffleMemoryManager, ShuffleManager}
 import org.apache.spark.storage._

core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala

Lines changed: 1 addition & 1 deletion
@@ -26,9 +26,9 @@ import org.apache.hadoop.mapred._
 import org.apache.hadoop.fs.FileSystem
 import org.apache.hadoop.fs.Path
 
+import org.apache.spark.executor.CommitDeniedException
 import org.apache.spark.mapred.SparkHadoopMapRedUtil
 import org.apache.spark.rdd.HadoopRDD
-import org.apache.spark.util.AkkaUtils
 
 /**
  * Internal helper class that saves an RDD using a Hadoop OutputFormat.
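
With the import now pointing at org.apache.spark.executor, the writer can still raise the exception when the driver refuses a commit. A minimal sketch of that pattern, assuming a hypothetical canCommit flag and illustrative identifiers rather than the file's actual commit logic:

// Illustrative only: raising the relocated exception when a commit is refused.
import org.apache.spark.executor.CommitDeniedException

def commitOrFail(canCommit: Boolean, jobId: Int, splitId: Int, attemptId: Int): Unit = {
  if (!canCommit) {
    throw new CommitDeniedException(
      s"Commit denied: job=$jobId, split=$splitId, attempt=$attemptId",
      jobId, splitId, attemptId)
  }
}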

core/src/main/scala/org/apache/spark/CommitDeniedException.scala renamed to core/src/main/scala/org/apache/spark/executor/CommitDeniedException.scala

Lines changed: 10 additions & 6 deletions
@@ -15,18 +15,22 @@
  * limitations under the License.
  */
 
-package org.apache.spark
+package org.apache.spark.executor
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.{TaskCommitDenied, TaskEndReason}
 
 /**
- * :: DeveloperApi ::
  * Exception thrown when a task attempts to commit output to Hadoop, but
  * is denied by the driver.
  */
-@DeveloperApi
-class CommitDeniedException(msg: String, jobID: Int, splitID: Int, attemptID: Int)
+class CommitDeniedException(
+    msg: String,
+    jobID: Int,
+    splitID: Int,
+    attemptID: Int)
   extends Exception(msg) {
-  def toTaskEndReason(): TaskEndReason = new TaskCommitDenied(jobID, splitID, attemptID)
+
+  def toTaskEndReason: TaskEndReason = new TaskCommitDenied(jobID, splitID, attemptID)
+
 }
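
With @DeveloperApi gone the class is internal-only, and toTaskEndReason is now a parameterless method returning the TaskCommitDenied end reason. A rough sketch of how executor-side code might map the exception to that reason (the helper below is illustrative, not the actual executor handler):

// Illustrative only: translating the exception into the TaskEndReason
// reported back to the driver for the denied attempt.
import org.apache.spark.TaskEndReason
import org.apache.spark.executor.CommitDeniedException

def endReasonFor(t: Throwable): Option[TaskEndReason] = t match {
  case cde: CommitDeniedException => Some(cde.toTaskEndReason)
  case _ => None
}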
