diff --git a/core/src/main/scala/org/apache/spark/util/ThreadUtils.scala b/core/src/main/scala/org/apache/spark/util/ThreadUtils.scala
index cb0c20541d0d..413507c51f37 100644
--- a/core/src/main/scala/org/apache/spark/util/ThreadUtils.scala
+++ b/core/src/main/scala/org/apache/spark/util/ThreadUtils.scala
@@ -26,7 +26,6 @@ import scala.language.higherKinds
 import com.google.common.util.concurrent.{MoreExecutors, ThreadFactoryBuilder}
 import scala.concurrent.{Awaitable, ExecutionContext, ExecutionContextExecutor, Future}
 import scala.concurrent.duration.{Duration, FiniteDuration}
-import scala.concurrent.forkjoin.{ForkJoinPool => SForkJoinPool, ForkJoinWorkerThread => SForkJoinWorkerThread}
 import scala.util.control.NonFatal
 
 import org.apache.spark.SparkException
@@ -183,15 +182,15 @@ private[spark] object ThreadUtils {
   /**
-   * Construct a new Scala ForkJoinPool with a specified max parallelism and name prefix.
+   * Construct a new ForkJoinPool with a specified max parallelism and name prefix.
    */
-  def newForkJoinPool(prefix: String, maxThreadNumber: Int): SForkJoinPool = {
+  def newForkJoinPool(prefix: String, maxThreadNumber: Int): ForkJoinPool = {
     // Custom factory to set thread names
-    val factory = new SForkJoinPool.ForkJoinWorkerThreadFactory {
-      override def newThread(pool: SForkJoinPool) =
-        new SForkJoinWorkerThread(pool) {
+    val factory = new ForkJoinPool.ForkJoinWorkerThreadFactory {
+      override def newThread(pool: ForkJoinPool) =
+        new ForkJoinWorkerThread(pool) {
           setName(prefix + "-" + super.getName)
         }
     }
-    new SForkJoinPool(maxThreadNumber, factory,
+    new ForkJoinPool(maxThreadNumber, factory,
       null, // handler
       false // asyncMode
     )
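
Context for the change: on Scala 2.12+, scala.concurrent.forkjoin is a set of deprecated aliases for the java.util.concurrent classes, so the SForkJoinPool/SForkJoinWorkerThread renames buy nothing. The patch assumes java.util.concurrent.ForkJoinPool and ForkJoinWorkerThread are already in scope via an import above this hunk (not shown in the diff). Below is a minimal standalone sketch of the pattern the patched method uses; the object name, the "demo" prefix, the pool size, and the main method are illustrative, not part of the patch:

import java.util.concurrent.{ForkJoinPool, ForkJoinWorkerThread, TimeUnit}

import scala.concurrent.{ExecutionContext, Future}

object ForkJoinPoolSketch {
  // Mirrors the patched ThreadUtils.newForkJoinPool: a JDK ForkJoinPool whose
  // worker threads carry a recognizable name prefix.
  def newForkJoinPool(prefix: String, maxThreadNumber: Int): ForkJoinPool = {
    // Custom factory so each worker thread gets the prefix prepended to its
    // default "ForkJoinPool-N-worker-M" name.
    val factory = new ForkJoinPool.ForkJoinWorkerThreadFactory {
      override def newThread(pool: ForkJoinPool): ForkJoinWorkerThread =
        new ForkJoinWorkerThread(pool) {
          setName(prefix + "-" + super.getName)
        }
    }
    new ForkJoinPool(maxThreadNumber, factory,
      null, // handler: keep the default uncaught-exception behavior
      false // asyncMode: LIFO work queues, the fork/join default
    )
  }

  def main(args: Array[String]): Unit = {
    val pool = newForkJoinPool("demo", 4)
    // ForkJoinPool is a java.util.concurrent.ExecutorService, so it can back
    // a Scala ExecutionContext exactly as the old Scala alias did.
    implicit val ec: ExecutionContext = ExecutionContext.fromExecutorService(pool)
    Future { println(Thread.currentThread().getName) } // e.g. demo-ForkJoinPool-1-worker-1
    pool.awaitQuiescence(10, TimeUnit.SECONDS)
    pool.shutdown()
  }
}

Since the factory only renames threads, runtime behavior is unchanged by the patch: on Scala 2.12 the removed Scala names were already type aliases for these JDK classes.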