From 1facd581b3e1e37cc896a7db8d3bb8e9ab088686 Mon Sep 17 00:00:00 2001
From: fireflyc
Date: Fri, 18 Jul 2014 23:19:46 +0800
Subject: [PATCH] Fix the number of worker threads

A large number of input blocks causes too many worker threads to be
created, and together they try to load all the data at once. The number
of worker threads should therefore be bounded.
---
 core/src/main/scala/org/apache/spark/executor/Executor.scala | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/executor/Executor.scala b/core/src/main/scala/org/apache/spark/executor/Executor.scala
index 4d3ba11633bf..47c40cc78f1c 100644
--- a/core/src/main/scala/org/apache/spark/executor/Executor.scala
+++ b/core/src/main/scala/org/apache/spark/executor/Executor.scala
@@ -101,8 +101,9 @@ private[spark] class Executor(
   // to send the result back.
   private val akkaFrameSize = AkkaUtils.maxFrameSizeBytes(conf)
 
+  private val nThreads = env.conf.getInt("spark.worker.threads.max", Runtime.getRuntime.availableProcessors())
   // Start worker thread pool
-  val threadPool = Utils.newDaemonCachedThreadPool("Executor task launch worker")
+  val threadPool = Utils.newDaemonFixedThreadPool(nThreads, "Executor task launch worker")
 
   // Maintains the list of running tasks.
   private val runningTasks = new ConcurrentHashMap[Long, TaskRunner]
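
Note: the point of the change is that a cached thread pool is unbounded
(one thread per queued task under load), while a fixed pool caps
concurrency at nThreads. The sketch below shows, in plain
java.util.concurrent terms, what a daemon fixed-size pool like the one
the patch switches to roughly does. The object and method names
(FixedPoolSketch, makeDaemonFixedPool) are illustrative only, not
Spark's actual Utils API.

    import java.util.concurrent.{ExecutorService, Executors, ThreadFactory}
    import java.util.concurrent.atomic.AtomicInteger

    object FixedPoolSketch {
      // Hypothetical helper mirroring the shape of a daemon fixed pool:
      // a hard cap of nThreads daemon threads, each named with a prefix.
      def makeDaemonFixedPool(nThreads: Int, prefix: String): ExecutorService = {
        val counter = new AtomicInteger(0)
        val factory = new ThreadFactory {
          override def newThread(r: Runnable): Thread = {
            val t = new Thread(r, s"$prefix-${counter.incrementAndGet()}")
            t.setDaemon(true) // daemon threads do not keep the JVM alive
            t
          }
        }
        // Unlike Executors.newCachedThreadPool, a fixed pool never grows
        // past nThreads; excess tasks wait in the queue instead of each
        // getting its own thread.
        Executors.newFixedThreadPool(nThreads, factory)
      }

      def main(args: Array[String]): Unit = {
        val pool = makeDaemonFixedPool(
          Runtime.getRuntime.availableProcessors(), "task-launch-worker")
        (1 to 16).foreach { i =>
          pool.execute(new Runnable {
            override def run(): Unit =
              println(s"task $i on ${Thread.currentThread().getName}")
          })
        }
        pool.shutdown()
      }
    }

Running the sketch on a 4-core machine shows all 16 tasks sharing 4
named threads rather than spawning 16, which is the bounded-concurrency
behavior the patch is after.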