From 2d5466e907a02b45353ca56d76f7e87817843dc1 Mon Sep 17 00:00:00 2001
From: YonghuaChen
Date: Tue, 15 Aug 2017 13:58:51 +0800
Subject: [PATCH] Add the validation of spark.cores.max under Streaming

---
 .../org/apache/spark/streaming/StreamingContext.scala | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala b/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
index f3b4ff2d1d80..c4718582d7ae 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
@@ -144,6 +144,13 @@ class StreamingContext private[streaming] (
     }
   }
 
+  if (sc.conf.contains("spark.cores.max")) {
+    val totalCores = sc.conf.getInt("spark.cores.max", 1)
+    if (totalCores <= 1) {
+      throw new SparkException("Total executor cores (spark.cores.max) must be greater than 1")
+    }
+  }
+
   if (sc.conf.get("spark.master") == "local" || sc.conf.get("spark.master") == "local[1]") {
     logWarning("spark.master should be set as local[n], n > 1 in local mode if you have receivers" +
       " to get data, otherwise Spark jobs will not get resources to process the received data.")