diff --git a/common/network-common/src/main/java/org/apache/spark/network/util/TransportConf.java b/common/network-common/src/main/java/org/apache/spark/network/util/TransportConf.java
index ea52e9fe6c1c..88256b810bf0 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/util/TransportConf.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/util/TransportConf.java
@@ -258,7 +258,11 @@ public Properties cryptoConf() {
   }
 
   /**
-   * The max number of chunks allowed to being transferred at the same time on shuffle service.
+   * The max number of chunks allowed to be transferred at the same time on shuffle service.
+   * Note that new incoming connections will be closed when the max number is hit. The client will
+   * retry according to the shuffle retry configs (see `spark.shuffle.io.maxRetries` and
+   * `spark.shuffle.io.retryWait`); if those limits are reached, the task will fail with a fetch
+   * failure.
    */
   public long maxChunksBeingTransferred() {
     return conf.getLong("spark.shuffle.maxChunksBeingTransferred", Long.MAX_VALUE);
diff --git a/docs/configuration.md b/docs/configuration.md
index f4b6f46db5b6..500f980455b0 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -635,7 +635,11 @@ Apart from these, the following properties are also available, and may be useful
   <td><code>spark.shuffle.maxChunksBeingTransferred</code></td>
   <td>Long.MAX_VALUE</td>
   <td>
-    The max number of chunks allowed to being transferred at the same time on shuffle service.
+    The max number of chunks allowed to be transferred at the same time on shuffle service.
+    Note that new incoming connections will be closed when the max number is hit. The client will
+    retry according to the shuffle retry configs (see <code>spark.shuffle.io.maxRetries</code> and
+    <code>spark.shuffle.io.retryWait</code>); if those limits are reached, the task will fail with
+    a fetch failure.
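
For context (not part of the patch), a minimal sketch of how the server-side cap and the client-side retry configs described above fit together. The class name and all values here are hypothetical, chosen only for illustration; it sets all three properties on one SparkConf for brevity, whereas in practice the chunk limit is read by the shuffle server's TransportConf while the retry settings govern the fetching client.

import org.apache.spark.SparkConf;

public class ShuffleChunkLimitExample {
  public static void main(String[] args) {
    SparkConf conf = new SparkConf()
        // Shuffle-server side: cap on chunks being transferred concurrently.
        // New connections arriving once the cap is hit are closed (hypothetical value).
        .set("spark.shuffle.maxChunksBeingTransferred", "4096")
        // Client side: closed connections are retried this many times, this far apart,
        // before the fetch is given up and the task fails with a fetch failure
        // (hypothetical values).
        .set("spark.shuffle.io.maxRetries", "6")
        .set("spark.shuffle.io.retryWait", "10s");
    // Print the resulting configuration for inspection.
    System.out.println(conf.toDebugString());
  }
}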