UnsafeSorterSpillWriter.java
@@ -42,7 +42,10 @@ public final class UnsafeSorterSpillWriter {
 
   private final SparkConf conf = new SparkConf();
 
-  /** The buffer size to use when writing the sorted records to an on-disk file */
+  /**
+   * The buffer size to use when writing the sorted records to an on-disk file. Each record
+   * carries a 4-byte length and an 8-byte key prefix in addition to its payload, so this
+   * buffer must be larger than 4 + 8 = 12 bytes.
+   */
   private final int diskWriteBufferSize =
       (int) (long) conf.get(package$.MODULE$.SHUFFLE_DISK_WRITE_BUFFER_SIZE());
 
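To make the new lower bound concrete: the spill writer stages a 4-byte record length and an 8-byte key prefix in its write buffer before the record payload, so a buffer of 12 bytes or fewer could not hold even one record header. Below is a minimal, hypothetical Scala sketch of that layout; the object and variable names are illustrative only and are not Spark code.

import java.nio.ByteBuffer

// Hypothetical illustration of the per-record header assumed by the comment above:
// a 4-byte record length plus an 8-byte sort-key prefix, staged before the payload.
object SpillHeaderSketch {
  def main(args: Array[String]): Unit = {
    val recordLength = 256   // length of the record payload, stored as a 4-byte int
    val keyPrefix = 42L      // sort-key prefix, stored as an 8-byte long

    val writeBuffer = ByteBuffer.allocate(64)
    writeBuffer.putInt(recordLength)   // 4 bytes
    writeBuffer.putLong(keyPrefix)     // 8 bytes

    // 12 bytes are consumed before any payload is copied in, which is why
    // spark.shuffle.spill.diskWriteBufferSize must be strictly greater than 12.
    println(s"header bytes staged: ${writeBuffer.position()}")  // prints 12
  }
}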
org/apache/spark/internal/config/package.scala
@@ -21,6 +21,7 @@ import java.util.concurrent.TimeUnit

 import org.apache.spark.launcher.SparkLauncher
 import org.apache.spark.network.util.ByteUnit
+import org.apache.spark.unsafe.array.ByteArrayMethods
 import org.apache.spark.util.Utils
 
 package object config {
@@ -495,8 +496,9 @@ package object config {
     ConfigBuilder("spark.shuffle.spill.diskWriteBufferSize")
       .doc("The buffer size, in bytes, to use when writing the sorted records to an on-disk file.")
       .bytesConf(ByteUnit.BYTE)
-      .checkValue(v => v > 0 && v <= Int.MaxValue,
-        s"The buffer size must be greater than 0 and less than ${Int.MaxValue}.")
+      .checkValue(v => v > 12 && v <= ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH,
+        s"The buffer size must be greater than 12 and less than or equal to " +
+          s"${ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH}.")
       .createWithDefault(1024 * 1024)
 
   private[spark] val UNROLL_MEMORY_CHECK_PERIOD =
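As a usage sketch (not part of this patch; the application name, value, and job below are illustrative), the setting is supplied through SparkConf like any other byte-size config. With the new check, values of 12 bytes or less, or above ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH, are rejected with the message above when the config entry is read, rather than surfacing later during a spill.

import org.apache.spark.{SparkConf, SparkContext}

// Hypothetical example of configuring the disk write buffer size from an application.
object DiskWriteBufferSizeExample {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("DiskWriteBufferSizeExample")
      // Must now be > 12 bytes and <= ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH;
      // the default remains 1024 * 1024 (1 MiB).
      .set("spark.shuffle.spill.diskWriteBufferSize", "1m")

    val sc = new SparkContext(conf)
    try {
      // A shuffle-heavy job that may spill sorted records to disk.
      val distinctKeys = sc.parallelize(1 to 1000000, 8)
        .map(i => (i % 1000, 1))
        .reduceByKey(_ + _)
        .count()
      println(s"distinct keys: $distinctKeys")
    } finally {
      sc.stop()
    }
  }
}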