Skip to content

Commit 8a58f2e

Browse files
sameeragarwal authored and yhuai committed
[SPARK-17652] Fix confusing exception message while reserving capacity
## What changes were proposed in this pull request? This minor patch fixes a confusing exception message while reserving additional capacity in the vectorized parquet reader. ## How was this patch tested? Existing Unit Tests Author: Sameer Agarwal <[email protected]> Closes #15225 from sameeragarwal/error-msg. (cherry picked from commit 7c7586a) Signed-off-by: Yin Huai <[email protected]>
1 parent cf53241 commit 8a58f2e

File tree

2 files changed

+9
-9
lines changed

2 files changed

+9
-9
lines changed

sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVector.java

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -285,19 +285,19 @@ public void reserve(int requiredCapacity) {
285285
try {
286286
reserveInternal(newCapacity);
287287
} catch (OutOfMemoryError outOfMemoryError) {
288-
throwUnsupportedException(newCapacity, requiredCapacity, outOfMemoryError);
288+
throwUnsupportedException(requiredCapacity, outOfMemoryError);
289289
}
290290
} else {
291-
throwUnsupportedException(newCapacity, requiredCapacity, null);
291+
throwUnsupportedException(requiredCapacity, null);
292292
}
293293
}
294294
}
295295

296-
private void throwUnsupportedException(int newCapacity, int requiredCapacity, Throwable cause) {
297-
String message = "Cannot reserve more than " + newCapacity +
298-
" bytes in the vectorized reader (requested = " + requiredCapacity + " bytes). As a" +
299-
" workaround, you can disable the vectorized reader by setting "
300-
+ SQLConf.PARQUET_VECTORIZED_READER_ENABLED().key() + " to false.";
296+
private void throwUnsupportedException(int requiredCapacity, Throwable cause) {
297+
String message = "Cannot reserve additional contiguous bytes in the vectorized reader " +
298+
"(requested = " + requiredCapacity + " bytes). As a workaround, you can disable the " +
299+
"vectorized reader by setting " + SQLConf.PARQUET_VECTORIZED_READER_ENABLED().key() +
300+
" to false.";
301301

302302
if (cause != null) {
303303
throw new RuntimeException(message, cause);

sql/core/src/test/scala/org/apache/spark/sql/execution/vectorized/ColumnarBatchSuite.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -802,8 +802,8 @@ class ColumnarBatchSuite extends SparkFunSuite {
802802
// Over-allocating beyond MAX_CAPACITY throws an exception
803803
column.appendBytes(10, 0.toByte)
804804
}
805-
assert(ex.getMessage.contains(s"Cannot reserve more than ${column.MAX_CAPACITY} bytes in " +
806-
s"the vectorized reader"))
805+
assert(ex.getMessage.contains(s"Cannot reserve additional contiguous bytes in the " +
806+
s"vectorized reader"))
807807
}
808808
}
809809
}

0 commit comments

Comments
 (0)