Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,9 @@ public void basicTest() {
checkBasic("hello", 5); // 5 * 1 byte chars
checkBasic("大 千 世 界", 7);
checkBasic("︽﹋%", 3); // 3 * 3 bytes chars
// checkstyle.off: AvoidEscapedUnicodeCharacters
checkBasic("\uD83E\uDD19", 1); // 4 bytes char
// checkstyle.on: AvoidEscapedUnicodeCharacters
}

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -174,9 +174,12 @@ public void reset() {
if (consumer != null) {
consumer.freeArray(array);
// the call to consumer.allocateArray may trigger a spill
// which in turn access this instance and eventually re-enter this method and try to free the array again.
// by setting the array to null and its length to 0 we effectively make the spill code-path a no-op.
// setting the array to null also indicates that it has already been de-allocated which prevents a double de-allocation in free().
// which in turn accesses this instance and eventually re-enters this method
// and tries to free the array again.
// By setting the array to null and its length to 0
// we effectively make the spill code-path a no-op.
// Setting the array to null also indicates that it has already been
// de-allocated which prevents a double de-allocation in free().
array = null;
usableCapacity = 0;
pos = 0;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -460,7 +460,7 @@ public void testPeakMemoryUsed() throws Exception {
@Test
public void testOOMDuringSpill() throws Exception {
final UnsafeExternalSorter sorter = newSorter();
// we assume that given default configuration,
// We assume that given default configuration,
// the size of the data we insert to the sorter (ints)
// and assuming we shouldn't spill before pointers array is exhausted
// (memory manager is not configured to throw at this point)
Expand All @@ -469,23 +469,28 @@ public void testOOMDuringSpill() throws Exception {
for (int i = 0; sorter.hasSpaceForAnotherRecord(); ++i) {
insertNumber(sorter, i);
}
// we expect the next insert to attempt growing the pointerssArray
// first allocation is expected to fail, then a spill is triggered which attempts another allocation
// We expect the next insert to attempt growing the pointers array;
// first allocation is expected to fail, then a spill is
// triggered which attempts another allocation
// which also fails and we expect to see this OOM here.
// the original code messed with a released array within the spill code
// and ended up with a failed assertion.
// we also expect the location of the OOM to be org.apache.spark.util.collection.unsafe.sort.UnsafeInMemorySorter.reset
// We also expect the location of the OOM to be
// org.apache.spark.util.collection.unsafe.sort.UnsafeInMemorySorter.reset
memoryManager.markconsequentOOM(2);
try {
insertNumber(sorter, 1024);
fail("expected OutOfMmoryError but it seems operation surprisingly succeeded");
}
// we expect an OutOfMemoryError here, anything else (i.e the original NPE is a failure)
// We expect an OutOfMemoryError here; anything else
// (i.e. the original NPE) is a failure.
catch (OutOfMemoryError oom){
String oomStackTrace = Utils.exceptionString(oom);
assertThat("expected OutOfMemoryError in org.apache.spark.util.collection.unsafe.sort.UnsafeInMemorySorter.reset",
oomStackTrace,
Matchers.containsString("org.apache.spark.util.collection.unsafe.sort.UnsafeInMemorySorter.reset"));
assertThat("expected OutOfMemoryError in " +
"org.apache.spark.util.collection.unsafe.sort.UnsafeInMemorySorter.reset",
oomStackTrace,
Matchers.containsString(
"org.apache.spark.util.collection.unsafe.sort.UnsafeInMemorySorter.reset"));
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -183,7 +183,8 @@ public int compare(
} catch (OutOfMemoryError oom) {
// as expected
}
// [SPARK-21907] this failed on NPE at org.apache.spark.memory.MemoryConsumer.freeArray(MemoryConsumer.java:108)
// [SPARK-21907] this failed on NPE at
// org.apache.spark.memory.MemoryConsumer.freeArray(MemoryConsumer.java:108)
sorter.free();
// simulate a 'back to back' free.
sorter.free();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -259,7 +259,7 @@ public int compare(
Object baseObj2,
long baseOff2,
int baseLen2) {
// Note that since ordering doesn't need the total length of the record, we just pass -1
// Note that since ordering doesn't need the total length of the record, we just pass -1
// into the row.
row1.pointTo(baseObj1, baseOff1 + 4, -1);
row2.pointTo(baseObj2, baseOff2 + 4, -1);
Expand Down