From 36ac3d642e3240bb0bcb30c2cacfebfd03afeb0e Mon Sep 17 00:00:00 2001
From: Fedor Indutny
Date: Tue, 27 Oct 2015 12:54:42 -0400
Subject: [PATCH] deps: backport 8d6a228 from the v8's upstream

Original commit message:

    [heap] fix crash during the scavenge of ArrayBuffer

    Scavenger should not attempt to visit ArrayBuffer's storage, it is a
    user-supplied pointer that may have any alignment. Visiting it, may
    result in a crash.

    BUG=
    R=jochen

    Review URL: https://codereview.chromium.org/1406133003

    Cr-Commit-Position: refs/heads/master@{#31611}

PR-URL: https://github.com/nodejs/node/pull/4259
Reviewed-By: Ali Ijaz Sheikh
Reviewed-By: James M Snell
---
 deps/v8/src/heap/heap.cc        | 104 +++++++++++++++++++++-----------
 deps/v8/src/heap/heap.h         |   3 +
 deps/v8/test/cctest/test-api.cc |  26 ++++++++
 3 files changed, 97 insertions(+), 36 deletions(-)

diff --git a/deps/v8/src/heap/heap.cc b/deps/v8/src/heap/heap.cc
index e04f99ff7ee331..67e7fe34b5ad15 100644
--- a/deps/v8/src/heap/heap.cc
+++ b/deps/v8/src/heap/heap.cc
@@ -1876,42 +1876,8 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
         // for pointers to from semispace instead of looking for pointers
         // to new space.
         DCHECK(!target->IsMap());
-        Address obj_address = target->address();
-
-        // We are not collecting slots on new space objects during mutation
-        // thus we have to scan for pointers to evacuation candidates when we
-        // promote objects. But we should not record any slots in non-black
-        // objects. Grey object's slots would be rescanned.
-        // White object might not survive until the end of collection
-        // it would be a violation of the invariant to record it's slots.
-        bool record_slots = false;
-        if (incremental_marking()->IsCompacting()) {
-          MarkBit mark_bit = Marking::MarkBitFrom(target);
-          record_slots = Marking::IsBlack(mark_bit);
-        }
-#if V8_DOUBLE_FIELDS_UNBOXING
-        LayoutDescriptorHelper helper(target->map());
-        bool has_only_tagged_fields = helper.all_fields_tagged();
-
-        if (!has_only_tagged_fields) {
-          for (int offset = 0; offset < size;) {
-            int end_of_region_offset;
-            if (helper.IsTagged(offset, size, &end_of_region_offset)) {
-              IterateAndMarkPointersToFromSpace(
-                  target, obj_address + offset,
-                  obj_address + end_of_region_offset, record_slots,
-                  &Scavenger::ScavengeObject);
-            }
-            offset = end_of_region_offset;
-          }
-        } else {
-#endif
-          IterateAndMarkPointersToFromSpace(target, obj_address,
-                                            obj_address + size, record_slots,
-                                            &Scavenger::ScavengeObject);
-#if V8_DOUBLE_FIELDS_UNBOXING
-        }
-#endif
+
+        IteratePointersToFromSpace(target, size, &Scavenger::ScavengeObject);
       }
     }
 
@@ -4438,6 +4404,72 @@ void Heap::IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
 }
 
 
+void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
+                                      ObjectSlotCallback callback) {
+  Address obj_address = target->address();
+
+  // We are not collecting slots on new space objects during mutation
+  // thus we have to scan for pointers to evacuation candidates when we
+  // promote objects. But we should not record any slots in non-black
+  // objects. Grey object's slots would be rescanned.
+  // White object might not survive until the end of collection
+  // it would be a violation of the invariant to record it's slots.
+  bool record_slots = false;
+  if (incremental_marking()->IsCompacting()) {
+    MarkBit mark_bit = Marking::MarkBitFrom(target);
+    record_slots = Marking::IsBlack(mark_bit);
+  }
+
+  // Do not scavenge JSArrayBuffer's contents
+  switch (target->ContentType()) {
+    case HeapObjectContents::kTaggedValues: {
+      IterateAndMarkPointersToFromSpace(target, obj_address, obj_address + size,
+                                        record_slots, callback);
+      break;
+    }
+    case HeapObjectContents::kMixedValues: {
+      if (target->IsFixedTypedArrayBase()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + FixedTypedArrayBase::kBasePointerOffset,
+            obj_address + FixedTypedArrayBase::kHeaderSize, record_slots,
+            callback);
+      } else if (target->IsBytecodeArray()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + BytecodeArray::kConstantPoolOffset,
+            obj_address + BytecodeArray::kHeaderSize, record_slots, callback);
+      } else if (target->IsJSArrayBuffer()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address,
+            obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize,
+            record_slots, callback);
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + JSArrayBuffer::kSize, obj_address + size,
+            record_slots, callback);
+#if V8_DOUBLE_FIELDS_UNBOXING
+      } else if (FLAG_unbox_double_fields) {
+        LayoutDescriptorHelper helper(target->map());
+        DCHECK(!helper.all_fields_tagged());
+
+        for (int offset = 0; offset < size;) {
+          int end_of_region_offset;
+          if (helper.IsTagged(offset, size, &end_of_region_offset)) {
+            IterateAndMarkPointersToFromSpace(
+                target, obj_address + offset,
+                obj_address + end_of_region_offset, record_slots, callback);
+          }
+          offset = end_of_region_offset;
+        }
+#endif
+      }
+      break;
+    }
+    case HeapObjectContents::kRawValues: {
+      break;
+    }
+  }
+}
+
+
 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
   IterateStrongRoots(v, mode);
   IterateWeakRoots(v, mode);
diff --git a/deps/v8/src/heap/heap.h b/deps/v8/src/heap/heap.h
index 0e427de1c93643..cb18ab5611fea5 100644
--- a/deps/v8/src/heap/heap.h
+++ b/deps/v8/src/heap/heap.h
@@ -1237,6 +1237,9 @@ class Heap {
 
   // Iterate pointers to from semispace of new space found in memory interval
   // from start to end within |object|.
+  void IteratePointersToFromSpace(HeapObject* target, int size,
+                                  ObjectSlotCallback callback);
+
   void IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
                                          Address end, bool record_slots,
                                          ObjectSlotCallback callback);
diff --git a/deps/v8/test/cctest/test-api.cc b/deps/v8/test/cctest/test-api.cc
index 93cdce62075fd6..784ca1347eb2cb 100644
--- a/deps/v8/test/cctest/test-api.cc
+++ b/deps/v8/test/cctest/test-api.cc
@@ -14191,6 +14191,32 @@ THREADED_TEST(SkipArrayBufferBackingStoreDuringGC) {
 }
 
 
+THREADED_TEST(SkipArrayBufferDuringScavenge) {
+  LocalContext env;
+  v8::Isolate* isolate = env->GetIsolate();
+  v8::HandleScope handle_scope(isolate);
+
+  // Make sure the pointer looks like a heap object
+  Local<v8::Object> tmp = v8::Object::New(isolate);
+  uint8_t* store_ptr =
+      reinterpret_cast<uint8_t*>(*reinterpret_cast<uintptr_t*>(*tmp));
+
+  // Make `store_ptr` point to from space
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
+
+  // Create ArrayBuffer with pointer-that-cannot-be-visited in the backing store
+  Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, store_ptr, 8);
+
+  // Should not crash,
+  // i.e. backing store pointer should not be treated as a heap object pointer
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
+
+  // Use `ab` to silence compiler warning
+  CHECK_EQ(ab->GetContents().Data(), store_ptr);
+}
+
+
 THREADED_TEST(SharedUint8Array) {
   i::FLAG_harmony_sharedarraybuffer = true;
   TypedArrayTestHelper
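
A minimal embedder-side sketch of the scenario the commit message describes, for illustration only: an ArrayBuffer whose backing store is a user-supplied pointer with arbitrary alignment, created through the same v8::ArrayBuffer::New(isolate, data, byte_length) overload that the new cctest calls. The function name and the storage buffer below are assumptions made for the example; they are not part of the patch or of V8's API.

    // Sketch only: mirrors the commit message using the 2015-era v8.h API
    // that the cctest above also uses. Names here are illustrative.
    #include <v8.h>

    void SketchUnalignedBackingStore(v8::Isolate* isolate) {
      v8::HandleScope scope(isolate);
      // Embedder-owned storage; the +1 makes the pointer deliberately misaligned.
      static char storage[16];
      void* store = storage + 1;
      // Externalized contents: V8 records this raw pointer, and a scavenge must
      // never visit it as if it were a tagged heap pointer (what the fix enforces).
      v8::Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, store, 8);
      // A minor GC promoting |ab| previously risked crashing on the unaligned
      // address; the cctest forces this with CcTest::heap()->CollectGarbage().
      (void)ab;  // keep |ab| referenced for the duration of the example
    }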