From 4b5f7dec2b325a5e7dcd95935253e76d14fbeaf3 Mon Sep 17 00:00:00 2001
From: Fedor Indutny
Date: Mon, 7 Sep 2015 17:40:43 -0700
Subject: [PATCH] deps: improve ArrayBuffer performance in v8

This is a backport of the following commits from the v8 upstream:

* 1a8c38c50513f9af07ada479629a653e1cf36ff3
* 206f12abee3f1e7eda8fc6521d48f3c319460ee1
* 9e3676da9ab1aaf7de3e8582cb3fdefcc3dbaf33

Original commit message:

    heap: make array buffer maps disjoint

    Remove the intersection between the `std::map`s representing the
    currently live ArrayBuffers. While simpler to understand, the
    overlapping maps pose a significant performance issue for active
    ArrayBuffer users (like node.js).

    Store the buffers separately, and process them together during the
    mark-sweep phase.

    The results of benchmarks are:

        $ ./node-slow bench && ./node-fast bench
        4997.4 ns/op
        4685.7 ns/op

    NOTE: `fast` is the patched node.js, `slow` is unpatched node.js with
    vanilla v8.

PR-URL: https://github.com/nodejs/node/pull/2732
Reviewed-By: Rod Vagg
Reviewed-By: Brian White
Reviewed-By: Roman Reiss
---
 deps/v8/src/heap/heap.cc         | 145 +++++++++++++------------------
 deps/v8/src/heap/heap.h          |  15 ----
 deps/v8/src/heap/mark-compact.cc |   3 +
 3 files changed, 62 insertions(+), 101 deletions(-)
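The hunks below are easier to follow with the new bookkeeping scheme in mind.
Here is a minimal, standalone sketch of that scheme. It is not V8 code: the
names (BufferTracker, Promote, SweepDead, discovered) are invented for
illustration, and the "not yet discovered" maps that the real patch re-arms
after every GC are simplified here to a set of discovered pointers passed into
the sweep. The actual implementation is in Heap::RegisterNewArrayBuffer(),
Heap::PromoteArrayBuffer() and Heap::FreeDeadArrayBuffers() in the diff below.

// Standalone sketch of the disjoint-map bookkeeping (not V8 code).
#include <cstdio>
#include <cstdlib>
#include <map>
#include <set>

class BufferTracker {
 public:
  // Each backing store is tracked in exactly one map, chosen by the
  // generation of its JSArrayBuffer. Before the patch, new-space buffers
  // also appeared in the map holding all buffers; keeping the maps disjoint
  // removes that duplicated bookkeeping.
  void Register(bool in_new_space, void* data, size_t length) {
    (in_new_space ? new_space_ : old_space_)[data] = length;
  }

  // When a scavenge promotes the JSArrayBuffer to old space, its entry
  // simply migrates from one map to the other.
  void Promote(void* data) {
    auto it = new_space_.find(data);
    if (it == new_space_.end()) return;
    old_space_[data] = it->second;
    new_space_.erase(it);
  }

  // Full GC: both maps are processed together; any buffer the marker did not
  // discover is freed and dropped from its map.
  size_t SweepDead(const std::set<void*>& discovered) {
    return SweepMap(&new_space_, discovered) + SweepMap(&old_space_, discovered);
  }

 private:
  static size_t SweepMap(std::map<void*, size_t>* buffers,
                         const std::set<void*>& discovered) {
    size_t freed_memory = 0;
    for (auto it = buffers->begin(); it != buffers->end();) {
      if (discovered.count(it->first)) {
        ++it;
      } else {
        std::free(it->first);  // stand-in for the ArrayBuffer allocator
        freed_memory += it->second;
        it = buffers->erase(it);
      }
    }
    return freed_memory;
  }

  std::map<void*, size_t> new_space_;  // buffers owned by new-space JSArrayBuffers
  std::map<void*, size_t> old_space_;  // everything else
};

int main() {
  BufferTracker tracker;
  void* young = std::malloc(16);
  void* older = std::malloc(32);
  tracker.Register(true, young, 16);
  tracker.Register(false, older, 32);
  tracker.Promote(young);  // survived a scavenge
  // Only `young` was discovered by the marker, so `older` (32 bytes) is freed.
  std::printf("freed %zu bytes\n", tracker.SweepDead({young}));
  std::free(young);
  return 0;
}

Because the maps are disjoint, a scavenge only ever has to walk the new-space
map; that is what removes the duplicated per-buffer bookkeeping the old code
did by registering every new-space buffer in both maps.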
diff --git a/deps/v8/src/heap/heap.cc b/deps/v8/src/heap/heap.cc
index 3d953730013252..5bcc9097ee3369 100644
--- a/deps/v8/src/heap/heap.cc
+++ b/deps/v8/src/heap/heap.cc
@@ -1854,62 +1854,17 @@ void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
 }
 
 
-void Heap::RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
-                                        void* data, size_t length) {
-  live_buffers[data] = length;
-}
-
-
-void Heap::UnregisterArrayBufferHelper(
-    std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
-  DCHECK(live_buffers.count(data) > 0);
-  live_buffers.erase(data);
-  not_yet_discovered_buffers.erase(data);
-}
-
-
-void Heap::RegisterLiveArrayBufferHelper(
-    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
-  not_yet_discovered_buffers.erase(data);
-}
-
-
-size_t Heap::FreeDeadArrayBuffersHelper(
-    Isolate* isolate, std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers) {
-  size_t freed_memory = 0;
-  for (auto buffer = not_yet_discovered_buffers.begin();
-       buffer != not_yet_discovered_buffers.end(); ++buffer) {
-    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
-    freed_memory += buffer->second;
-    live_buffers.erase(buffer->first);
-  }
-  not_yet_discovered_buffers = live_buffers;
-  return freed_memory;
-}
-
-
-void Heap::TearDownArrayBuffersHelper(
-    Isolate* isolate, std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers) {
-  for (auto buffer = live_buffers.begin(); buffer != live_buffers.end();
-       ++buffer) {
-    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
-  }
-  live_buffers.clear();
-  not_yet_discovered_buffers.clear();
-}
-
-
 void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
                                   size_t length) {
   if (!data) return;
-  RegisterNewArrayBufferHelper(live_array_buffers_, data, length);
   if (in_new_space) {
-    RegisterNewArrayBufferHelper(live_array_buffers_for_scavenge_, data,
-                                 length);
+    live_array_buffers_for_scavenge_[data] = length;
+  } else {
+    live_array_buffers_[data] = length;
   }
+
+  // We may go over the limit of externally allocated memory here. We call the
+  // api function to trigger a GC in this case.
   reinterpret_cast<v8::Isolate*>(isolate_)
       ->AdjustAmountOfExternalAllocatedMemory(length);
 }
@@ -1917,57 +1872,75 @@ void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
 
 void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
   if (!data) return;
-  UnregisterArrayBufferHelper(live_array_buffers_,
-                              not_yet_discovered_array_buffers_, data);
-  if (in_new_space) {
-    UnregisterArrayBufferHelper(live_array_buffers_for_scavenge_,
-                                not_yet_discovered_array_buffers_for_scavenge_,
-                                data);
-  }
+
+  std::map<void*, size_t>* live_buffers =
+      in_new_space ? &live_array_buffers_for_scavenge_ : &live_array_buffers_;
+  std::map<void*, size_t>* not_yet_discovered_buffers =
+      in_new_space ? &not_yet_discovered_array_buffers_for_scavenge_
+                   : &not_yet_discovered_array_buffers_;
+
+  DCHECK(live_buffers->count(data) > 0);
+  live_buffers->erase(data);
+  not_yet_discovered_buffers->erase(data);
 }
 
 
 void Heap::RegisterLiveArrayBuffer(bool from_scavenge, void* data) {
   // ArrayBuffer might be in the middle of being constructed.
   if (data == undefined_value()) return;
-  RegisterLiveArrayBufferHelper(
-      from_scavenge ? not_yet_discovered_array_buffers_for_scavenge_
-                    : not_yet_discovered_array_buffers_,
-      data);
+  if (from_scavenge) {
+    not_yet_discovered_array_buffers_for_scavenge_.erase(data);
+  } else if (!not_yet_discovered_array_buffers_.erase(data)) {
+    not_yet_discovered_array_buffers_for_scavenge_.erase(data);
+  }
 }
 
 
 void Heap::FreeDeadArrayBuffers(bool from_scavenge) {
-  if (from_scavenge) {
-    for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) {
-      not_yet_discovered_array_buffers_.erase(buffer.first);
-      live_array_buffers_.erase(buffer.first);
-    }
-  } else {
+  size_t freed_memory = 0;
+  for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) {
+    isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
+    freed_memory += buffer.second;
+    live_array_buffers_for_scavenge_.erase(buffer.first);
+  }
+
+  if (!from_scavenge) {
    for (auto& buffer : not_yet_discovered_array_buffers_) {
-      // Scavenge can't happend during evacuation, so we only need to update
-      // live_array_buffers_for_scavenge_.
-      // not_yet_discovered_array_buffers_for_scanvenge_ will be reset before
-      // the next scavenge run in PrepareArrayBufferDiscoveryInNewSpace.
-      live_array_buffers_for_scavenge_.erase(buffer.first);
+      isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
+      freed_memory += buffer.second;
+      live_array_buffers_.erase(buffer.first);
    }
   }
-  size_t freed_memory = FreeDeadArrayBuffersHelper(
-      isolate_,
-      from_scavenge ? live_array_buffers_for_scavenge_ : live_array_buffers_,
-      from_scavenge ? not_yet_discovered_array_buffers_for_scavenge_
-                    : not_yet_discovered_array_buffers_);
-  if (freed_memory) {
-    reinterpret_cast<v8::Isolate*>(isolate_)
-        ->AdjustAmountOfExternalAllocatedMemory(
-            -static_cast<int64_t>(freed_memory));
-  }
+
+  not_yet_discovered_array_buffers_for_scavenge_ =
+      live_array_buffers_for_scavenge_;
+  if (!from_scavenge) not_yet_discovered_array_buffers_ = live_array_buffers_;
+
+  // Do not call through the api as this code is triggered while doing a GC.
+  amount_of_external_allocated_memory_ -= freed_memory;
 }
 
 
 void Heap::TearDownArrayBuffers() {
-  TearDownArrayBuffersHelper(isolate_, live_array_buffers_,
-                             not_yet_discovered_array_buffers_);
+  size_t freed_memory = 0;
+  for (auto& buffer : live_array_buffers_) {
+    isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
+    freed_memory += buffer.second;
+  }
+  for (auto& buffer : live_array_buffers_for_scavenge_) {
+    isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
+    freed_memory += buffer.second;
+  }
+  live_array_buffers_.clear();
+  live_array_buffers_for_scavenge_.clear();
+  not_yet_discovered_array_buffers_.clear();
+  not_yet_discovered_array_buffers_for_scavenge_.clear();
+
+  if (freed_memory > 0) {
+    reinterpret_cast<v8::Isolate*>(isolate_)
+        ->AdjustAmountOfExternalAllocatedMemory(
+            -static_cast<int64_t>(freed_memory));
+  }
 }
 
 
@@ -1985,7 +1958,7 @@ void Heap::PromoteArrayBuffer(Object* obj) {
   // ArrayBuffer might be in the middle of being constructed.
   if (data == undefined_value()) return;
   DCHECK(live_array_buffers_for_scavenge_.count(data) > 0);
-  DCHECK(live_array_buffers_.count(data) > 0);
+  live_array_buffers_[data] = live_array_buffers_for_scavenge_[data];
   live_array_buffers_for_scavenge_.erase(data);
   not_yet_discovered_array_buffers_for_scavenge_.erase(data);
 }
diff --git a/deps/v8/src/heap/heap.h b/deps/v8/src/heap/heap.h
index 4c9e3ad1448032..0f0cfc15fca472 100644
--- a/deps/v8/src/heap/heap.h
+++ b/deps/v8/src/heap/heap.h
@@ -2147,21 +2147,6 @@ class Heap {
   // Called on heap tear-down. Frees all remaining ArrayBuffer backing stores.
   void TearDownArrayBuffers();
 
-  // These correspond to the non-Helper versions.
-  void RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
-                                    void* data, size_t length);
-  void UnregisterArrayBufferHelper(
-      std::map<void*, size_t>& live_buffers,
-      std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
-  void RegisterLiveArrayBufferHelper(
-      std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
-  size_t FreeDeadArrayBuffersHelper(
-      Isolate* isolate, std::map<void*, size_t>& live_buffers,
-      std::map<void*, size_t>& not_yet_discovered_buffers);
-  void TearDownArrayBuffersHelper(
-      Isolate* isolate, std::map<void*, size_t>& live_buffers,
-      std::map<void*, size_t>& not_yet_discovered_buffers);
-
   // Record statistics before and after garbage collection.
   void ReportStatisticsBeforeGC();
   void ReportStatisticsAfterGC();
diff --git a/deps/v8/src/heap/mark-compact.cc b/deps/v8/src/heap/mark-compact.cc
index 9ca06cf2e64c44..508d49913f46c3 100644
--- a/deps/v8/src/heap/mark-compact.cc
+++ b/deps/v8/src/heap/mark-compact.cc
@@ -4310,6 +4310,9 @@ void MarkCompactCollector::SweepSpaces() {
 
   EvacuateNewSpaceAndCandidates();
 
+  // NOTE: ArrayBuffers must be evacuated first, before freeing them. Otherwise
+  // not yet discovered buffers for scavenge will have all of them, and they
+  // will be erroneously freed.
   heap()->FreeDeadArrayBuffers(false);
 
   // ClearNonLiveReferences depends on precise sweeping of map space to