-
Notifications
You must be signed in to change notification settings - Fork 10k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Partition some pools by core #40476
Partition some pools by core #40476
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -32,7 +32,7 @@ internal sealed class PinnedBlockMemoryPool : MemoryPool<byte> | |
/// Thread-safe collection of blocks which are currently in the pool. A slab will pre-allocate all of the block tracking objects | ||
/// and add them to this collection. When memory is requested it is taken from here first, and when it is returned it is re-added. | ||
/// </summary> | ||
private readonly ConcurrentQueue<MemoryPoolBlock> _blocks = new ConcurrentQueue<MemoryPoolBlock>(); | ||
private readonly ConcurrentQueue<MemoryPoolBlock>[] _queues; | ||
|
||
/// <summary> | ||
/// This is part of implementing the IDisposable pattern. | ||
|
@@ -46,6 +46,15 @@ internal sealed class PinnedBlockMemoryPool : MemoryPool<byte> | |
/// </summary> | ||
private const int AnySize = -1; | ||
|
||
public PinnedBlockMemoryPool() | ||
{ | ||
_queues = new ConcurrentQueue<MemoryPoolBlock>[Environment.ProcessorCount]; | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Similarly to https://github.com/dotnet/runtime/blob/07e87bc4cb0358f57e7116e047f7d2017b049cf9/src/libraries/System.Private.CoreLib/src/System/Buffers/TlsOverPerCoreLockedStacksArrayPool.cs#L301, I think it would be good to have some upper limit |
||
|
||
for (var i = 0; i < _queues.Length; i++) | ||
{ | ||
_queues[i] = new ConcurrentQueue<MemoryPoolBlock>(); | ||
} | ||
} | ||
public override IMemoryOwner<byte> Rent(int size = AnySize) | ||
{ | ||
if (size > _blockSize) | ||
|
@@ -58,7 +67,10 @@ public override IMemoryOwner<byte> Rent(int size = AnySize) | |
MemoryPoolThrowHelper.ThrowObjectDisposedException(MemoryPoolThrowHelper.ExceptionArgument.MemoryPool); | ||
} | ||
|
||
if (_blocks.TryDequeue(out var block)) | ||
|
||
var partition = Thread.GetCurrentProcessorId() % _queues.Length; | ||
|
||
if (_queues[partition].TryDequeue(out var block)) | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Should it stop on the first failure, or try to dequeue from another partition? There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. It likely would be better to assign a pooled item with a partition and return it to the same partition it was taken from. I had seen this issue before and recently again in testing with thread pool changes along with this change, where there is still a fair bit of allocation even with this change. The issue is that currently, some threads rent buffers and in many cases other threads return them, so the threads renting buffers will eventually run out and will have to allocate. Returning the buffers (or pooled items similarly for |
||
{ | ||
// block successfully taken from the stack - return it | ||
return block; | ||
|
@@ -84,7 +96,9 @@ internal void Return(MemoryPoolBlock block) | |
|
||
if (!_isDisposed) | ||
{ | ||
_blocks.Enqueue(block); | ||
var partition = Thread.GetCurrentProcessorId() % _queues.Length; | ||
|
||
_queues[partition].Enqueue(block); | ||
} | ||
} | ||
|
||
|
@@ -101,11 +115,13 @@ protected override void Dispose(bool disposing) | |
|
||
if (disposing) | ||
{ | ||
// Discard blocks in pool | ||
while (_blocks.TryDequeue(out _)) | ||
foreach (var queue in _queues) | ||
{ | ||
|
||
} | ||
while (queue.TryDequeue(out var block)) | ||
{ | ||
block.Dispose(); | ||
} | ||
} | ||
} | ||
} | ||
} | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Would it be possible to just disable the
SocketSenders
pooling for machines with a large number of CPU cores?