43 changes: 36 additions & 7 deletions src/HotChocolate/Core/src/Types/Text/Json/ResultDocument.MetaDb.cs
@@ -15,7 +15,9 @@ internal struct MetaDb : IDisposable
     private static readonly ArrayPool<byte[]> s_arrayPool = ArrayPool<byte[]>.Shared;
 
     private byte[][] _chunks;
+    private byte[][]? _previousChunks;
     private Cursor _next;
+    private volatile uint _nextValue;
     private bool _disposed;
 
     internal static MetaDb CreateForEstimatedRows(int estimatedRows)
@@ -38,11 +40,20 @@ internal static MetaDb CreateForEstimatedRows(int estimatedRows)
         return new MetaDb
         {
             _chunks = chunks,
-            _next = Cursor.Zero
+            _next = Cursor.Zero,
+            _nextValue = Cursor.Zero.Value
         };
     }
 
-    public Cursor NextCursor => _next;
+    public readonly Cursor NextCursor
+    {
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        get
+        {
+            var value = _nextValue;
+            return Unsafe.As<uint, Cursor>(ref value);
+        }
+    }
 
     [MethodImpl(MethodImplOptions.AggressiveInlining)]
     internal Cursor Append(
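A note on the NextCursor change: reading the plain Cursor field from another thread risks a stale value with no ordering guarantees, so the PR mirrors the cursor into a volatile uint and lets readers reinterpret those four bytes back into a Cursor. A minimal sketch of the pattern, assuming Cursor packs chunk and row indices into a single uint — PackedCursor and WriteHead below are illustrative names, not HotChocolate types:

using System.Runtime.CompilerServices;

// Hypothetical 4-byte cursor: high 16 bits = chunk index, low 16 bits = row.
internal readonly struct PackedCursor
{
    public const int RowsPerChunk = 1 << 16;

    public PackedCursor(uint value) => Value = value;

    public uint Value { get; }
    public int Chunk => (int)(Value >> 16);
    public int Row => (int)(Value & 0xFFFF);
}

internal sealed class WriteHead
{
    // A volatile uint can be read and written atomically; mirroring the
    // cursor here gives readers a fresh, fully published snapshot.
    private volatile uint _nextValue;

    public PackedCursor Next
    {
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        get
        {
            // Copy the volatile field into a local, then reinterpret the
            // four bytes as the struct (Unsafe.As requires a mutable local).
            var value = _nextValue;
            return Unsafe.As<uint, PackedCursor>(ref value);
        }
    }

    // The volatile store acts as a release: write the row payload first,
    // then advance, so readers never observe a cursor covering unwritten rows.
    public void Advance(PackedCursor next) => _nextValue = next.Value;
}

On .NET, aligned 32-bit reads are already atomic; what volatile adds is the acquire/release ordering that keeps cursor publication in step with the row writes that precede it.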
@@ -87,9 +98,16 @@ internal Cursor Append(
                 newChunks[i] = [];
             }
 
-            // clear and return old chunks buffer
-            chunks.Clear();
-            s_arrayPool.Return(_chunks);
+            // Concurrent readers may still reference the current chunks array.
+            // Return the previously retained one and keep the current one
+            // alive until the next expansion or Dispose.
+            if (_previousChunks is not null)
+            {
+                _previousChunks.AsSpan().Clear();
+                s_arrayPool.Return(_previousChunks);
+            }
+
+            _previousChunks = _chunks;
 
             // assign new chunks buffer
             _chunks = newChunks;
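The retention logic above amounts to a one-generation reclamation scheme: the outgoing chunk table is parked in _previousChunks rather than returned to the pool, so a reader that loaded the old table reference just before the swap still dereferences live memory; only the table retired one expansion earlier is recycled. A sketch of the shape, under the diff's stated assumption that no reader outlives a full expansion cycle — ChunkTable is an illustrative stand-in, not the MetaDb type:

using System.Buffers;

internal sealed class ChunkTable
{
    private static readonly ArrayPool<byte[]> s_pool = ArrayPool<byte[]>.Shared;

    private byte[][] _chunks = s_pool.Rent(4);
    private byte[][]? _previousChunks;

    // Readers load this reference once and index into it lock-free.
    public byte[][] Current => _chunks;

    public void Grow()
    {
        var newChunks = s_pool.Rent(_chunks.Length * 2);
        _chunks.AsSpan().CopyTo(newChunks);

        // The table parked on the previous expansion can be recycled now;
        // per the assumption above, no reader still holds it.
        if (_previousChunks is not null)
        {
            _previousChunks.AsSpan().Clear();
            s_pool.Return(_previousChunks);
        }

        // Park the outgoing table instead of returning it: a concurrent
        // reader may have loaded it just before _chunks was swapped.
        _previousChunks = _chunks;
        _chunks = newChunks;
    }
}

Dispose (further down in this diff) drains _previousChunks the same way, so the parked generation is cleared and returned rather than leaked.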
@@ -119,7 +137,9 @@ internal Cursor Append(
         Unsafe.WriteUnaligned(ref Unsafe.Add(ref dest, byteOffset), row);
 
         // Advance write head by one row
-        _next = next + 1;
+        var newNext = next + 1;
+        _next = newNext;
+        _nextValue = newNext.Value;
         return next;
     }
 
@@ -331,7 +351,9 @@ private void AssertValidCursor(Cursor cursor)
         Debug.Assert(cursor.Chunk < _chunks.Length, "Chunk index out of bounds");
         Debug.Assert(_chunks[cursor.Chunk].Length > 0, "Accessing unallocated chunk");
 
-        var maxExclusive = _next.Chunk * Cursor.RowsPerChunk + _next.Row;
+        var value = _nextValue;
+        var maxCursor = Unsafe.As<uint, Cursor>(ref value);
+        var maxExclusive = maxCursor.Chunk * Cursor.RowsPerChunk + maxCursor.Row;
         var absoluteIndex = (cursor.Chunk * Cursor.RowsPerChunk) + cursor.Row;
 
         Debug.Assert(absoluteIndex >= 0 && absoluteIndex < maxExclusive,
@@ -348,6 +370,13 @@ public void Dispose()
         var chunks = _chunks.AsSpan(0, chunksLength);
         Log.MetaDbDisposed(2, chunksLength, cursor.Row);
 
+        if (_previousChunks is not null)
+        {
+            _previousChunks.AsSpan().Clear();
+            s_arrayPool.Return(_previousChunks);
+            _previousChunks = null;
+        }
+
         foreach (var chunk in chunks)
         {
             if (chunk.Length == 0)
@@ -0,0 +1,134 @@
using GreenDonut;
using HotChocolate.Types;
using Microsoft.Extensions.DependencyInjection;
using static HotChocolate.Tests.TestHelper;

namespace HotChocolate.Execution.Integration.DataLoader;

public class Issue9500Tests
{
[Fact]
public async Task Composite_DataLoader_Result_Overflows_Selection_Buffer_When_Paging_Many_Nodes()
{
const int nodeCount = 100_000;

var executor = await CreateExecutorAsync(
c => c
.AddQueryType<Issue9500Query>()
.AddDataLoader<INoteDataLoader, NoteDataLoader>()
.ModifyRequestOptions(o => o.IncludeExceptionDetails = true));

var result = await executor.ExecuteAsync(
OperationRequestBuilder.New()
.SetDocument(
$$"""
{
items(first: {{nodeCount}}) {
edges {
cursor
node {
id
note {
comment
dueDate
progress
assignee
status
priority
category
createdBy
updatedBy
title
summary
kind
owner
reviewer
milestone
}
}
}
}
}
""")
.Build());

Assert.Empty(result.ExpectOperationResult().Errors);
}

public class Issue9500Query
{
[UsePaging(DefaultPageSize = 100000, MaxPageSize = 100000)]
public IEnumerable<Item> GetItems()
=> Enumerable.Range(0, 100_000).Select(i => new Item(i));
}

public class Item(int id)
{
public int Id { get; } = id;

public Task<Note?> GetNoteAsync(
INoteDataLoader dataLoader,
CancellationToken cancellationToken)
=> dataLoader.LoadAsync(Id, cancellationToken);
}

public interface INoteDataLoader
: IDataLoader<int, Note>;

public class NoteDataLoader(
IBatchScheduler batchScheduler,
DataLoaderOptions options)
: BatchDataLoader<int, Note>(batchScheduler, options), INoteDataLoader
{
protected override Task<IReadOnlyDictionary<int, Note>> LoadBatchAsync(
IReadOnlyList<int> keys,
CancellationToken cancellationToken)
{
return LoadAsync(keys, cancellationToken);
}

private static async Task<IReadOnlyDictionary<int, Note>> LoadAsync(
IReadOnlyList<int> keys,
CancellationToken cancellationToken)
{
await Task.Delay(1, cancellationToken);
cancellationToken.ThrowIfCancellationRequested();

return keys.ToDictionary(
key => key,
key => new Note(
$"Comment {key}",
$"2026-04-{(key % 28) + 1:00}",
key % 100,
$"Assignee {key}",
key % 2 == 0 ? "Open" : "Closed",
$"P{key % 5}",
$"Category {key % 7}",
$"Creator {key % 11}",
$"Updater {key % 13}",
$"Title {key}",
$"Summary {key}",
$"Kind {key % 3}",
$"Owner {key % 17}",
$"Reviewer {key % 19}",
$"Milestone {key % 23}"));
}
}

public record Note(
string Comment,
string DueDate,
int Progress,
string Assignee,
string Status,
string Priority,
string Category,
string CreatedBy,
string UpdatedBy,
string Title,
string Summary,
string Kind,
string Owner,
string Reviewer,
string Milestone);
}
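For a sense of scale behind the repro: each of the 100,000 edges materializes an edge, a cursor, a node, the node's id, a note, and 15 note fields, so the result metadata runs to roughly two million rows — presumably forcing MetaDb through many chunk expansions while DataLoader-resolved fields are still being appended, which is the reader/writer window the changes above close. A back-of-envelope tally; the per-edge row accounting is an assumption, not HotChocolate's measured layout:

// Hypothetical row accounting for the query in the test above.
const int nodeCount = 100_000;
const int noteFields = 15;                      // comment, dueDate, ... milestone
const int rowsPerEdge = 1 /* edge */ + 1 /* cursor */ + 1 /* node */
    + 1 /* id */ + 1 /* note */ + noteFields;

// ~2.0M metadata rows before counting the items/edges wrappers themselves.
Console.WriteLine(nodeCount * rowsPerEdge);     // 2000000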