diff --git a/src/HotChocolate/AspNetCore/test/AspNetCore.Tests/Subscriptions/Apollo/__snapshots__/WebSocketProtocolTests.Send_Start_ReceiveDataOnMutation.snap b/src/HotChocolate/AspNetCore/test/AspNetCore.Tests/Subscriptions/Apollo/__snapshots__/WebSocketProtocolTests.Send_Start_ReceiveDataOnMutation.snap
new file mode 100644
index 00000000000..3fe244bd15b
--- /dev/null
+++ b/src/HotChocolate/AspNetCore/test/AspNetCore.Tests/Subscriptions/Apollo/__snapshots__/WebSocketProtocolTests.Send_Start_ReceiveDataOnMutation.snap
@@ -0,0 +1,11 @@
+{
+ "id": "abc",
+ "type": "data",
+ "payload": {
+ "data": {
+ "onReview": {
+ "stars": 5
+ }
+ }
+ }
+}
diff --git a/src/HotChocolate/Fusion/benchmarks/k6/deep-recursion.js b/src/HotChocolate/Fusion/benchmarks/k6/deep-recursion.js
index accba207fc0..bc804d5a1e5 100644
--- a/src/HotChocolate/Fusion/benchmarks/k6/deep-recursion.js
+++ b/src/HotChocolate/Fusion/benchmarks/k6/deep-recursion.js
@@ -3,7 +3,7 @@ import { check } from "k6";
import { Rate } from "k6/metrics";
import { textSummary } from "https://jslib.k6.io/k6-summary/0.0.1/index.js";
-const GRAPHQL_URL = 'http://localhost:5220/graphql';
+const GRAPHQL_URL = 'http://localhost:5000/graphql';
const endpoint = __ENV.GATEWAY_ENDPOINT || GRAPHQL_URL;
const mode = __ENV.MODE || "constant";
const isConstant = mode === "constant";
diff --git a/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/PathSegmentPoolDiagnostics.cs b/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/PathSegmentPoolDiagnostics.cs
new file mode 100644
index 00000000000..9ff7d13a30f
--- /dev/null
+++ b/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/PathSegmentPoolDiagnostics.cs
@@ -0,0 +1,180 @@
+using System.Diagnostics.Tracing;
+
+namespace eShop.Gateway;
+
+/// <summary>
+/// Listens to PathSegmentPool ETW events and logs aggregated usage metrics.
+/// </summary>
+internal sealed class PathSegmentPoolDiagnostics : EventListener, IHostedService
+{
+ private readonly Timer _timer;
+
+ private int _poolId;
+ private int _segmentSize;
+ private int _maxArrays;
+ private long _maxBytes;
+
+ private long _rented;
+ private long _returned;
+ private long _exhausted;
+ private long _dropped;
+ private long _allocated;
+ private long _trimmedEvents;
+ private int _lastTrimRemaining;
+ private int _lastTrimInUse;
+ private int _peakInUse;
+
+ public PathSegmentPoolDiagnostics()
+ {
+ _timer = new Timer(LogSnapshot, null, Timeout.Infinite, Timeout.Infinite);
+ }
+
+ protected override void OnEventSourceCreated(EventSource eventSource)
+ {
+ if (eventSource.Name == "HotChocolate-Fusion-PathSegmentPool")
+ {
+ EnableEvents(eventSource, EventLevel.Verbose);
+ }
+ }
+
+ protected override void OnEventWritten(EventWrittenEventArgs e)
+ {
+ // Event IDs from PathSegmentPoolEventSource:
+ // 1 = PoolCreated (PoolId, SegmentSize, Arrays, TotalBytes)
+ // 2 = SegmentRented (ArrayId, Length, PoolId, InUse)
+ // 3 = SegmentReturned (ArrayId, Length, PoolId, InUse)
+ // 4 = PoolExhausted (PoolId, MaxArrays)
+ // 5 = SegmentDropped (ArrayId, Length, PoolId)
+ // 6 = SegmentAllocated(ArrayId, Length, PoolId)
+ // 7 = PoolTrimmed (PoolId, Trimmed, Remaining, InUse)
+ switch (e.EventId)
+ {
+ case 1:
+ if (e.Payload is { Count: >= 4 })
+ {
+ if (e.Payload[0] is int poolId)
+ {
+ _poolId = poolId;
+ }
+ if (e.Payload[1] is int segmentSize)
+ {
+ _segmentSize = segmentSize;
+ }
+ if (e.Payload[2] is int maxArrays)
+ {
+ _maxArrays = maxArrays;
+ }
+ if (e.Payload[3] is long maxBytes)
+ {
+ _maxBytes = maxBytes;
+ }
+ }
+ break;
+
+ case 2:
+ Interlocked.Increment(ref _rented);
+ if (e.Payload is { Count: >= 4 } && e.Payload[3] is int inUseRent)
+ {
+ UpdatePeakInUse(inUseRent);
+ }
+ break;
+
+ case 3:
+ Interlocked.Increment(ref _returned);
+ break;
+
+ case 4:
+ Interlocked.Increment(ref _exhausted);
+ break;
+
+ case 5:
+ Interlocked.Increment(ref _dropped);
+ break;
+
+ case 6:
+ Interlocked.Increment(ref _allocated);
+ break;
+
+ case 7:
+ Interlocked.Increment(ref _trimmedEvents);
+ if (e.Payload is { Count: >= 4 })
+ {
+ if (e.Payload[2] is int remaining)
+ {
+ _lastTrimRemaining = remaining;
+ }
+ if (e.Payload[3] is int inUseTrim)
+ {
+ _lastTrimInUse = inUseTrim;
+ }
+ }
+ break;
+ }
+ }
+
+ private void UpdatePeakInUse(int inUse)
+ {
+ int current;
+ do
+ {
+ current = _peakInUse;
+ if (inUse <= current)
+ {
+ return;
+ }
+ }
+ while (Interlocked.CompareExchange(ref _peakInUse, inUse, current) != current);
+ }
+
+ private void LogSnapshot(object? state)
+ {
+ var rented = Interlocked.Read(ref _rented);
+ var returned = Interlocked.Read(ref _returned);
+ var exhausted = Interlocked.Read(ref _exhausted);
+ var dropped = Interlocked.Read(ref _dropped);
+ var allocated = Interlocked.Read(ref _allocated);
+ var trimmedEvents = Interlocked.Read(ref _trimmedEvents);
+ var outstanding = rented - returned;
+
+ Console.WriteLine(
+ "[PathSegmentPool] PoolId={0}, SegmentSize={1}, MaxArrays={2}, MaxBytes={3}, "
+ + "Rented={4}, Returned={5}, Outstanding={6}, PeakInUse={7}, "
+ + "Exhausted={8}, Allocated={9}, Dropped={10}, "
+ + "TrimmedEvents={11}, LastTrimRemaining={12}, LastTrimInUse={13}",
+ _poolId,
+ _segmentSize,
+ _maxArrays,
+ _maxBytes,
+ rented,
+ returned,
+ outstanding,
+ _peakInUse,
+ exhausted,
+ allocated,
+ dropped,
+ trimmedEvents,
+ _lastTrimRemaining,
+ _lastTrimInUse);
+ }
+
+ public Task StartAsync(CancellationToken cancellationToken)
+ {
+ Console.WriteLine("[PathSegmentPool] Diagnostics started");
+ _timer.Change(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(5));
+ return Task.CompletedTask;
+ }
+
+ public Task StopAsync(CancellationToken cancellationToken)
+ {
+ _timer.Change(Timeout.Infinite, Timeout.Infinite);
+ LogSnapshot(null);
+ Console.WriteLine("[PathSegmentPool] Diagnostics stopped - final snapshot logged above");
+ return Task.CompletedTask;
+ }
+
+ public override void Dispose()
+ {
+ _timer.Dispose();
+ base.Dispose();
+ }
+}
diff --git a/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/Program.cs b/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/Program.cs
index e14429a81db..154ff932956 100644
--- a/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/Program.cs
+++ b/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/Program.cs
@@ -1,4 +1,4 @@
-ThreadPool.SetMinThreads(1024, 1024);
+ThreadPool.SetMinThreads(512, 512);
var builder = WebApplication.CreateBuilder(args);
@@ -11,8 +11,7 @@
builder
.AddGraphQLGateway()
- .ModifyPlannerOptions(o => o.EnableRequestGrouping = true)
- .AddFileSystemConfiguration("./gateway.far");
+ .AddFileSystemConfiguration("gateway.far");
var app = builder.Build();
diff --git a/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/Properties/launchSettings.json b/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/Properties/launchSettings.json
index fd34c7bfd5c..7981999c10a 100644
--- a/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/Properties/launchSettings.json
+++ b/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/Properties/launchSettings.json
@@ -4,8 +4,8 @@
"commandName": "Project",
"dotnetRunMessages": true,
"launchBrowser": true,
- "launchUrl": "http://localhost:5220/graphql",
- "applicationUrl": "http://localhost:5220",
+ "launchUrl": "http://localhost:5000/graphql",
+ "applicationUrl": "http://localhost:5000",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Production"
}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/DependencyInjection/HotChocolateFusionServiceCollectionExtensions.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/DependencyInjection/HotChocolateFusionServiceCollectionExtensions.cs
index 847f14a71ce..3a1fafc422b 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/DependencyInjection/HotChocolateFusionServiceCollectionExtensions.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/DependencyInjection/HotChocolateFusionServiceCollectionExtensions.cs
@@ -3,6 +3,7 @@
using HotChocolate.Fusion.Configuration;
using HotChocolate.Fusion.Execution;
using HotChocolate.Fusion.Execution.Clients;
+using HotChocolate.Fusion.Execution.Results;
using HotChocolate.Language;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.ObjectPool;
@@ -89,10 +90,18 @@ private static DefaultFusionGatewayBuilder CreateBuilder(
var builder = new DefaultFusionGatewayBuilder(services, name);
builder.AddDocumentCache();
+ builder.AddFetchResultStorePool();
builder.UseDefaultPipeline();
return builder;
}
+ private static void AddFetchResultStorePool(this IFusionGatewayBuilder builder)
+ => builder.ConfigureSchemaServices(
+ static (_, s) => s.TryAddSingleton(
+ new FetchResultStorePool(
+ levels: [4, 16, 64],
+ trimInterval: TimeSpan.FromMinutes(5))));
+
private static IFusionGatewayBuilder AddDocumentCache(this IFusionGatewayBuilder builder)
{
builder.Services.TryAddKeyedSingleton(
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaHttpClient.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaHttpClient.cs
index 2ddcd462d8e..3ad2525303a 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaHttpClient.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaHttpClient.cs
@@ -1,3 +1,4 @@
+using System.Buffers;
using System.Collections.Concurrent;
using System.Collections.Immutable;
using System.Diagnostics;
@@ -386,19 +387,19 @@ private static int ResolveVariableIndex(
private static bool TryGetResultPath(
SourceSchemaClientRequest request,
int variableIndex,
- out Path path,
- out ImmutableArray<Path> additionalPaths)
+ out CompactPath path,
+ out ImmutableArray<CompactPath> additionalPaths)
{
if (request.Variables.Length == 0)
{
- path = Path.Root;
+ path = CompactPath.Root;
additionalPaths = [];
return true;
}
if ((uint)variableIndex >= (uint)request.Variables.Length)
{
- path = Path.Root;
+ path = CompactPath.Root;
additionalPaths = [];
return false;
}
@@ -489,8 +490,8 @@ private void WriteResultToChannel(
OperationPlanContext context,
ExecutionNode node,
NodeResponse nodeResponse,
- Path path,
- ImmutableArray<Path> additionalPaths,
+ CompactPath path,
+ ImmutableArray<CompactPath> additionalPaths,
SourceResultDocument document)
{
var sourceSchemaResult = additionalPaths.IsDefaultOrEmpty
@@ -561,7 +562,7 @@ public override async IAsyncEnumerable ReadAsResultStreamAsy
{
await foreach (var result in response.ReadAsResultStreamAsync().WithCancellation(cancellationToken))
{
- var sourceSchemaResult = new SourceSchemaResult(Path.Root, result);
+ var sourceSchemaResult = new SourceSchemaResult(CompactPath.Root, result);
configuration.OnSourceSchemaResult?.Invoke(context, node, sourceSchemaResult);
@@ -575,7 +576,7 @@ public override async IAsyncEnumerable ReadAsResultStreamAsy
case 0:
{
var result = await response.ReadAsResultAsync(cancellationToken);
- var sourceSchemaResult = new SourceSchemaResult(Path.Root, result);
+ var sourceSchemaResult = new SourceSchemaResult(CompactPath.Root, result);
configuration.OnSourceSchemaResult?.Invoke(context, node, sourceSchemaResult);
@@ -714,30 +715,33 @@ public override async IAsyncEnumerable ReadAsResultStreamAsy
///
/// A streaming response for a single execution node within a batched HTTP request.
- /// Results are pushed into a <see cref="ConcurrentQueue{T}"/> by the background stream
+ /// Results are pushed into a buffer under lock by the background stream
/// reader and signalled via a lightweight <see cref="AsyncAutoResetEvent"/>.
/// The execution node reads lazily via <see cref="ReadAsResultStreamAsync"/>.
///
- private sealed class NodeResponse : SourceSchemaClientResponse
+ private sealed class NodeResponse(Uri uri, string contentType, bool isSuccessful) : SourceSchemaClientResponse
{
- private readonly ConcurrentQueue<SourceSchemaResult> _results = new();
+#if NET9_0_OR_GREATER
+ private readonly Lock _sync = new();
+#else
+ private readonly object _sync = new();
+#endif
+ private const int InitialCapacity = 32;
+ private static readonly ArrayPool<SourceSchemaResult> s_pool = ArrayPool<SourceSchemaResult>.Shared;
private readonly AsyncAutoResetEvent _signal = new();
+ private SourceSchemaResult[] _results = s_pool.Rent(InitialCapacity);
+ private int _resultsCount;
+ private SourceSchemaResult[] _drain = s_pool.Rent(InitialCapacity);
+ private int _drainCount;
private volatile bool _completed;
private Exception? _error;
private bool _disposed;
- public NodeResponse(Uri uri, string contentType, bool isSuccessful)
- {
- Uri = uri;
- ContentType = contentType;
- IsSuccessful = isSuccessful;
- }
-
- public override Uri Uri { get; }
+ public override Uri Uri { get; } = uri;
- public override string ContentType { get; }
+ public override string ContentType { get; } = contentType;
- public override bool IsSuccessful { get; }
+ public override bool IsSuccessful { get; } = isSuccessful;
///
/// Gets whether at least one result has been written to this response.
@@ -752,7 +756,19 @@ internal bool TryWrite(SourceSchemaResult result)
return false;
}
- _results.Enqueue(result);
+ lock (_sync)
+ {
+ if (_resultsCount == _results.Length)
+ {
+ var newArray = s_pool.Rent(_results.Length * 2);
+ _results.AsSpan(0, _resultsCount).CopyTo(newArray);
+ s_pool.Return(_results, clearArray: true);
+ _results = newArray;
+ }
+
+ _results[_resultsCount++] = result;
+ }
+
_signal.Set();
return true;
}
@@ -771,18 +787,20 @@ public override async IAsyncEnumerable ReadAsResultStreamAsy
{
cancellationToken.ThrowIfCancellationRequested();
- while (_results.TryDequeue(out var result))
+ var (buffer, count) = Drain();
+ for (var i = 0; i < count; i++)
{
- yield return result;
+ yield return buffer[i];
}
if (_completed)
{
- // Final drain — writer may have enqueued between our last
- // TryDequeue and the completion flag becoming visible.
- while (_results.TryDequeue(out var result))
+ // Final drain, writer may have enqueued between our last
+ // drain and the completion flag becoming visible.
+ (buffer, count) = Drain();
+ for (var i = 0; i < count; i++)
{
- yield return result;
+ yield return buffer[i];
}
if (_error is not null)
@@ -797,6 +815,29 @@ public override async IAsyncEnumerable ReadAsResultStreamAsy
}
}
+ private (SourceSchemaResult[] Buffer, int Count) Drain()
+ {
+ lock (_sync)
+ {
+ if (_resultsCount == 0)
+ {
+ return (Array.Empty<SourceSchemaResult>(), 0);
+ }
+
+ // Clear the previous drain buffer so it's ready
+ // to become the next write target.
+ _drain.AsSpan(0, _drainCount).Clear();
+ _drainCount = 0;
+
+ // Swap the buffers so the writer can keep adding
+ // while we drain outside the lock.
+ (_results, _drain) = (_drain, _results);
+ (_resultsCount, _drainCount) = (0, _resultsCount);
+ }
+
+ return (_drain, _drainCount);
+ }
+
public override void Dispose()
{
if (_disposed)
@@ -808,9 +849,18 @@ public override void Dispose()
Complete();
- while (_results.TryDequeue(out var result))
+ var (buffer, count) = Drain();
+ for (var i = 0; i < count; i++)
+ {
+ buffer[i].Dispose();
+ }
+
+ lock (_sync)
{
- result.Dispose();
+ s_pool.Return(_results, clearArray: true);
+ s_pool.Return(_drain, clearArray: true);
+ _results = [];
+ _drain = [];
}
}
}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaResult.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaResult.cs
index 4e729741cb7..0561105e458 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaResult.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaResult.cs
@@ -26,22 +26,21 @@ public sealed class SourceSchemaResult : IDisposable
/// Whether this is the final message in a streaming response.
/// Any additional paths where this result should also be merged.
public SourceSchemaResult(
- Path path,
+ CompactPath path,
SourceResultDocument document,
FinalMessage final = FinalMessage.Undefined,
- ImmutableArray<Path> additionalPaths = default)
+ ImmutableArray<CompactPath> additionalPaths = default)
: this(path, document, final, ownsDocument: true, additionalPaths)
{
}
private SourceSchemaResult(
- Path path,
+ CompactPath path,
SourceResultDocument document,
FinalMessage final,
bool ownsDocument,
- ImmutableArray<Path> additionalPaths)
+ ImmutableArray<CompactPath> additionalPaths)
{
- ArgumentNullException.ThrowIfNull(path);
ArgumentNullException.ThrowIfNull(document);
_document = document;
@@ -54,13 +53,13 @@ private SourceSchemaResult(
/// <summary>
/// The primary path in the composite result into which this source schema result will be merged.
/// </summary>
- public Path Path { get; }
+ public CompactPath Path { get; }
/// <summary>
/// Additional paths where this result should also be merged, used when a single source
/// schema response satisfies multiple selection sets at different locations.
/// </summary>
- public ImmutableArray<Path> AdditionalPaths { get; }
+ public ImmutableArray<CompactPath> AdditionalPaths { get; }
/// <summary>
/// The data element of the source schema response, or an empty element if the
@@ -132,7 +131,7 @@ public SourceResultElement Extensions
/// of the underlying document. Used internally when the same result needs to be referenced
/// at a different location in the composite result.
/// </summary>
- internal SourceSchemaResult WithPath(Path path)
+ internal SourceSchemaResult WithPath(CompactPath path)
=> new(path, _document, Final, ownsDocument: false, additionalPaths: []);
/// <summary>
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/FusionOptions.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/FusionOptions.cs
index 55d7495d564..7eaca8831dd 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/FusionOptions.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/FusionOptions.cs
@@ -80,6 +80,27 @@ public int OperationDocumentCacheSize
}
} = 256;
+ /// <summary>
+ /// Gets or sets the initial capacity of the local path segment pool used during result composition.
+ /// 64 by default. 1 is the minimum.
+ /// </summary>
+ public int PathSegmentLocalPoolCapacity
+ {
+ get;
+ set
+ {
+ ExpectMutableOptions();
+
+ if (value < 1)
+ {
+ throw new ArgumentException(
+ "The path segment local pool capacity must be at least 1.");
+ }
+
+ field = value;
+ }
+ } = 64;
+
/// <summary>
/// Gets or sets the default error handling mode.
/// by default.
@@ -160,6 +181,7 @@ public FusionOptions Clone()
OperationExecutionPlanCacheSize = OperationExecutionPlanCacheSize,
OperationExecutionPlanCacheDiagnostics = OperationExecutionPlanCacheDiagnostics,
OperationDocumentCacheSize = OperationDocumentCacheSize,
+ PathSegmentLocalPoolCapacity = PathSegmentLocalPoolCapacity,
DefaultErrorHandlingMode = DefaultErrorHandlingMode,
LazyInitialization = LazyInitialization,
NodeIdSerializerFormat = NodeIdSerializerFormat,
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationBatchExecutionNode.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationBatchExecutionNode.cs
index 29a6d3862ba..194d165d791 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationBatchExecutionNode.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationBatchExecutionNode.cs
@@ -3,6 +3,7 @@
using System.Runtime.InteropServices;
using HotChocolate.Execution;
using HotChocolate.Fusion.Execution.Clients;
+using HotChocolate.Fusion.Text.Json;
namespace HotChocolate.Fusion.Execution.Nodes;
@@ -294,7 +295,7 @@ private static void AddErrors(
pathBufferLength += 1 + variables[i].AdditionalPaths.Length;
}
- var pathBuffer = ArrayPool<Path>.Shared.Rent(pathBufferLength);
+ var pathBuffer = ArrayPool<CompactPath>.Shared.Rent(pathBufferLength);
try
{
@@ -315,7 +316,7 @@ private static void AddErrors(
finally
{
pathBuffer.AsSpan(0, pathBufferLength).Clear();
- ArrayPool<Path>.Shared.Return(pathBuffer);
+ ArrayPool<CompactPath>.Shared.Return(pathBuffer);
}
}
}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationExecutionNode.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationExecutionNode.cs
index e0a7d462099..bb749e11c3f 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationExecutionNode.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationExecutionNode.cs
@@ -6,6 +6,7 @@
using HotChocolate.Execution;
using HotChocolate.Fusion.Diagnostics;
using HotChocolate.Fusion.Execution.Clients;
+using HotChocolate.Fusion.Text.Json;
namespace HotChocolate.Fusion.Execution.Nodes;
@@ -343,7 +344,7 @@ private static void AddErrors(
pathBufferLength += 1 + variables[i].AdditionalPaths.Length;
}
- var pathBuffer = ArrayPool<Path>.Shared.Rent(pathBufferLength);
+ var pathBuffer = ArrayPool<CompactPath>.Shared.Rent(pathBufferLength);
try
{
@@ -364,7 +365,7 @@ private static void AddErrors(
finally
{
pathBuffer.AsSpan(0, pathBufferLength).Clear();
- ArrayPool<Path>.Shared.Return(pathBuffer);
+ ArrayPool<CompactPath>.Shared.Return(pathBuffer);
}
}
}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanFormatter.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanFormatter.cs
index 956c1d4388d..754909a140c 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanFormatter.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanFormatter.cs
@@ -9,6 +9,14 @@
namespace HotChocolate.Fusion.Execution.Nodes.Serialization;
+/// <summary>
+/// Formats an <see cref="OperationPlan"/> as a JSON document,
+/// including its operation metadata, execution nodes, and optional trace information.
+/// </summary>
+/// <param name="options">
+/// Optional <see cref="JsonWriterOptions"/> to control JSON formatting.
+/// Defaults to compact (non-indented) output with relaxed encoding.
+/// </param>
public sealed class JsonOperationPlanFormatter(JsonWriterOptions? options = null) : OperationPlanFormatter
{
private readonly JsonWriterOptions _writerOptions = options ?? new JsonWriterOptions
@@ -17,6 +25,7 @@ public sealed class JsonOperationPlanFormatter(JsonWriterOptions? options = null
Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping
};
+ /// <inheritdoc />
public override string Format(OperationPlan plan, OperationPlanTrace? trace = null)
{
using var writer = new PooledArrayWriter();
@@ -24,6 +33,13 @@ public override string Format(OperationPlan plan, OperationPlanTrace? trace = nu
return Encoding.UTF8.GetString(writer.WrittenSpan);
}
+ /// <summary>
+ /// Formats the specified <see cref="OperationPlan"/> as JSON and writes the
+ /// UTF-8 encoded output to <paramref name="writer"/>.
+ /// </summary>
+ /// <param name="writer">The buffer writer to receive the JSON output.</param>
+ /// <param name="plan">The operation plan to format.</param>
+ /// <param name="trace">Optional trace information to include in the output.</param>
public void Format(IBufferWriter<byte> writer, OperationPlan plan, OperationPlanTrace? trace = null)
{
using var jsonWriter = new Utf8JsonWriter(writer, _writerOptions);
@@ -58,7 +74,7 @@ public void Format(IBufferWriter writer, OperationPlan plan, OperationPlan
}
jsonWriter.WritePropertyName("nodes");
- WriteNodes(jsonWriter, plan.AllNodes, trace);
+ WriteNodes(jsonWriter, plan.Operation, plan.AllNodes, trace);
jsonWriter.WriteEndObject();
}
@@ -72,7 +88,7 @@ internal void Format(IBufferWriter writer, Operation operation, ImmutableA
WriteOperation(jsonWriter, operation);
jsonWriter.WritePropertyName("nodes");
- WriteNodes(jsonWriter, allNodes, null);
+ WriteNodes(jsonWriter, operation, allNodes, null);
jsonWriter.WriteEndObject();
}
@@ -100,6 +116,7 @@ private static void WriteOperation(
private static void WriteNodes(
Utf8JsonWriter jsonWriter,
+ Operation operation,
ImmutableArray<ExecutionNode> allNodes,
OperationPlanTrace? trace)
{
@@ -113,19 +130,19 @@ private static void WriteNodes(
switch (node)
{
case OperationExecutionNode operationNode:
- WriteOperationNode(jsonWriter, operationNode, nodeTrace);
+ WriteOperationNode(jsonWriter, operation, operationNode, nodeTrace);
break;
case OperationBatchExecutionNode batchNode:
- WriteOperationBatchNode(jsonWriter, batchNode, nodeTrace);
+ WriteOperationBatchNode(jsonWriter, operation, batchNode, nodeTrace);
break;
case IntrospectionExecutionNode introspectionNode:
- WriteIntrospectionNode(jsonWriter, introspectionNode, nodeTrace);
+ WriteIntrospectionNode(jsonWriter, operation, introspectionNode, nodeTrace);
break;
case NodeFieldExecutionNode nodeExecutionNode:
- WriteNodeFieldNode(jsonWriter, nodeExecutionNode, nodeTrace);
+ WriteNodeFieldNode(jsonWriter, operation, nodeExecutionNode, nodeTrace);
break;
}
}
@@ -135,6 +152,7 @@ private static void WriteNodes(
private static void WriteOperationNode(
Utf8JsonWriter jsonWriter,
+ Operation operation,
OperationExecutionNode node,
ExecutionNodeTrace? trace)
{
@@ -229,13 +247,14 @@ private static void WriteOperationNode(
jsonWriter.WriteEndArray();
}
- TryWriteNodeTrace(jsonWriter, trace);
+ TryWriteNodeTrace(jsonWriter, operation, trace);
jsonWriter.WriteEndObject();
}
private static void WriteOperationBatchNode(
Utf8JsonWriter jsonWriter,
+ Operation operation,
OperationBatchExecutionNode node,
ExecutionNodeTrace? trace)
{
@@ -334,13 +353,14 @@ private static void WriteOperationBatchNode(
jsonWriter.WriteEndArray();
}
- TryWriteNodeTrace(jsonWriter, trace);
+ TryWriteNodeTrace(jsonWriter, operation, trace);
jsonWriter.WriteEndObject();
}
private static void WriteIntrospectionNode(
Utf8JsonWriter jsonWriter,
+ Operation operation,
IntrospectionExecutionNode node,
ExecutionNodeTrace? trace)
{
@@ -363,12 +383,16 @@ private static void WriteIntrospectionNode(
TryWriteConditions(jsonWriter, node);
- TryWriteNodeTrace(jsonWriter, trace);
+ TryWriteNodeTrace(jsonWriter, operation, trace);
jsonWriter.WriteEndObject();
}
- private static void WriteNodeFieldNode(Utf8JsonWriter jsonWriter, NodeFieldExecutionNode node, ExecutionNodeTrace? trace)
+ private static void WriteNodeFieldNode(
+ Utf8JsonWriter jsonWriter,
+ Operation operation,
+ NodeFieldExecutionNode node,
+ ExecutionNodeTrace? trace)
{
jsonWriter.WriteStartObject();
jsonWriter.WriteNumber("id", node.Id);
@@ -390,12 +414,12 @@ private static void WriteNodeFieldNode(Utf8JsonWriter jsonWriter, NodeFieldExecu
TryWriteConditions(jsonWriter, node);
- TryWriteNodeTrace(jsonWriter, trace);
+ TryWriteNodeTrace(jsonWriter, operation, trace);
jsonWriter.WriteEndObject();
}
- private static void TryWriteNodeTrace(Utf8JsonWriter jsonWriter, ExecutionNodeTrace? trace)
+ private static void TryWriteNodeTrace(Utf8JsonWriter jsonWriter, Operation operation, ExecutionNodeTrace? trace)
{
if (trace is not null)
{
@@ -413,7 +437,7 @@ private static void TryWriteNodeTrace(Utf8JsonWriter jsonWriter, ExecutionNodeTr
foreach (var variableSet in trace.VariableSets)
{
- jsonWriter.WritePropertyName(variableSet.Path.ToString());
+ jsonWriter.WritePropertyName(variableSet.Path.ToPath(operation).Print());
WriteObjectValueNode(jsonWriter, variableSet.Values);
}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanParser.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanParser.cs
index 64ae99b087f..34448429b22 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanParser.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanParser.cs
@@ -6,10 +6,20 @@
namespace HotChocolate.Fusion.Execution.Nodes.Serialization;
+/// <summary>
+/// Parses a JSON-encoded operation plan into an <see cref="OperationPlan"/>,
+/// reconstructing the operation, execution nodes, and their dependency graph.
+/// </summary>
public sealed class JsonOperationPlanParser : OperationPlanParser
{
private readonly OperationCompiler _operationCompiler;
+ /// <summary>
+ /// Initializes a new instance of <see cref="JsonOperationPlanParser"/>.
+ /// </summary>
+ /// <param name="operationCompiler">
+ /// The compiler used to compile parsed operation definitions.
+ /// </param>
public JsonOperationPlanParser(OperationCompiler operationCompiler)
{
ArgumentNullException.ThrowIfNull(operationCompiler);
@@ -17,6 +27,7 @@ public JsonOperationPlanParser(OperationCompiler operationCompiler)
_operationCompiler = operationCompiler;
}
+ /// <inheritdoc />
public override OperationPlan Parse(ReadOnlyMemory<byte> planSourceText)
{
using var document = JsonDocument.Parse(planSourceText);
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/OperationPlanFormatter.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/OperationPlanFormatter.cs
index f21bf65c656..6d2c54602ca 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/OperationPlanFormatter.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/OperationPlanFormatter.cs
@@ -1,6 +1,16 @@
namespace HotChocolate.Fusion.Execution.Nodes.Serialization;
+/// <summary>
+/// Base class for formatters that serialize an <see cref="OperationPlan"/>
+/// into a human- or machine-readable string representation.
+/// </summary>
public abstract class OperationPlanFormatter
{
+ /// <summary>
+ /// Formats the specified <see cref="OperationPlan"/> as a string.
+ /// </summary>
+ /// <param name="plan">The operation plan to format.</param>
+ /// <param name="trace">Optional trace information to include in the output.</param>
+ /// <returns>A string representation of the operation plan.</returns>
public abstract string Format(OperationPlan plan, OperationPlanTrace? trace = null);
}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/OperationPlanParser.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/OperationPlanParser.cs
index 5ac1d452789..1018f1cb07a 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/OperationPlanParser.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/OperationPlanParser.cs
@@ -1,6 +1,15 @@
namespace HotChocolate.Fusion.Execution.Nodes.Serialization;
+/// <summary>
+/// Base class for parsers that deserialize an <see cref="OperationPlan"/>
+/// from its serialized representation.
+/// </summary>
public abstract class OperationPlanParser
{
+ /// <summary>
+ /// Parses the specified <paramref name="planSourceText"/> into an <see cref="OperationPlan"/>.
+ /// </summary>
+ /// <param name="planSourceText">The serialized operation plan bytes to parse.</param>
+ /// <returns>The deserialized <see cref="OperationPlan"/>.</returns>
public abstract OperationPlan Parse(ReadOnlyMemory<byte> planSourceText);
}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/YamlOperationPlanFormatter.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/YamlOperationPlanFormatter.cs
index e8499be2cc9..d028ab0cef5 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/YamlOperationPlanFormatter.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/YamlOperationPlanFormatter.cs
@@ -2,8 +2,14 @@
namespace HotChocolate.Fusion.Execution.Nodes.Serialization;
+/// <summary>
+/// Formats an <see cref="OperationPlan"/> as a YAML document.
+/// This formatter is intended for testing purposes and is primarily used
+/// to produce human-readable test snapshots.
+/// </summary>
public sealed class YamlOperationPlanFormatter : OperationPlanFormatter
{
+ /// <inheritdoc />
public override string Format(OperationPlan plan, OperationPlanTrace? trace = null)
{
var sb = new StringBuilder();
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs
index fe6ed344502..12eb771ee89 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs
@@ -13,6 +13,7 @@
using HotChocolate.Fusion.Text.Json;
using HotChocolate.Fusion.Types;
using HotChocolate.Language;
+using HotChocolate.Types;
using Microsoft.Extensions.DependencyInjection;
namespace HotChocolate.Fusion.Execution;
@@ -27,6 +28,7 @@ public sealed class OperationPlanContext : IFeatureProvider, IAsyncDisposable
private readonly Uri?[] _transportUris;
private readonly string?[] _transportContentTypes;
private readonly IFusionExecutionDiagnosticEvents _diagnosticEvents;
+ private readonly FetchResultStorePool _resultStorePool;
private readonly FetchResultStore _resultStore;
private readonly ExecutionState _executionState;
private readonly SourceSchemaRequestDispatcher _sourceSchemaDispatcher;
@@ -66,12 +68,15 @@ public OperationPlanContext(
_diagnosticEvents = requestContext.Schema.Services.GetRequiredService<IFusionExecutionDiagnosticEvents>();
var errorHandler = requestContext.Schema.Services.GetRequiredService();
- _resultStore = new FetchResultStore(
+ _resultStorePool = requestContext.Schema.Services.GetRequiredService();
+ _resultStore = _resultStorePool.Rent();
+ _resultStore.Initialize(
requestContext.Schema,
errorHandler,
operationPlan.Operation,
requestContext.ErrorHandlingMode(),
- IncludeFlags);
+ IncludeFlags,
+ requestContext.Schema.GetOptions().PathSegmentLocalPoolCapacity);
_executionState = new ExecutionState(_collectTelemetry, cancellationTokenSource);
_sourceSchemaDispatcher = new SourceSchemaRequestDispatcher(this);
@@ -231,7 +236,7 @@ internal ImmutableArray CreateVariableValueSets(
}
var variableValues = GetPathThroughVariables(forwardedVariables);
- return [new VariableValues(Path.Root, new ObjectValueNode(variableValues))];
+ return [new VariableValues(CompactPath.Root, new ObjectValueNode(variableValues))];
}
else
{
@@ -253,7 +258,7 @@ internal ImmutableArray CreateVariableValueSets(
}
var variableValues = GetPathThroughVariables(forwardedVariables);
- return [new VariableValues(Path.Root, new ObjectValueNode(variableValues))];
+ return [new VariableValues(CompactPath.Root, new ObjectValueNode(variableValues))];
}
else
{
@@ -262,21 +267,58 @@ internal ImmutableArray CreateVariableValueSets(
}
}
- private static Path ToResultPath(SelectionPath selectionSet)
+ private CompactPath ToResultPath(SelectionPath selectionSet)
{
- var resultPath = Path.Root;
+ if (selectionSet.IsRoot)
+ {
+ return CompactPath.Root;
+ }
+
+ // CompactPathBuilder can run concurrently across nodes; avoid using the request-local
+ // pool here since that pool is synchronized through FetchResultStore's lock.
+ Span<int> buffer = stackalloc int[32];
+ var builder = new CompactPathBuilder(buffer, pool: null);
+ var operation = OperationPlan.Operation;
+ var currentSelectionSet = operation.RootSelectionSet;
+ Selection? currentSelection = null;
for (var i = 0; i < selectionSet.Length; i++)
{
var segment = selectionSet[i];
- if (segment.Kind is SelectionPathSegmentKind.Root or SelectionPathSegmentKind.Field)
+ if (segment.Kind is SelectionPathSegmentKind.Root)
{
- resultPath = resultPath.Append(segment.Name);
+ continue;
+ }
+
+ if (segment.Kind is SelectionPathSegmentKind.InlineFragment)
+ {
+ if (currentSelection is null)
+ {
+ continue;
+ }
+
+ var objectType = Schema.Types.GetType(segment.Name);
+ currentSelectionSet = operation.GetSelectionSet(currentSelection, objectType);
+ continue;
+ }
+
+ if (!currentSelectionSet.TryGetSelection(segment.Name, out var selection))
+ {
+ throw new InvalidOperationException(
+ $"Could not resolve selection path segment '{segment.Name}'.");
+ }
+
+ builder.AppendField(selection.Id);
+ currentSelection = selection;
+
+ if (selection.Type.NamedType() is IObjectTypeDefinition objectTypeForSelection)
+ {
+ currentSelectionSet = operation.GetSelectionSet(selection, objectTypeForSelection);
}
}
- return resultPath;
+ return builder.ToPath();
}
internal void AddPartialResults(
@@ -286,7 +328,11 @@ internal void AddPartialResults(
bool containsErrors = true)
{
var canExecutionContinue =
- _resultStore.AddPartialResults(sourcePath, results, responseNames, containsErrors);
+ _resultStore.AddPartialResults(
+ sourcePath,
+ results,
+ responseNames,
+ containsErrors);
if (!canExecutionContinue)
{
@@ -314,6 +360,16 @@ internal void AddErrors(IError error, ReadOnlySpan responseNames, params
}
}
+ internal void AddErrors(IError error, ReadOnlySpan<string> responseNames, ReadOnlySpan<CompactPath> paths)
+ {
+ var canExecutionContinue = _resultStore.AddErrors(error, responseNames, paths);
+
+ if (!canExecutionContinue)
+ {
+ ExecutionState.CancelProcessing();
+ }
+ }
+
internal PooledArrayWriter CreateRentedBuffer()
=> _resultStore.CreateRentedBuffer();
@@ -446,7 +502,7 @@ public async ValueTask DisposeAsync()
_disposed = true;
DisposeNodeState();
_sourceSchemaDispatcher.Abort();
- _resultStore.Dispose();
+ _resultStorePool.Return(_resultStore);
await _clientScope.DisposeAsync();
}
}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/AdditionalPathAccumulator.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/AdditionalPathAccumulator.cs
new file mode 100644
index 00000000000..d5bef11394b
--- /dev/null
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/AdditionalPathAccumulator.cs
@@ -0,0 +1,132 @@
+using System.Buffers;
+using System.Runtime.InteropServices;
+using HotChocolate.Fusion.Text.Json;
+
+namespace HotChocolate.Fusion.Execution.Results;
+
+/// <summary>
+/// A flat, allocation-free accumulator for additional CompactPath entries
+/// that replaces per-slot List&lt;CompactPath&gt; with ArrayPool-rented buffers.
+/// Stores (slotIndex, path) pairs and produces ImmutableArray&lt;CompactPath&gt;
+/// per slot via counting sort in ApplyTo.
+/// </summary>
+internal ref struct AdditionalPathAccumulator
+{
+ private CompactPath[]? _paths;
+ private int[]? _slotIndices;
+ private int _count;
+
+ public readonly bool HasEntries => _count > 0;
+
+ public void Add(int slotIndex, CompactPath path)
+ {
+ if (_paths is null)
+ {
+ _paths = ArrayPool<CompactPath>.Shared.Rent(16);
+ _slotIndices = ArrayPool<int>.Shared.Rent(16);
+ }
+ else if (_count == _paths.Length)
+ {
+ Grow();
+ }
+
+ _paths[_count] = path;
+ _slotIndices![_count] = slotIndex;
+ _count++;
+ }
+
+ public void ApplyTo(VariableValues[] variableValueSets, int slotCount)
+ {
+ if (_count == 0)
+ {
+ return;
+ }
+
+ // Count paths per slot.
+ var counts = slotCount <= 256
+ ? stackalloc int[slotCount]
+ : new int[slotCount];
+
+ for (var i = 0; i < _count; i++)
+ {
+ counts[_slotIndices![i]]++;
+ }
+
+ // Compute start offsets (exclusive prefix sum).
+ var offsets = slotCount <= 256
+ ? stackalloc int[slotCount]
+ : new int[slotCount];
+
+ offsets[0] = 0;
+ for (var i = 1; i < slotCount; i++)
+ {
+ offsets[i] = offsets[i - 1] + counts[i - 1];
+ }
+
+ // Scatter paths into sorted order.
+ var writePos = slotCount <= 256
+ ? stackalloc int[slotCount]
+ : new int[slotCount];
+ offsets.CopyTo(writePos);
+
+ var sorted = ArrayPool<CompactPath>.Shared.Rent(_count);
+
+ for (var i = 0; i < _count; i++)
+ {
+ var idx = _slotIndices![i];
+ sorted[writePos[idx]++] = _paths![i];
+ }
+
+ // Build ImmutableArray for each non-empty slot from contiguous slices.
+ for (var slot = 0; slot < slotCount; slot++)
+ {
+ if (counts[slot] == 0)
+ {
+ continue;
+ }
+
+ var array = sorted.AsSpan(offsets[slot], counts[slot]).ToArray();
+ variableValueSets[slot] = variableValueSets[slot] with
+ {
+ AdditionalPaths = ImmutableCollectionsMarshal.AsImmutableArray(array)
+ };
+ }
+
+ sorted.AsSpan(0, _count).Clear();
+ ArrayPool<CompactPath>.Shared.Return(sorted);
+ }
+
+ private void Grow()
+ {
+ var newSize = _paths!.Length * 2;
+
+ var newPaths = ArrayPool<CompactPath>.Shared.Rent(newSize);
+ _paths.AsSpan(0, _count).CopyTo(newPaths);
+ _paths.AsSpan(0, _count).Clear();
+ ArrayPool<CompactPath>.Shared.Return(_paths);
+ _paths = newPaths;
+
+ var newIndices = ArrayPool<int>.Shared.Rent(newSize);
+ _slotIndices.AsSpan(0, _count).CopyTo(newIndices);
+ ArrayPool<int>.Shared.Return(_slotIndices!);
+ _slotIndices = newIndices;
+ }
+
+ public void Dispose()
+ {
+ if (_paths is not null)
+ {
+ _paths.AsSpan(0, _count).Clear();
+ ArrayPool<CompactPath>.Shared.Return(_paths);
+ _paths = null;
+ }
+
+ if (_slotIndices is not null)
+ {
+ ArrayPool<int>.Shared.Return(_slotIndices);
+ _slotIndices = null;
+ }
+
+ _count = 0;
+ }
+}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.Pooling.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.Pooling.cs
new file mode 100644
index 00000000000..acb2c2f1f2d
--- /dev/null
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.Pooling.cs
@@ -0,0 +1,145 @@
+using System.Buffers;
+using HotChocolate.Execution;
+using HotChocolate.Fusion.Execution.Nodes;
+using HotChocolate.Fusion.Text.Json;
+using HotChocolate.Language;
+
+namespace HotChocolate.Fusion.Execution.Results;
+
+internal sealed partial class FetchResultStore
+{
+ /// <summary>
+ /// Initializes the <see cref="FetchResultStore"/> for a new request.
+ /// </summary>
+ public void Initialize(
+ ISchemaDefinition schema,
+ IErrorHandler errorHandler,
+ Operation operation,
+ ErrorHandlingMode errorHandlingMode,
+ ulong includeFlags,
+ int pathSegmentLocalPoolCapacity)
+ {
+ ArgumentNullException.ThrowIfNull(schema);
+ ArgumentNullException.ThrowIfNull(operation);
+
+ _schema = schema;
+ _errorHandler = errorHandler;
+ _operation = operation;
+ _errorHandlingMode = errorHandlingMode;
+ _includeFlags = includeFlags;
+ _disposed = false;
+
+ _pathPool ??= new PathSegmentLocalPool(pathSegmentLocalPoolCapacity);
+ _result = new CompositeResultDocument(operation, includeFlags, _pathPool);
+
+ _valueCompletion = new ValueCompletion(
+ this,
+ _schema,
+ _errorHandler,
+ _errorHandlingMode,
+ maxDepth: 32);
+
+ _memory.Push(_result);
+ }
+
+ public void Reset()
+ {
+ ObjectDisposedException.ThrowIf(_disposed, this);
+
+ _result = new CompositeResultDocument(_operation, _includeFlags, _pathPool);
+ _errors?.Clear();
+
+ _valueCompletion = new ValueCompletion(
+ this,
+ _schema,
+ _errorHandler,
+ _errorHandlingMode,
+ maxDepth: 32);
+
+ _memory.Push(_result);
+ }
+
+ /// <summary>
+ /// Cleans the store for return to the pool.
+ /// Releases per-request state while retaining reusable buffers.
+ /// </summary>
+ internal void Clean(int maxCollectTargetRetainLength, int maxDictionaryRetainCapacity)
+ {
+ // drain and dispose per-request memory
+ while (_memory.TryPop(out var memory))
+ {
+ memory.Dispose();
+ }
+
+ // return path segments to global pool and reset local pool
+ _pathPool.Dispose();
+ _pathPool = null!;
+
+ // clear errors
+ _errors?.Clear();
+
+ // clear collect target arrays to unroot CompositeResultDocument references;
+ // if they grew too large during a burst, swap them for smaller ones.
+ TrimOrClearBuffer(ref _collectTargetA, maxCollectTargetRetainLength);
+ TrimOrClearBuffer(ref _collectTargetB, maxCollectTargetRetainLength);
+ TrimOrClearBuffer(ref _collectTargetCombined, maxCollectTargetRetainLength);
+
+ // clear dictionaries/hashsets; drop oversized ones.
+ TrimOrClear(ref _seenPaths, maxDictionaryRetainCapacity, ReferenceEqualityComparer.Instance);
+ TrimOrClear(ref _seenStrings, maxDictionaryRetainCapacity, StringComparer.Ordinal);
+ TrimOrClear(ref _seenValueNodes, maxDictionaryRetainCapacity, SingleValueNodeComparer.Instance);
+ TrimOrClear(ref _seenTwoValueTuples, maxDictionaryRetainCapacity, TwoValueNodeTupleComparer.Instance);
+ TrimOrClear(ref _seenThreeValueTuples, maxDictionaryRetainCapacity, ThreeValueNodeTupleComparer.Instance);
+
+ // null out per-request references
+ _result = default!;
+ _valueCompletion = default!;
+ _schema = default!;
+ _errorHandler = default!;
+ _operation = default!;
+ }
+
+ private static void TrimOrClearBuffer(ref CompositeResultElement[] buffer, int maxRetainLength)
+ {
+ if (buffer.Length > maxRetainLength)
+ {
+ ArrayPool<CompositeResultElement>.Shared.Return(buffer, clearArray: true);
+ buffer = ArrayPool<CompositeResultElement>.Shared.Rent(64);
+ }
+ else
+ {
+ buffer.AsSpan().Clear();
+ }
+ }
+
+ private static void TrimOrClear<T>(
+ ref HashSet<T> set,
+ int maxRetainCapacity,
+ IEqualityComparer<T> comparer)
+ {
+ if (set.Count > maxRetainCapacity)
+ {
+ set = new HashSet<T>(comparer);
+ }
+ else
+ {
+ set.Clear();
+ }
+ }
+
+ private static void TrimOrClear<TKey, TValue>(
+ ref Dictionary<TKey, TValue> dict,
+ int maxRetainCapacity,
+ IEqualityComparer<TKey> comparer)
+ where TKey : notnull
+ {
+ if (dict.Count > maxRetainCapacity)
+ {
+ dict = new Dictionary<TKey, TValue>(comparer);
+ }
+ else
+ {
+ dict.Clear();
+ }
+ }
+}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs
index 91b5524efe1..af0902d5a39 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs
@@ -17,72 +17,33 @@
namespace HotChocolate.Fusion.Execution.Results;
-internal sealed class FetchResultStore : IDisposable
+internal sealed partial class FetchResultStore : IDisposable
{
#if NET9_0_OR_GREATER
private readonly Lock _lock = new();
#else
private readonly object _lock = new();
#endif
- private readonly ISchemaDefinition _schema;
- private readonly IErrorHandler _errorHandler;
- private readonly Operation _operation;
- private readonly ErrorHandlingMode _errorHandlingMode;
- private readonly ulong _includeFlags;
private readonly ConcurrentStack _memory = [];
+ private ISchemaDefinition _schema = default!;
+ private IErrorHandler _errorHandler = default!;
+ private Operation _operation = default!;
+ private ErrorHandlingMode _errorHandlingMode;
+ private ulong _includeFlags;
private CompositeResultElement[] _collectTargetA = ArrayPool.Shared.Rent(64);
private CompositeResultElement[] _collectTargetB = ArrayPool.Shared.Rent(64);
private CompositeResultElement[] _collectTargetCombined = ArrayPool.Shared.Rent(64);
- private CompositeResultDocument _result;
- private ValueCompletion _valueCompletion;
+ private PathSegmentLocalPool _pathPool = default!;
+ private HashSet _seenPaths = new(ReferenceEqualityComparer.Instance);
+ private Dictionary<string, int> _seenStrings = new(StringComparer.Ordinal);
+ private Dictionary<IValueNode, int> _seenValueNodes = new(SingleValueNodeComparer.Instance);
+ private Dictionary<TwoValueNodeTuple, int> _seenTwoValueTuples = new(TwoValueNodeTupleComparer.Instance);
+ private Dictionary<ThreeValueNodeTuple, int> _seenThreeValueTuples = new(ThreeValueNodeTupleComparer.Instance);
+ private CompositeResultDocument _result = default!;
+ private ValueCompletion _valueCompletion = default!;
private List? _errors;
private bool _disposed;
- public FetchResultStore(
- ISchemaDefinition schema,
- IErrorHandler errorHandler,
- Operation operation,
- ErrorHandlingMode errorHandlingMode,
- ulong includeFlags)
- {
- ArgumentNullException.ThrowIfNull(schema);
- ArgumentNullException.ThrowIfNull(operation);
-
- _schema = schema;
- _errorHandler = errorHandler;
- _operation = operation;
- _errorHandlingMode = errorHandlingMode;
- _includeFlags = includeFlags;
-
- _result = new CompositeResultDocument(operation, includeFlags);
-
- _valueCompletion = new ValueCompletion(
- this,
- _schema,
- _errorHandler,
- _errorHandlingMode,
- maxDepth: 32);
-
- _memory.Push(_result);
- }
-
- public void Reset()
- {
- ObjectDisposedException.ThrowIf(_disposed, this);
-
- _result = new CompositeResultDocument(_operation, _includeFlags);
- _errors?.Clear();
-
- _valueCompletion = new ValueCompletion(
- this,
- _schema,
- _errorHandler,
- _errorHandlingMode,
- maxDepth: 32);
-
- _memory.Push(_result);
- }
-
public CompositeResultDocument Result => _result;
public IReadOnlyList? Errors => _errors;
@@ -164,13 +125,15 @@ public bool AddPartialResults(
{
var result = results[i];
- if (!SaveSafeResult(
- resultData,
- result.Path,
- result.AdditionalPaths.AsSpan(),
- dataElementsSpan[i],
- errorTriesSpan[i],
- responseNames))
+ var success = SaveSafeResult(
+ resultData,
+ result.Path,
+ result.AdditionalPaths.AsSpan(),
+ dataElementsSpan[i],
+ errorTriesSpan[i],
+ responseNames);
+
+ if (!success)
{
return false;
}
@@ -181,6 +144,11 @@ public bool AddPartialResults(
}
finally
{
+ lock (_lock)
+ {
+ ReturnPathSegments(results);
+ }
+
dataElementsSpan.Clear();
errorTriesSpan.Clear();
ArrayPool.Shared.Return(dataElements);
@@ -213,13 +181,15 @@ private bool AddPartialResultsNoErrors(
{
var result = results[i];
- if (!SaveSafeResult(
- resultData,
- result.Path,
- result.AdditionalPaths.AsSpan(),
- dataElementsSpan[i],
- errorTrie: null,
- responseNames))
+ var success = SaveSafeResult(
+ resultData,
+ result.Path,
+ result.AdditionalPaths.AsSpan(),
+ dataElementsSpan[i],
+ errorTrie: null,
+ responseNames);
+
+ if (!success)
{
return false;
}
@@ -230,6 +200,11 @@ private bool AddPartialResultsNoErrors(
}
finally
{
+ lock (_lock)
+ {
+ ReturnPathSegments(results);
+ }
+
dataElementsSpan.Clear();
ArrayPool.Shared.Return(dataElements);
}
@@ -246,21 +221,31 @@ private bool AddSinglePartialResult(
var dataElement = GetDataElement(sourcePath, result.Data);
var errorTrie = GetErrorTrie(sourcePath, errors?.Trie);
- lock (_lock)
+ try
{
- if (errors?.RootErrors is { Length: > 0 } rootErrors)
+ lock (_lock)
{
- _errors ??= [];
- _errors.AddRange(rootErrors);
- }
+ if (errors?.RootErrors is { Length: > 0 } rootErrors)
+ {
+ _errors ??= [];
+ _errors.AddRange(rootErrors);
+ }
- return SaveSafeResult(
- _result.Data,
- result.Path,
- result.AdditionalPaths.AsSpan(),
- dataElement,
- errorTrie,
- responseNames);
+ return SaveSafeResult(
+ _result.Data,
+ result.Path,
+ result.AdditionalPaths.AsSpan(),
+ dataElement,
+ errorTrie,
+ responseNames);
+ }
+ }
+ finally
+ {
+ lock (_lock)
+ {
+ ReturnPathSegments(result);
+ }
}
}
@@ -272,15 +257,25 @@ private bool AddSinglePartialResultNoErrors(
_memory.Push(result);
var dataElement = GetDataElement(sourcePath, result.Data);
- lock (_lock)
+ try
{
- return SaveSafeResult(
- _result.Data,
- result.Path,
- result.AdditionalPaths.AsSpan(),
- dataElement,
- errorTrie: null,
- responseNames);
+ lock (_lock)
+ {
+ return SaveSafeResult(
+ _result.Data,
+ result.Path,
+ result.AdditionalPaths.AsSpan(),
+ dataElement,
+ errorTrie: null,
+ responseNames);
+ }
+ }
+ finally
+ {
+ lock (_lock)
+ {
+ ReturnPathSegments(result);
+ }
}
}
@@ -329,6 +324,47 @@ public bool AddErrors(IError error, ReadOnlySpan responseNames, params R
ref var end = ref Unsafe.Add(ref path, paths.Length);
var resultData = _result.Data;
+ while (Unsafe.IsAddressLessThan(ref path, ref end))
+ {
+ if (resultData.IsInvalidated)
+ {
+ return false;
+ }
+
+ var element = path.IsRoot ? resultData : GetStartObjectResult(path);
+ if (element.IsNullOrInvalidated)
+ {
+ goto AddErrors_Next;
+ }
+
+ var canExecutionContinue =
+ _valueCompletion.BuildErrorResult(
+ element,
+ responseNames,
+ error,
+ element.CompactPath);
+ if (!canExecutionContinue)
+ {
+ resultData.Invalidate();
+ return false;
+ }
+
+AddErrors_Next:
+ path = ref Unsafe.Add(ref path, 1)!;
+ }
+ }
+
+ return true;
+ }
+
+ public bool AddErrors(IError error, ReadOnlySpan<string> responseNames, ReadOnlySpan<CompactPath> paths)
+ {
+ lock (_lock)
+ {
+ ref var path = ref MemoryMarshal.GetReference(paths);
+ ref var end = ref Unsafe.Add(ref path, paths.Length);
+ var resultData = _result.Data;
+
while (Unsafe.IsAddressLessThan(ref path, ref end))
{
if (resultData.IsInvalidated)
@@ -364,8 +400,8 @@ public bool AddErrors(IError error, ReadOnlySpan responseNames, params R
private bool SaveSafeResult(
CompositeResultElement resultData,
- Path path,
- ReadOnlySpan additionalPaths,
+ CompactPath path,
+ ReadOnlySpan additionalPaths,
SourceResultElement dataElement,
ErrorTrie? errorTrie,
ReadOnlySpan responseNames)
@@ -388,7 +424,7 @@ private bool SaveSafeResult(
private bool SaveSafeResult(
CompositeResultElement resultData,
- Path path,
+ CompactPath path,
SourceResultElement dataElement,
ErrorTrie? errorTrie,
ReadOnlySpan responseNames)
@@ -570,7 +606,7 @@ private ReadOnlySpan CollectTargetElements(SelectionPath
// Store potentially grown arrays back.
_collectTargetA = current;
_collectTargetB = next;
- return ReadOnlySpan.Empty;
+ return [];
}
}
@@ -624,7 +660,7 @@ private ImmutableArray BuildVariableValueSets(
VariableValues[]? variableValueSets = null;
Dictionary? seen = null;
- List?[]? additionalPaths = null;
+ var additionalPaths = new AdditionalPathAccumulator();
var nextIndex = 0;
foreach (var result in elements)
@@ -647,15 +683,14 @@ private ImmutableArray BuildVariableValueSets(
if (seen.TryGetValue(variables, out var existingIndex))
{
- additionalPaths ??= new List?[elements.Length];
- (additionalPaths[existingIndex] ??= []).Add(result.Path);
+ additionalPaths.Add(existingIndex, result.CompactPath);
continue;
}
seen[variables] = nextIndex;
}
- variableValueSets[nextIndex++] = new VariableValues(result.Path, variables);
+ variableValueSets[nextIndex++] = new VariableValues(result.CompactPath, variables);
}
if (buffer is not null)
@@ -663,7 +698,7 @@ private ImmutableArray BuildVariableValueSets(
_memory.Push(buffer);
}
- return FinalizeVariableValueSets(variableValueSets, additionalPaths, nextIndex);
+ return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex);
}
private ImmutableArray BuildVariableValueSetsSingleRequirement(
@@ -690,9 +725,7 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementFa
ref PooledArrayWriter? buffer)
{
VariableValues[]? variableValueSets = null;
- Dictionary? seen = null;
- Dictionary? seenStrings = null;
- List?[]? additionalPaths = null;
+ var additionalPaths = new AdditionalPathAccumulator();
var nextIndex = 0;
var isNonNullRequirement = requirement.Type.Kind is SyntaxKind.NonNullType;
@@ -724,36 +757,30 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementFa
{
var stringValue = value.AssertString();
- if (seenStrings is not null
- && seenStrings.TryGetValue(stringValue, out var existingIndex))
+ if (_seenStrings.TryGetValue(stringValue, out var existingIndex))
{
- additionalPaths ??= new List?[elements.Length];
- (additionalPaths[existingIndex] ??= []).Add(result.Path);
+ additionalPaths.Add(existingIndex, result.CompactPath);
continue;
}
mappedValue = ResultDataMapper.GetStringValueNode(stringValue);
- seenStrings ??= new Dictionary(elements.Length, StringComparer.Ordinal);
- seenStrings[stringValue] = nextIndex;
+ _seenStrings[stringValue] = nextIndex;
}
else
{
mappedValue = ResultDataMapper.MapLeafValue(value, ref buffer);
- if (seen is not null
- && seen.TryGetValue(mappedValue, out var existingIndex))
+ if (_seenValueNodes.TryGetValue(mappedValue, out var existingIndex))
{
- additionalPaths ??= new List?[elements.Length];
- (additionalPaths[existingIndex] ??= []).Add(result.Path);
+ additionalPaths.Add(existingIndex, result.CompactPath);
continue;
}
- seen ??= new Dictionary(elements.Length, SingleValueNodeComparer.Instance);
- seen[mappedValue] = nextIndex;
+ _seenValueNodes[mappedValue] = nextIndex;
}
variableValueSets[nextIndex++] = new VariableValues(
- result.Path,
+ result.CompactPath,
new ObjectValueNode([
new ObjectFieldNode(
requirement.Key,
@@ -761,7 +788,9 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementFa
]));
}
- return FinalizeVariableValueSets(variableValueSets, additionalPaths, nextIndex);
+ _seenStrings.Clear();
+ _seenValueNodes.Clear();
+ return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex);
}
private ImmutableArray BuildVariableValueSetsSingleRequirementSlowPath(
@@ -770,9 +799,9 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementSl
ref PooledArrayWriter? buffer)
{
VariableValues[]? variableValueSets = null;
- Dictionary? seen = null;
- List?[]? additionalPaths = null;
+ var additionalPaths = new AdditionalPathAccumulator();
var nextIndex = 0;
+ var seeded = false;
foreach (var result in elements)
{
@@ -792,27 +821,28 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementSl
if (nextIndex > 0)
{
- seen ??= new Dictionary(elements.Length, SingleValueNodeComparer.Instance)
+ if (!seeded)
{
- [variableValueSets[0].Values.Fields[0].Value] = 0
- };
+ _seenValueNodes[variableValueSets[0].Values.Fields[0].Value] = 0;
+ seeded = true;
+ }
- if (seen.TryGetValue(value, out var existingIndex))
+ if (_seenValueNodes.TryGetValue(value, out var existingIndex))
{
- additionalPaths ??= new List?[elements.Length];
- (additionalPaths[existingIndex] ??= []).Add(result.Path);
+ additionalPaths.Add(existingIndex, result.CompactPath);
continue;
}
- seen[value] = nextIndex;
+ _seenValueNodes[value] = nextIndex;
}
variableValueSets[nextIndex++] = new VariableValues(
- result.Path,
+ result.CompactPath,
new ObjectValueNode([new ObjectFieldNode(requirement.Key, value)]));
}
- return FinalizeVariableValueSets(variableValueSets, additionalPaths, nextIndex);
+ _seenValueNodes.Clear();
+ return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex);
}
private ImmutableArray BuildVariableValueSetsTwoRequirements(
@@ -849,9 +879,9 @@ private ImmutableArray BuildVariableValueSetsTwoRequirementsFast
ref PooledArrayWriter? buffer)
{
VariableValues[]? variableValueSets = null;
- Dictionary? seen = null;
- List?[]? additionalPaths = null;
+ var additionalPaths = new AdditionalPathAccumulator();
var nextIndex = 0;
+ var seeded = false;
foreach (var result in elements)
{
@@ -878,32 +908,33 @@ private ImmutableArray BuildVariableValueSetsTwoRequirementsFast
if (nextIndex > 0)
{
- seen ??= new Dictionary(elements.Length, TwoValueNodeTupleComparer.Instance)
+ if (!seeded)
{
- [new TwoValueNodeTuple(
+ _seenTwoValueTuples[new TwoValueNodeTuple(
variableValueSets[0].Values.Fields[0].Value,
- variableValueSets[0].Values.Fields[1].Value)] = 0
- };
+ variableValueSets[0].Values.Fields[1].Value)] = 0;
+ seeded = true;
+ }
- if (seen.TryGetValue(key, out var existingIndex))
+ if (_seenTwoValueTuples.TryGetValue(key, out var existingIndex))
{
- additionalPaths ??= new List?[elements.Length];
- (additionalPaths[existingIndex] ??= []).Add(result.Path);
+ additionalPaths.Add(existingIndex, result.CompactPath);
continue;
}
- seen[key] = nextIndex;
+ _seenTwoValueTuples[key] = nextIndex;
}
variableValueSets[nextIndex++] = new VariableValues(
- result.Path,
+ result.CompactPath,
new ObjectValueNode([
new ObjectFieldNode(requirement1.Key, mappedValue1),
new ObjectFieldNode(requirement2.Key, mappedValue2)
]));
}
- return FinalizeVariableValueSets(variableValueSets, additionalPaths, nextIndex);
+ _seenTwoValueTuples.Clear();
+ return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex);
}
private ImmutableArray BuildVariableValueSetsTwoRequirementsSlowPath(
@@ -913,9 +944,9 @@ private ImmutableArray BuildVariableValueSetsTwoRequirementsSlow
ref PooledArrayWriter? buffer)
{
VariableValues[]? variableValueSets = null;
- Dictionary? seen = null;
- List?[]? additionalPaths = null;
+ var additionalPaths = new AdditionalPathAccumulator();
var nextIndex = 0;
+ var seeded = false;
foreach (var result in elements)
{
@@ -942,32 +973,33 @@ private ImmutableArray BuildVariableValueSetsTwoRequirementsSlow
if (nextIndex > 0)
{
- seen ??= new Dictionary(elements.Length, TwoValueNodeTupleComparer.Instance)
+ if (!seeded)
{
- [new TwoValueNodeTuple(
+ _seenTwoValueTuples[new TwoValueNodeTuple(
variableValueSets[0].Values.Fields[0].Value,
- variableValueSets[0].Values.Fields[1].Value)] = 0
- };
+ variableValueSets[0].Values.Fields[1].Value)] = 0;
+ seeded = true;
+ }
- if (seen.TryGetValue(key, out var existingIndex))
+ if (_seenTwoValueTuples.TryGetValue(key, out var existingIndex))
{
- additionalPaths ??= new List?[elements.Length];
- (additionalPaths[existingIndex] ??= []).Add(result.Path);
+ additionalPaths.Add(existingIndex, result.CompactPath);
continue;
}
- seen[key] = nextIndex;
+ _seenTwoValueTuples[key] = nextIndex;
}
variableValueSets[nextIndex++] = new VariableValues(
- result.Path,
+ result.CompactPath,
new ObjectValueNode([
new ObjectFieldNode(requirement1.Key, value1),
new ObjectFieldNode(requirement2.Key, value2)
]));
}
- return FinalizeVariableValueSets(variableValueSets, additionalPaths, nextIndex);
+ _seenTwoValueTuples.Clear();
+ return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex);
}
private ImmutableArray BuildVariableValueSetsThreeRequirements(
@@ -1011,9 +1043,9 @@ private ImmutableArray BuildVariableValueSetsThreeRequirementsFa
ref PooledArrayWriter? buffer)
{
VariableValues[]? variableValueSets = null;
- Dictionary? seen = null;
- List?[]? additionalPaths = null;
+ var additionalPaths = new AdditionalPathAccumulator();
var nextIndex = 0;
+ var seeded = false;
foreach (var result in elements)
{
@@ -1049,26 +1081,26 @@ private ImmutableArray BuildVariableValueSetsThreeRequirementsFa
if (nextIndex > 0)
{
- seen ??= new Dictionary(elements.Length, ThreeValueNodeTupleComparer.Instance)
+ if (!seeded)
{
- [new ThreeValueNodeTuple(
+ _seenThreeValueTuples[new ThreeValueNodeTuple(
variableValueSets[0].Values.Fields[0].Value,
variableValueSets[0].Values.Fields[1].Value,
- variableValueSets[0].Values.Fields[2].Value)] = 0
- };
+ variableValueSets[0].Values.Fields[2].Value)] = 0;
+ seeded = true;
+ }
- if (seen.TryGetValue(key, out var existingIndex))
+ if (_seenThreeValueTuples.TryGetValue(key, out var existingIndex))
{
- additionalPaths ??= new List?[elements.Length];
- (additionalPaths[existingIndex] ??= []).Add(result.Path);
+ additionalPaths.Add(existingIndex, result.CompactPath);
continue;
}
- seen[key] = nextIndex;
+ _seenThreeValueTuples[key] = nextIndex;
}
variableValueSets[nextIndex++] = new VariableValues(
- result.Path,
+ result.CompactPath,
new ObjectValueNode([
new ObjectFieldNode(requirement1.Key, mappedValue1),
new ObjectFieldNode(requirement2.Key, mappedValue2),
@@ -1076,7 +1108,8 @@ [new ThreeValueNodeTuple(
]));
}
- return FinalizeVariableValueSets(variableValueSets, additionalPaths, nextIndex);
+ _seenThreeValueTuples.Clear();
+ return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex);
}
private ImmutableArray BuildVariableValueSetsThreeRequirementsSlowPath(
@@ -1087,9 +1120,9 @@ private ImmutableArray BuildVariableValueSetsThreeRequirementsSl
ref PooledArrayWriter? buffer)
{
VariableValues[]? variableValueSets = null;
- Dictionary? seen = null;
- List?[]? additionalPaths = null;
+ var additionalPaths = new AdditionalPathAccumulator();
var nextIndex = 0;
+ var seeded = false;
foreach (var result in elements)
{
@@ -1125,26 +1158,26 @@ private ImmutableArray BuildVariableValueSetsThreeRequirementsSl
if (nextIndex > 0)
{
- seen ??= new Dictionary(elements.Length, ThreeValueNodeTupleComparer.Instance)
+ if (!seeded)
{
- [new ThreeValueNodeTuple(
+ _seenThreeValueTuples[new ThreeValueNodeTuple(
variableValueSets[0].Values.Fields[0].Value,
variableValueSets[0].Values.Fields[1].Value,
- variableValueSets[0].Values.Fields[2].Value)] = 0
- };
+ variableValueSets[0].Values.Fields[2].Value)] = 0;
+ seeded = true;
+ }
- if (seen.TryGetValue(key, out var existingIndex))
+ if (_seenThreeValueTuples.TryGetValue(key, out var existingIndex))
{
- additionalPaths ??= new List?[elements.Length];
- (additionalPaths[existingIndex] ??= []).Add(result.Path);
+ additionalPaths.Add(existingIndex, result.CompactPath);
continue;
}
- seen[key] = nextIndex;
+ _seenThreeValueTuples[key] = nextIndex;
}
variableValueSets[nextIndex++] = new VariableValues(
- result.Path,
+ result.CompactPath,
new ObjectValueNode([
new ObjectFieldNode(requirement1.Key, value1),
new ObjectFieldNode(requirement2.Key, value2),
@@ -1152,7 +1185,8 @@ [new ThreeValueNodeTuple(
]));
}
- return FinalizeVariableValueSets(variableValueSets, additionalPaths, nextIndex);
+ _seenThreeValueTuples.Clear();
+ return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex);
}
private ObjectValueNode? MapRequirements(
@@ -1385,6 +1419,13 @@ private CompositeResultElement GetStartObjectResult(Path path)
return result.ValueKind is JsonValueKind.Object or JsonValueKind.Null ? result : default;
}
+ private CompositeResultElement GetStartObjectResult(CompactPath path)
+ {
+ var result = GetStartResult(path);
+ Debug.Assert(result.ValueKind is JsonValueKind.Object or JsonValueKind.Null or JsonValueKind.Undefined);
+ return result.ValueKind is JsonValueKind.Object or JsonValueKind.Null ? result : default;
+ }
+
private CompositeResultElement GetStartResult(Path path)
{
if (path.IsRoot)
@@ -1421,6 +1462,45 @@ private CompositeResultElement GetStartResult(Path path)
$"The path segment '{parent}' does not exist in the data.");
}
+ private CompositeResultElement GetStartResult(CompactPath path)
+ {
+ var element = _result.Data;
+
+ for (var i = 0; i < path.Length; i++)
+ {
+ var segment = path[i];
+
+ if (element.ValueKind is JsonValueKind.Null)
+ {
+ return element;
+ }
+
+ if (segment >= 0)
+ {
+ var selection = _operation.GetSelectionById(segment);
+
+ if (!element.TryGetProperty(selection.ResponseName, out element))
+ {
+ return default;
+ }
+ }
+ else
+ {
+ var index = ~segment;
+
+ if (element.GetArrayLength() <= index)
+ {
+ throw new InvalidOperationException(
+ $"The path segment '[{index}]' does not exist in the data.");
+ }
+
+ element = element[index];
+ }
+ }
+
+ return element;
+ }
+
public void Dispose()
{
if (_disposed)
@@ -1438,6 +1518,44 @@ public void Dispose()
{
memory.Dispose();
}
+
+ _pathPool.Dispose();
+ }
+
+ private void ReturnPathSegments(ReadOnlySpan<SourceSchemaResult> results)
+ {
+ for (var i = 0; i < results.Length; i++)
+ {
+ ReturnPathSegments(results[i], _seenPaths);
+ }
+
+ _seenPaths.Clear();
+ }
+
+ private void ReturnPathSegments(SourceSchemaResult result)
+ {
+ ReturnPathSegments(result, _seenPaths);
+ _seenPaths.Clear();
+ }
+
+ private void ReturnPathSegments(SourceSchemaResult result, HashSet<int[]> seen)
+ {
+ ReturnPathSegments(result.Path, seen);
+
+ foreach (var additionalPath in result.AdditionalPaths)
+ {
+ ReturnPathSegments(additionalPath, seen);
+ }
+ }
+
+ private void ReturnPathSegments(CompactPath path, HashSet<int[]> seen)
+ {
+ var array = path.UnsafeGetBackingArray();
+
+ if (array is not null && seen.Add(array))
+ {
+ _pathPool.Return(array);
+ }
}
private sealed class SingleValueNodeComparer : IEqualityComparer<IValueNode>
@@ -1453,27 +1571,17 @@ public int GetHashCode(IValueNode obj)
private static ImmutableArray<VariableValues> FinalizeVariableValueSets(
VariableValues[]? variableValueSets,
- List?[]? additionalPaths,
+ ref AdditionalPathAccumulator additionalPaths,
int nextIndex)
{
if (variableValueSets is null || nextIndex == 0)
{
+ additionalPaths.Dispose();
return [];
}
- if (additionalPaths is not null)
- {
- for (var i = 0; i < nextIndex; i++)
- {
- if (additionalPaths[i] is { } paths)
- {
- variableValueSets[i] = variableValueSets[i] with
- {
- AdditionalPaths = [.. paths]
- };
- }
- }
- }
+ additionalPaths.ApplyTo(variableValueSets, nextIndex);
+ additionalPaths.Dispose();
if (variableValueSets.Length != nextIndex)
{
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStorePool.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStorePool.cs
new file mode 100644
index 00000000000..97728ff7996
--- /dev/null
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStorePool.cs
@@ -0,0 +1,192 @@
+using System.Diagnostics;
+using static HotChocolate.Fusion.Execution.Results.FetchResultStorePoolEventSource;
+
+namespace HotChocolate.Fusion.Execution.Results;
+
+internal sealed class FetchResultStorePool : IDisposable
+{
+ private const int MaxCollectTargetRetainLength = 256;
+ private const int MaxDictionaryRetainCapacity = 256;
+
+ private readonly Bucket _bucket;
+
+ public FetchResultStorePool(int[] levels, TimeSpan trimInterval)
+ {
+ Debug.Assert(
+ levels.Length > 0,
+ "Levels must be a non-empty array.");
+ Debug.Assert(
+ trimInterval.TotalSeconds > 10,
+ "Trim interval should be greater than 10 seconds to avoid excessive trimming.");
+
+ _bucket = new Bucket(levels, trimInterval);
+ }
+
+ public FetchResultStore Rent()
+ {
+ var store = _bucket.Rent();
+
+ if (store is null)
+ {
+ store = new FetchResultStore();
+ Log.StoreMiss();
+ }
+ else
+ {
+ Log.StoreHit();
+ }
+
+ return store;
+ }
+
+ public void Return(FetchResultStore store)
+ {
+ store.Clean(MaxCollectTargetRetainLength, MaxDictionaryRetainCapacity);
+
+ if (!_bucket.Return(store))
+ {
+ store.Dispose();
+ Log.StoreDropped();
+ }
+ }
+
+ public void Dispose() => _bucket.Dispose();
+
+ private sealed class Bucket : IDisposable
+ {
+ private readonly FetchResultStore?[] _stores;
+ private readonly int[] _levels;
+ private readonly Timer _trimTimer;
+ private int _currentLevel;
+ private int _inUse;
+ private SpinLock _lock;
+ private int _index;
+
+ internal Bucket(int[] levels, TimeSpan trimInterval)
+ {
+ _stores = new FetchResultStore?[levels[levels.Length - 1]];
+ _levels = levels;
+ _currentLevel = levels.Length - 1;
+ _lock = new SpinLock(Debugger.IsAttached);
+ _trimTimer = new Timer(static b => ((Bucket)b!).Trim(), this, trimInterval, trimInterval);
+ }
+
+ internal FetchResultStore? Rent()
+ {
+ Interlocked.Increment(ref _inUse);
+
+ FetchResultStore? store = null;
+ var lockTaken = false;
+
+ try
+ {
+ _lock.Enter(ref lockTaken);
+
+ if (_index < _stores.Length)
+ {
+ store = _stores[_index];
+ _stores[_index++] = null;
+ }
+ }
+ finally
+ {
+ if (lockTaken)
+ {
+ _lock.Exit(false);
+ }
+ }
+
+ return store;
+ }
+
+ internal bool Return(FetchResultStore store)
+ {
+ Interlocked.Decrement(ref _inUse);
+
+ var lockTaken = false;
+ var accepted = false;
+
+ try
+ {
+ _lock.Enter(ref lockTaken);
+
+ if (_index > 0)
+ {
+ _stores[--_index] = store;
+ accepted = true;
+ }
+ }
+ finally
+ {
+ if (lockTaken)
+ {
+ _lock.Exit(false);
+ }
+ }
+
+ return accepted;
+ }
+
+ private void Trim()
+ {
+ var currentLevel = _currentLevel;
+
+ if (currentLevel == 0)
+ {
+ return;
+ }
+
+ var previousLevel = currentLevel - 1;
+ var previousLimit = _levels[previousLevel];
+
+ if (_inUse > previousLimit)
+ {
+ return;
+ }
+
+ var lockTaken = false;
+
+ try
+ {
+ var currentLimit = _levels[currentLevel];
+
+ _lock.Enter(ref lockTaken);
+
+ for (var i = previousLimit; i < currentLimit; i++)
+ {
+ if (_stores[i] is { } store)
+ {
+ store.Dispose();
+ _stores[i] = null;
+ }
+ }
+
+ if (_index > previousLimit)
+ {
+ _index = previousLimit;
+ }
+ }
+ finally
+ {
+ if (lockTaken)
+ {
+ _lock.Exit(false);
+ }
+ }
+
+ _currentLevel = previousLevel;
+ Log.PoolTrimmed(previousLevel, previousLimit);
+ }
+
+ public void Dispose()
+ {
+ _trimTimer.Dispose();
+
+ for (var i = 0; i < _stores.Length; i++)
+ {
+ _stores[i]?.Dispose();
+ _stores[i] = null;
+ }
+ }
+ }
+}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStorePoolEventSource.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStorePoolEventSource.cs
new file mode 100644
index 00000000000..6edd588112d
--- /dev/null
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStorePoolEventSource.cs
@@ -0,0 +1,47 @@
+using System.Diagnostics.Tracing;
+
+namespace HotChocolate.Fusion.Execution.Results;
+
+[EventSource(Name = "HotChocolate-Fusion-FetchResultStorePool")]
+internal sealed class FetchResultStorePoolEventSource : EventSource
+{
+ public static readonly FetchResultStorePoolEventSource Log = new();
+
+ private FetchResultStorePoolEventSource() { }
+
+ [Event(1, Level = EventLevel.Verbose, Message = "Store rented from pool (hit)")]
+ public void StoreHit()
+ {
+ if (IsEnabled())
+ {
+ WriteEvent(1);
+ }
+ }
+
+ [Event(2, Level = EventLevel.Informational, Message = "Pool empty, new store allocated (miss)")]
+ public void StoreMiss()
+ {
+ if (IsEnabled())
+ {
+ WriteEvent(2);
+ }
+ }
+
+ [Event(3, Level = EventLevel.Warning, Message = "Pool full, store disposed on return (dropped)")]
+ public void StoreDropped()
+ {
+ if (IsEnabled())
+ {
+ WriteEvent(3);
+ }
+ }
+
+ [Event(4, Level = EventLevel.Informational, Message = "Pool trimmed to level {0} (limit={1})")]
+ public void PoolTrimmed(int level, int limit)
+ {
+ if (IsEnabled())
+ {
+ WriteEvent(4, level, limit);
+ }
+ }
+}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/ValueCompletion.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/ValueCompletion.cs
index 71937f634a4..3652401abc0 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/ValueCompletion.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/ValueCompletion.cs
@@ -59,7 +59,7 @@ public bool BuildResult(
.SetMessage("Unexpected Execution Error")
.Build();
- return BuildErrorResult(target, responseNames, error, target.Path);
+ return BuildErrorResult(target, responseNames, error, target.CompactPath);
}
foreach (var property in source.EnumerateObject())
@@ -102,8 +102,11 @@ public bool BuildErrorResult(
CompositeResultElement target,
ReadOnlySpan<string> responseNames,
IError error,
- Path path)
+ CompactPath path)
{
+ var operation = target.Operation;
+ var errorPath = path.ToPath(operation);
+
foreach (var responseName in responseNames)
{
if (!target.TryGetProperty(responseName, out var fieldResult)
@@ -114,7 +117,7 @@ public bool BuildErrorResult(
var selection = fieldResult.AssertSelection();
var errorWithPath = ErrorBuilder.FromError(error)
- .SetPath(path.Append(responseName))
+ .SetPath(errorPath.Append(responseName))
.AddLocation(selection.SyntaxNodes[0].Node)
.Build();
errorWithPath = _errorHandler.Handle(errorWithPath);
@@ -179,17 +182,19 @@ private bool TryCompleteValue(
IError error;
if (errorTrie?.FindFirstError() is { } errorFromPath)
{
+ var path = target.CompactPath.ToPath(target.Operation);
error = ErrorBuilder.FromError(errorFromPath)
- .SetPath(target.Path)
+ .SetPath(path)
.AddLocation(selection.SyntaxNodes[0].Node)
.Build();
}
else
{
+ var path = target.CompactPath.ToPath(target.Operation);
error = ErrorBuilder.New()
.SetMessage("Cannot return null for non-nullable field.")
.SetCode(ErrorCodes.Execution.NonNullViolation)
- .SetPath(target.Path)
+ .SetPath(path)
.AddLocation(selection.SyntaxNodes[0].Node)
.Build();
}
@@ -217,8 +222,9 @@ private bool TryCompleteValue(
// or with a path below it.
if (errorTrie?.FindFirstError() is { } error)
{
+ var path = target.CompactPath.ToPath(target.Operation);
var errorWithPath = ErrorBuilder.FromError(error)
- .SetPath(target.Path)
+ .SetPath(path)
.AddLocation(selection.SyntaxNodes[0].Node)
.Build();
errorWithPath = _errorHandler.Handle(errorWithPath);
@@ -289,7 +295,7 @@ private bool TryCompleteList(
if (errorTrieForIndex?.Error is { } error)
{
var errorWithPath = ErrorBuilder.FromError(error)
- .SetPath(target.Path.Append(i))
+ .SetPath(target.CompactPath.ToPath(target.Operation, i))
.AddLocation(selection.SyntaxNodes[0].Node)
.Build();
errorWithPath = _errorHandler.Handle(errorWithPath);
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/VariableValues.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/VariableValues.cs
index d572bb18e22..d7dcc86a1c6 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/VariableValues.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/VariableValues.cs
@@ -1,12 +1,13 @@
using System.Collections.Immutable;
+using HotChocolate.Fusion.Text.Json;
using HotChocolate.Language;
namespace HotChocolate.Fusion.Execution;
-public sealed record VariableValues(Path Path, ObjectValueNode Values)
+public sealed record VariableValues(CompactPath Path, ObjectValueNode Values)
{
///
/// Gets the additional paths that share the same variable values as the primary .
///
- public ImmutableArray<Path> AdditionalPaths { get; init; } = [];
+ public ImmutableArray<CompactPath> AdditionalPaths { get; init; } = [];
}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs
new file mode 100644
index 00000000000..bbdee5ffaa3
--- /dev/null
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs
@@ -0,0 +1,129 @@
+using HotChocolate.Fusion.Execution.Nodes;
+
+namespace HotChocolate.Fusion.Text.Json;
+
+///
+/// Represents a path through a GraphQL result tree using integer segments.
+/// The sign bit distinguishes between the two segment kinds:
+/// non-negative values are field selection IDs, and negative values are array indices
+/// (stored as the bitwise complement of the index).
+///
+public readonly struct CompactPath : IEquatable<CompactPath>
+{
+ ///
+ /// Gets the empty root path.
+ ///
+ public static CompactPath Root => default;
+
+ private readonly int[]? _segments;
+
+ internal CompactPath(int[] segments)
+ => _segments = segments;
+
+ ///
+ /// Gets the path segments as a read-only span.
+ ///
+ public ReadOnlySpan<int> Segments
+ => _segments is null
+ ? ReadOnlySpan<int>.Empty
+ : _segments.AsSpan(1, _segments[0]);
+
+ ///
+ /// Gets the number of segments in the path.
+ ///
+ public int Length => _segments?[0] ?? 0;
+
+ ///
+ /// Gets a value indicating whether this is the root path (i.e. has no segments).
+ ///
+ public bool IsRoot => _segments is null;
+
+ ///
+ /// Gets the segment at the specified index.
+ ///
+ /// The zero-based segment index.
+ public int this[int index] => _segments![index + 1];
+
+ internal int[]? UnsafeGetBackingArray() => _segments;
+
+ ///
+/// Converts this compact path into a <see cref="Path"/> by resolving
+ /// selection IDs to their response names using the given operation.
+ ///
+ /// The operation used to resolve selection IDs.
+ public Path ToPath(Operation operation)
+ {
+ ArgumentNullException.ThrowIfNull(operation);
+
+ var path = Path.Root;
+
+ if (_segments is null)
+ {
+ return path;
+ }
+
+ var length = _segments[0];
+ for (var i = 1; i <= length; i++)
+ {
+ var segment = _segments[i];
+
+ if (segment < 0)
+ {
+ path = path.Append(~segment);
+ }
+ else
+ {
+ path = path.Append(operation.GetSelectionById(segment).ResponseName);
+ }
+ }
+
+ return path;
+ }
+
+ ///
+/// Converts this compact path into a <see cref="Path"/> and appends an array index segment.
+ ///
+ /// The operation used to resolve selection IDs.
+ /// The array index to append.
+ public Path ToPath(Operation operation, int appendIndex)
+ => ToPath(operation).Append(appendIndex);
+
+ ///
+/// Converts this compact path into a <see cref="Path"/> and appends a field name segment.
+ ///
+ /// The operation used to resolve selection IDs.
+ /// The field name to append.
+ public Path ToPath(Operation operation, string appendField)
+ => ToPath(operation).Append(appendField);
+
+ ///
+ public bool Equals(CompactPath other)
+ => Segments.SequenceEqual(other.Segments);
+
+ ///
+ public override bool Equals(object? obj)
+ => obj is CompactPath other && Equals(other);
+
+ ///
+ public override int GetHashCode()
+ {
+ var hashCode = new HashCode();
+
+ if (_segments is not null)
+ {
+ var length = _segments[0];
+ for (var i = 1; i <= length; i++)
+ {
+ hashCode.Add(_segments[i]);
+ }
+ }
+
+ return hashCode.ToHashCode();
+ }
+
+ public static bool operator ==(CompactPath left, CompactPath right)
+ => left.Equals(right);
+
+ public static bool operator !=(CompactPath left, CompactPath right)
+ => !left.Equals(right);
+}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPathBuilder.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPathBuilder.cs
new file mode 100644
index 00000000000..f99ccc01325
--- /dev/null
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPathBuilder.cs
@@ -0,0 +1,109 @@
+using System.Buffers;
+using System.Diagnostics;
+
+namespace HotChocolate.Fusion.Text.Json;
+
+///
+/// Stack-based builder for <see cref="CompactPath"/>. Starts on a caller-supplied
+/// stack buffer and spills to <see cref="ArrayPool{T}"/> if the path exceeds it.
+///
+internal ref struct CompactPathBuilder
+{
+ private readonly PathSegmentLocalPool? _pool;
+ private Span<int> _span;
+ private int[]? _arrayFromPool;
+ private int _pos;
+
+ public CompactPathBuilder(Span<int> initialBuffer, PathSegmentLocalPool? pool)
+ {
+ Debug.Assert(initialBuffer.Length > 0);
+
+ _span = initialBuffer;
+ _pool = pool;
+ _arrayFromPool = null;
+ _pos = 0;
+ }
+
+ public void Append(int segment)
+ {
+ if (_pos == _span.Length)
+ {
+ Grow();
+ }
+
+ _span[_pos++] = segment;
+ }
+
+ public void AppendField(int selectionId) => Append(selectionId);
+
+ public void AppendIndex(int arrayIndex) => Append(~arrayIndex);
+
+ public CompactPath ToPath()
+ {
+ if (_pos == 0)
+ {
+ ReturnPooledArray();
+ return CompactPath.Root;
+ }
+
+ if (_pool is null)
+ {
+ return ToPathNoPool();
+ }
+
+ // -1 because [0] is reserved for the length
+ if (_pos <= PathSegmentMemory.SegmentArraySize - 1)
+ {
+ var array = _pool.Rent();
+ array[0] = _pos;
+ _span[.._pos].CopyTo(array.AsSpan(1));
+ ReturnPooledArray();
+ return new CompactPath(array);
+ }
+
+ // Overflow: path deeper than SegmentArraySize - 1 segments — allocate exact-sized array (extremely rare)
+ var overflow = new int[_pos + 1];
+ overflow[0] = _pos;
+ _span[.._pos].CopyTo(overflow.AsSpan(1));
+ ReturnPooledArray();
+ return new CompactPath(overflow);
+ }
+
+ private CompactPath ToPathNoPool()
+ {
+ if (_pos == 0)
+ {
+ ReturnPooledArray();
+ return CompactPath.Root;
+ }
+
+ var result = new int[_pos + 1];
+ result[0] = _pos;
+ _span[.._pos].CopyTo(result.AsSpan(1));
+ ReturnPooledArray();
+ return new CompactPath(result);
+ }
+
+ private void ReturnPooledArray()
+ {
+ if (_arrayFromPool is not null)
+ {
+ ArrayPool<int>.Shared.Return(_arrayFromPool);
+ _arrayFromPool = null;
+ }
+ }
+
+ private void Grow()
+ {
+ var newArray = ArrayPool<int>.Shared.Rent(_span.Length * 2);
+ _span[.._pos].CopyTo(newArray);
+
+ if (_arrayFromPool is not null)
+ {
+ ArrayPool<int>.Shared.Return(_arrayFromPool);
+ }
+
+ _arrayFromPool = newArray;
+ _span = newArray;
+ }
+}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.cs
index 1112eb3e30c..1d655dbd993 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.cs
@@ -13,14 +13,19 @@ public sealed partial class CompositeResultDocument : IDisposable
private readonly List _sources = [];
private readonly Operation _operation;
private readonly ulong _includeFlags;
+ private readonly PathSegmentLocalPool? _pathPool;
internal MetaDb _metaDb;
private bool _disposed;
- public CompositeResultDocument(Operation operation, ulong includeFlags)
+ internal CompositeResultDocument(
+ Operation operation,
+ ulong includeFlags,
+ PathSegmentLocalPool? pathPool = null)
{
_metaDb = MetaDb.CreateForEstimatedRows(Cursor.RowsPerChunk * 8);
_operation = operation;
_includeFlags = includeFlags;
+ _pathPool = pathPool;
Data = CreateObject(Cursor.Zero, operation.RootSelectionSet);
}
@@ -117,15 +122,14 @@ internal int GetPropertyCount(Cursor current)
return _metaDb.GetSizeOrLength(current);
}
- internal Path CreatePath(Cursor current)
+ internal CompactPath CreateCompactPath(Cursor current)
{
// Stop at root via IsRoot flag.
if ((_metaDb.GetFlags(current) & ElementFlags.IsRoot) == ElementFlags.IsRoot)
{
- return Path.Root;
+ return CompactPath.Root;
}
- var cursorIndex = current.Index;
Span<Cursor> chain = stackalloc Cursor[64];
var c = current;
var written = 0;
@@ -148,7 +152,8 @@ internal Path CreatePath(Cursor current)
}
} while (true);
- var path = Path.Root;
+ Span<int> pathBuffer = stackalloc int[32];
+ var path = new CompactPathBuilder(pathBuffer, _pathPool);
var parentTokenType = ElementTokenType.StartObject;
chain = chain[..written];
@@ -160,7 +165,7 @@ internal Path CreatePath(Cursor current)
if (tokenType == ElementTokenType.PropertyName)
{
- path = path.Append(GetSelection(c)!.ResponseName);
+ path.AppendField(GetSelection(c)!.Id);
i--; // skip over the actual value
}
else if (chain.Length - 1 > i)
@@ -173,16 +178,19 @@ internal Path CreatePath(Cursor current)
var absChild = (c.Chunk * Cursor.RowsPerChunk) + c.Row;
var absParent = parentCursor.Chunk * Cursor.RowsPerChunk + parentCursor.Row;
var arrayIndex = absChild - (absParent + 1);
- path = path.Append(arrayIndex);
+ path.AppendIndex(arrayIndex);
}
}
parentTokenType = tokenType;
}
- return path;
+ return path.ToPath();
}
+ internal Path CreatePath(Cursor current)
+ => CreateCompactPath(current).ToPath(_operation);
+
internal CompositeResultElement GetParent(Cursor current)
{
// The null cursor represents the data object, which is the utmost root.
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultElement.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultElement.cs
index 271cd4a11de..652bdbe26b3 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultElement.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultElement.cs
@@ -182,6 +182,19 @@ public bool IsNullOrInvalidated
}
}
+ ///
+ /// Gets the compact path to this element within the result document.
+ ///
+ public CompactPath CompactPath
+ {
+ get
+ {
+ CheckValidInstance();
+
+ return _parent.CreateCompactPath(_cursor);
+ }
+ }
+
///
/// Gets the path to this element within the result document.
///
@@ -191,7 +204,8 @@ public Path Path
{
CheckValidInstance();
- return _parent.CreatePath(_cursor);
+ var path = _parent.CreateCompactPath(_cursor);
+ return path.ToPath(_parent.GetOperation());
}
}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentLocalPool.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentLocalPool.cs
new file mode 100644
index 00000000000..1e1c2c238d8
--- /dev/null
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentLocalPool.cs
@@ -0,0 +1,102 @@
+using System.Buffers;
+
+namespace HotChocolate.Fusion.Text.Json;
+
+internal sealed class PathSegmentLocalPool : IDisposable
+{
+ private int[]?[] _buffers;
+ private int _index;
+ private int[]?[] _allRented;
+ private int _allRentedCount;
+ private bool _disposed;
+
+ public PathSegmentLocalPool(int initialCapacity = 64)
+ {
+ var capacity = Math.Max(32, initialCapacity);
+
+ _buffers = ArrayPool<int[]?>.Shared.Rent(capacity);
+ _index = 0;
+ _allRented = ArrayPool<int[]?>.Shared.Rent(capacity * 2);
+ _allRentedCount = 0;
+ }
+
+ public int[] Rent()
+ {
+ if (_index > 0)
+ {
+ var array = _buffers[--_index]!;
+ _buffers[_index] = null;
+ return array;
+ }
+
+ var rented = PathSegmentMemory.Rent();
+ TrackRented(rented);
+ return rented;
+ }
+
+ public void Return(int[] array)
+ {
+ if (array.Length != PathSegmentMemory.SegmentArraySize)
+ {
+ return;
+ }
+
+ if (_index == _buffers.Length)
+ {
+ GrowBuffers();
+ }
+
+ _buffers[_index++] = array;
+ }
+
+ private void TrackRented(int[] array)
+ {
+ if (_allRentedCount == _allRented.Length)
+ {
+ GrowAllRented();
+ }
+
+ _allRented[_allRentedCount++] = array;
+ }
+
+ private void GrowBuffers()
+ {
+ var newBuffers = ArrayPool<int[]?>.Shared.Rent(_buffers.Length * 2);
+ _buffers.AsSpan(0, _index).CopyTo(newBuffers);
+ ArrayPool<int[]?>.Shared.Return(_buffers, clearArray: true);
+ _buffers = newBuffers;
+ }
+
+ private void GrowAllRented()
+ {
+ var newAllRented = ArrayPool<int[]?>.Shared.Rent(_allRented.Length * 2);
+ _allRented.AsSpan(0, _allRentedCount).CopyTo(newAllRented);
+ ArrayPool<int[]?>.Shared.Return(_allRented, clearArray: true);
+ _allRented = newAllRented;
+ }
+
+ public void Dispose()
+ {
+ if (_disposed)
+ {
+ return;
+ }
+
+ _disposed = true;
+
+ for (var i = 0; i < _allRentedCount; i++)
+ {
+ PathSegmentMemory.Return(_allRented[i]!);
+ _allRented[i] = null;
+ }
+
+ _allRentedCount = 0;
+ _index = 0;
+
+ ArrayPool<int[]?>.Shared.Return(_buffers, clearArray: true);
+ ArrayPool<int[]?>.Shared.Return(_allRented, clearArray: true);
+
+ _buffers = [];
+ _allRented = [];
+ }
+}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentMemory.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentMemory.cs
new file mode 100644
index 00000000000..194a511a4e0
--- /dev/null
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentMemory.cs
@@ -0,0 +1,27 @@
+namespace HotChocolate.Fusion.Text.Json;
+
+internal static class PathSegmentMemory
+{
+ private static PathSegmentPool s_pool = new(
+ segmentArraySize: 64,
+ levels: [4096, 8192, 16384],
+ trimInterval: TimeSpan.FromMinutes(5),
+ preAllocate: false);
+
+ public static int SegmentArraySize => s_pool._segmentArraySize;
+
+ public static void Reconfigure(Func<PathSegmentPool> factory)
+ {
+ ArgumentNullException.ThrowIfNull(factory);
+
+ var oldPool = Interlocked.Exchange(
+ ref s_pool,
+ factory() ?? throw new InvalidOperationException(
+ "The factory must create a valid pool."));
+ oldPool.Dispose();
+ }
+
+ public static int[] Rent() => s_pool.Rent();
+
+ public static void Return(int[] array) => s_pool.Return(array);
+}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentPool.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentPool.cs
new file mode 100644
index 00000000000..4a1a77569aa
--- /dev/null
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentPool.cs
@@ -0,0 +1,266 @@
+using System.Diagnostics;
+using static HotChocolate.Fusion.Text.Json.PathSegmentPoolEventSource;
+
+namespace HotChocolate.Fusion.Text.Json;
+
+internal sealed class PathSegmentPool : IDisposable
+{
+ private static int s_nextPoolId;
+ internal readonly int _segmentArraySize;
+ private readonly int _poolId;
+ private readonly int _numberOfArrays;
+ private readonly Bucket _bucket;
+
+ public PathSegmentPool(int segmentArraySize, int[] levels, TimeSpan trimInterval, bool preAllocate)
+ {
+ Debug.Assert(segmentArraySize >= 32);
+ Debug.Assert(
+ levels.Length > 0,
+ "Levels must be a non-empty array.");
+ Debug.Assert(
+ trimInterval.TotalSeconds > 10,
+ "Trim interval should be greater than 10 seconds to avoid excessive trimming.");
+
+ _segmentArraySize = segmentArraySize;
+ _poolId = Interlocked.Increment(ref s_nextPoolId);
+ _numberOfArrays = levels[levels.Length - 1];
+ _bucket = new Bucket(_poolId, segmentArraySize, levels, trimInterval, preAllocate);
+
+ var log = Log;
+ if (log.IsEnabled())
+ {
+ log.PoolCreated(
+ _poolId,
+ _segmentArraySize,
+ _numberOfArrays,
+ (long)_numberOfArrays * _segmentArraySize * sizeof(int));
+ }
+ }
+
+ public int[] Rent()
+ {
+ var log = Log;
+ var buffer = _bucket.Rent();
+
+ if (buffer is null)
+ {
+ buffer = new int[_segmentArraySize];
+
+ if (log.IsEnabled())
+ {
+ log.PoolExhausted(_poolId, _numberOfArrays);
+ }
+ }
+
+ if (log.IsEnabled())
+ {
+ log.SegmentRented(buffer.GetHashCode(), buffer.Length, _poolId, _bucket.InUse);
+ }
+
+ return buffer;
+ }
+
+ public void Return(int[] array)
+ {
+ if (array.Length != _segmentArraySize)
+ {
+ return;
+ }
+
+ var log = Log;
+ var returned = _bucket.Return(array);
+
+ if (log.IsEnabled())
+ {
+ log.SegmentReturned(array.GetHashCode(), array.Length, _poolId, _bucket.InUse);
+ }
+
+ if (!returned && log.IsEnabled())
+ {
+ log.SegmentDropped(array.GetHashCode(), array.Length, _poolId);
+ }
+ }
+
+ public void Dispose() => _bucket.Dispose();
+
+ private sealed class Bucket : IDisposable
+ {
+ private readonly int _poolId;
+ private readonly int _segmentArraySize;
+ private readonly int[]?[] _buffers;
+ private readonly int[] _levels;
+ private readonly Timer _trimTimer;
+ private int _currentLevel;
+ private int _inUse;
+ private SpinLock _lock;
+ private int _index;
+
+ internal Bucket(
+ int poolId,
+ int segmentArraySize,
+ int[] levels,
+ TimeSpan trimInterval,
+ bool preAllocate)
+ {
+ var numberOfBuffers = levels[levels.Length - 1];
+
+ _poolId = poolId;
+ _segmentArraySize = segmentArraySize;
+ _buffers = new int[numberOfBuffers][];
+ _levels = levels;
+ _currentLevel = _levels.Length - 1;
+
+ if (preAllocate)
+ {
+ var stableLevel = levels[0];
+ for (var i = 0; i < stableLevel; i++)
+ {
+ _buffers[i] = new int[_segmentArraySize];
+ }
+ }
+
+ _lock = new SpinLock(Debugger.IsAttached);
+ _index = 0;
+ _inUse = 0;
+
+ _trimTimer = new Timer(static b => ((Bucket)b!).Trim(), this, trimInterval, trimInterval);
+ }
+
+ internal int InUse => _inUse;
+
+ internal int[]? Rent()
+ {
+ Interlocked.Increment(ref _inUse);
+
+ var buffers = _buffers;
+ int[]? buffer = null;
+
+ var lockTaken = false;
+ var allocateBuffer = false;
+
+ try
+ {
+ _lock.Enter(ref lockTaken);
+
+ if (_index < buffers.Length)
+ {
+ buffer = buffers[_index];
+ buffers[_index++] = null;
+ allocateBuffer = buffer == null;
+ }
+ }
+ finally
+ {
+ if (lockTaken)
+ {
+ _lock.Exit(false);
+ }
+ }
+
+ if (allocateBuffer)
+ {
+ buffer = new int[_segmentArraySize];
+
+ var log = Log;
+ if (log.IsEnabled())
+ {
+ log.SegmentAllocated(buffer.GetHashCode(), _segmentArraySize, _poolId);
+ }
+ }
+
+ return buffer;
+ }
+
+ internal bool Return(int[] array)
+ {
+ Interlocked.Decrement(ref _inUse);
+
+ if (array.Length != _segmentArraySize)
+ {
+ return false;
+ }
+
+ var returned = false;
+ var lockTaken = false;
+
+ try
+ {
+ _lock.Enter(ref lockTaken);
+
+ if (_index > 0)
+ {
+ _buffers[--_index] = array;
+ returned = true;
+ }
+ }
+ finally
+ {
+ if (lockTaken)
+ {
+ _lock.Exit(false);
+ }
+ }
+
+ return returned;
+ }
+
+ private void Trim()
+ {
+ var currentLevel = _currentLevel;
+
+ if (currentLevel == 0)
+ {
+ return;
+ }
+
+ var previousLevel = currentLevel - 1;
+ var previousLimit = _levels[previousLevel];
+
+ if (_inUse > previousLimit)
+ {
+ return;
+ }
+
+ var trimmed = 0;
+ var lockTaken = false;
+
+ try
+ {
+ var currentLimit = _levels[currentLevel];
+
+ _lock.Enter(ref lockTaken);
+
+ for (var i = previousLimit; i < currentLimit; i++)
+ {
+ if (_buffers[i] != null)
+ {
+ _buffers[i] = null;
+ trimmed++;
+ }
+ }
+
+ if (_index > previousLimit)
+ {
+ _index = previousLimit;
+ }
+ }
+ finally
+ {
+ if (lockTaken)
+ {
+ _lock.Exit(false);
+ }
+ }
+
+ _currentLevel = previousLevel;
+
+ var log = Log;
+ if (log.IsEnabled())
+ {
+ log.PoolTrimmed(_poolId, trimmed, previousLimit, _inUse);
+ }
+ }
+
+ public void Dispose() => _trimTimer.Dispose();
+ }
+}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentPoolEventSource.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentPoolEventSource.cs
new file mode 100644
index 00000000000..286bf765ab0
--- /dev/null
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentPoolEventSource.cs
@@ -0,0 +1,95 @@
+using System.Diagnostics.Tracing;
+
+namespace HotChocolate.Fusion.Text.Json;
+
+[EventSource(Name = "HotChocolate-Fusion-PathSegmentPool")]
+internal sealed class PathSegmentPoolEventSource : EventSource
+{
+ public static readonly PathSegmentPoolEventSource Log = new();
+
+ private PathSegmentPoolEventSource() { }
+
+ [Event(
+ eventId: 1,
+ Level = EventLevel.Informational,
+ Message = "Path segment pool created (PoolId={0}, SegmentSize={1}, Arrays={2}, TotalBytes={3})")]
+ public void PoolCreated(int poolId, int segmentSize, int totalArrays, long totalBytes)
+ {
+ if (IsEnabled())
+ {
+ WriteEvent(1, poolId, segmentSize, totalArrays, totalBytes);
+ }
+ }
+
+ [Event(
+ eventId: 2,
+ Level = EventLevel.Verbose,
+ Message = "Segment rented (ArrayId={0}, Length={1}, PoolId={2}, InUse={3})")]
+ public void SegmentRented(int arrayId, int arrayLength, int poolId, int inUse)
+ {
+ if (IsEnabled(EventLevel.Verbose, EventKeywords.None))
+ {
+ WriteEvent(2, arrayId, arrayLength, poolId, inUse);
+ }
+ }
+
+ [Event(
+ eventId: 3,
+ Level = EventLevel.Verbose,
+ Message = "Segment returned (ArrayId={0}, Length={1}, PoolId={2}, InUse={3})")]
+ public void SegmentReturned(int arrayId, int arrayLength, int poolId, int inUse)
+ {
+ if (IsEnabled(EventLevel.Verbose, EventKeywords.None))
+ {
+ WriteEvent(3, arrayId, arrayLength, poolId, inUse);
+ }
+ }
+
+ [Event(
+ eventId: 4,
+ Level = EventLevel.Warning,
+ Message = "Path segment pool exhausted (PoolId={0}, MaxArrays={1})")]
+ public void PoolExhausted(int poolId, int maxArrays)
+ {
+ if (IsEnabled())
+ {
+ WriteEvent(4, poolId, maxArrays);
+ }
+ }
+
+ [Event(
+ eventId: 5,
+ Level = EventLevel.Informational,
+ Message = "Segment dropped - pool full (ArrayId={0}, Length={1}, PoolId={2})")]
+ public void SegmentDropped(int arrayId, int arrayLength, int poolId)
+ {
+ if (IsEnabled())
+ {
+ WriteEvent(5, arrayId, arrayLength, poolId);
+ }
+ }
+
+ [Event(
+ eventId: 6,
+ Level = EventLevel.Informational,
+ Message = "Segment allocated (ArrayId={0}, Length={1}, PoolId={2})")]
+ public void SegmentAllocated(int arrayId, int arrayLength, int poolId)
+ {
+ if (IsEnabled())
+ {
+ WriteEvent(6, arrayId, arrayLength, poolId);
+ }
+ }
+
+ [Event(
+ eventId: 7,
+ Level = EventLevel.Informational,
+ Message = "Path segment pool trimmed (PoolId={0}, Trimmed={1}, Remaining={2}, InUse={3})")]
+ public void PoolTrimmed(int poolId, int trimmed, int remaining, int inUse)
+ {
+ if (IsEnabled())
+ {
+ WriteEvent(7, poolId, trimmed, remaining, inUse);
+ }
+ }
+}
diff --git a/src/HotChocolate/Fusion/test/Fusion.Diagnostics.Tests/__snapshots__/FusionActivityExecutionDiagnosticListenerTests.Source_Schema_Transport_Error.snap b/src/HotChocolate/Fusion/test/Fusion.Diagnostics.Tests/__snapshots__/FusionActivityExecutionDiagnosticListenerTests.Source_Schema_Transport_Error.snap
index 9ccdbfeb3ed..e2812d61e7e 100644
--- a/src/HotChocolate/Fusion/test/Fusion.Diagnostics.Tests/__snapshots__/FusionActivityExecutionDiagnosticListenerTests.Source_Schema_Transport_Error.snap
+++ b/src/HotChocolate/Fusion/test/Fusion.Diagnostics.Tests/__snapshots__/FusionActivityExecutionDiagnosticListenerTests.Source_Schema_Transport_Error.snap
@@ -153,7 +153,7 @@
},
{
"Key": "exception.stacktrace",
- "Value": "System.Net.Http.HttpRequestException: Response status code does not indicate success: 500 (Internal Server Error).\n at System.Net.Http.HttpResponseMessage.EnsureSuccessStatusCode()\n at HotChocolate.Fusion.Transport.Http.GraphQLHttpResponse.ReadAsResultAsync(CancellationToken cancellationToken) in GraphQLHttpResponse.cs:line 292\n at HotChocolate.Fusion.Execution.Clients.SourceSchemaHttpClient.Response.ReadAsResultStreamAsync(CancellationToken cancellationToken)+MoveNext() in SourceSchemaHttpClient.cs:line 577\n at HotChocolate.Fusion.Execution.Clients.SourceSchemaHttpClient.Response.ReadAsResultStreamAsync(CancellationToken cancellationToken)+System.Threading.Tasks.Sources.IValueTaskSource.GetResult()\n at HotChocolate.Fusion.Execution.Nodes.OperationExecutionNode.OnExecuteAsync(OperationPlanContext context, CancellationToken cancellationToken) in OperationExecutionNode.cs:line 159\n at HotChocolate.Fusion.Execution.Nodes.OperationExecutionNode.OnExecuteAsync(OperationPlanContext context, CancellationToken cancellationToken) in OperationExecutionNode.cs:line 159"
+ "Value": "System.Net.Http.HttpRequestException: Response status code does not indicate success: 500 (Internal Server Error).\n at System.Net.Http.HttpResponseMessage.EnsureSuccessStatusCode()\n at HotChocolate.Fusion.Transport.Http.GraphQLHttpResponse.ReadAsResultAsync(CancellationToken cancellationToken) in GraphQLHttpResponse.cs:line 292\n at HotChocolate.Fusion.Execution.Clients.SourceSchemaHttpClient.Response.ReadAsResultStreamAsync(CancellationToken cancellationToken)+MoveNext() in SourceSchemaHttpClient.cs:line 578\n at HotChocolate.Fusion.Execution.Clients.SourceSchemaHttpClient.Response.ReadAsResultStreamAsync(CancellationToken cancellationToken)+System.Threading.Tasks.Sources.IValueTaskSource.GetResult()\n at HotChocolate.Fusion.Execution.Nodes.OperationExecutionNode.OnExecuteAsync(OperationPlanContext context, CancellationToken cancellationToken) in OperationExecutionNode.cs:line 160\n at HotChocolate.Fusion.Execution.Nodes.OperationExecutionNode.OnExecuteAsync(OperationPlanContext context, CancellationToken cancellationToken) in OperationExecutionNode.cs:line 160"
},
{
"Key": "exception.type",
diff --git a/src/HotChocolate/Fusion/test/Fusion.Execution.Tests/Execution/Clients/SourceSchemaRequestDispatcherTests.cs b/src/HotChocolate/Fusion/test/Fusion.Execution.Tests/Execution/Clients/SourceSchemaRequestDispatcherTests.cs
index f360c342e76..64ed1a6c05d 100644
--- a/src/HotChocolate/Fusion/test/Fusion.Execution.Tests/Execution/Clients/SourceSchemaRequestDispatcherTests.cs
+++ b/src/HotChocolate/Fusion/test/Fusion.Execution.Tests/Execution/Clients/SourceSchemaRequestDispatcherTests.cs
@@ -8,6 +8,7 @@
using HotChocolate.Fusion.Execution;
using HotChocolate.Fusion.Execution.Clients;
using HotChocolate.Fusion.Execution.Nodes;
+using HotChocolate.Fusion.Execution.Results;
using HotChocolate.Fusion.Types;
using HotChocolate.Language;
using Microsoft.Extensions.DependencyInjection;
@@ -234,6 +235,9 @@ private static OperationPlanContext CreateContext(ISourceSchemaClient client)
.AddSingleton(
NoopFusionExecutionDiagnosticEvents.Instance)
.AddSingleton(new DefaultErrorHandler([]))
+ .AddSingleton(new FetchResultStorePool(
+ levels: [4, 16, 64],
+ trimInterval: TimeSpan.FromMinutes(5)))
.BuildServiceProvider();
var schemaFeatures = new FeatureCollection();
diff --git a/src/HotChocolate/Utilities/src/Utilities.Buffers/JsonMemory.cs b/src/HotChocolate/Utilities/src/Utilities.Buffers/JsonMemory.cs
index af1e01d783a..e7a996d8c19 100644
--- a/src/HotChocolate/Utilities/src/Utilities.Buffers/JsonMemory.cs
+++ b/src/HotChocolate/Utilities/src/Utilities.Buffers/JsonMemory.cs
@@ -19,7 +19,7 @@ internal static class JsonMemory
arraySize: BufferSize,
[128, 768, 3072],
trimInterval: TimeSpan.FromMinutes(5),
- preAllocate: true);
+ preAllocate: false);
private static readonly ArrayPool s_chunkPool = ArrayPool.Shared;
public static void Reconfigure(Func factory)