From b5bb6ae099a7857cd709f9feb4af05fbca081bc0 Mon Sep 17 00:00:00 2001 From: Michael Staib Date: Fri, 13 Mar 2026 20:50:15 +0100 Subject: [PATCH 01/13] [Fusion] Reduce Path Allocations --- .../Clients/SourceSchemaHttpClient.cs | 16 +- .../Execution/Clients/SourceSchemaResult.cs | 15 +- .../Nodes/OperationBatchExecutionNode.cs | 5 +- .../Execution/Nodes/OperationExecutionNode.cs | 5 +- .../JsonOperationPlanFormatter.cs | 42 +++-- .../Execution/OperationPlanContext.cs | 60 ++++++- .../Execution/Results/FetchResultStore.cs | 155 ++++++++++++++---- .../Execution/Results/ValueCompletion.cs | 20 ++- .../Execution/VariableValues.cs | 5 +- .../Fusion.Execution/Text/Json/CompactPath.cs | 81 +++++++++ .../Text/Json/CompactPathBuilder.cs | 82 +++++++++ .../Text/Json/CompositeResultDocument.cs | 17 +- .../Text/Json/CompositeResultElement.cs | 16 +- 13 files changed, 424 insertions(+), 95 deletions(-) create mode 100644 src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs create mode 100644 src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPathBuilder.cs diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaHttpClient.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaHttpClient.cs index 2ddcd462d8e..077349e73b2 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaHttpClient.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaHttpClient.cs @@ -386,19 +386,19 @@ private static int ResolveVariableIndex( private static bool TryGetResultPath( SourceSchemaClientRequest request, int variableIndex, - out Path path, - out ImmutableArray additionalPaths) + out CompactPath path, + out ImmutableArray additionalPaths) { if (request.Variables.Length == 0) { - path = Path.Root; + path = CompactPath.Root; additionalPaths = []; return true; } if ((uint)variableIndex >= (uint)request.Variables.Length) { - path = Path.Root; + path = 
CompactPath.Root; additionalPaths = []; return false; } @@ -489,8 +489,8 @@ private void WriteResultToChannel( OperationPlanContext context, ExecutionNode node, NodeResponse nodeResponse, - Path path, - ImmutableArray additionalPaths, + CompactPath path, + ImmutableArray additionalPaths, SourceResultDocument document) { var sourceSchemaResult = additionalPaths.IsDefaultOrEmpty @@ -561,7 +561,7 @@ public override async IAsyncEnumerable ReadAsResultStreamAsy { await foreach (var result in response.ReadAsResultStreamAsync().WithCancellation(cancellationToken)) { - var sourceSchemaResult = new SourceSchemaResult(Path.Root, result); + var sourceSchemaResult = new SourceSchemaResult(CompactPath.Root, result); configuration.OnSourceSchemaResult?.Invoke(context, node, sourceSchemaResult); @@ -575,7 +575,7 @@ public override async IAsyncEnumerable ReadAsResultStreamAsy case 0: { var result = await response.ReadAsResultAsync(cancellationToken); - var sourceSchemaResult = new SourceSchemaResult(Path.Root, result); + var sourceSchemaResult = new SourceSchemaResult(CompactPath.Root, result); configuration.OnSourceSchemaResult?.Invoke(context, node, sourceSchemaResult); diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaResult.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaResult.cs index 4e729741cb7..0561105e458 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaResult.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaResult.cs @@ -26,22 +26,21 @@ public sealed class SourceSchemaResult : IDisposable /// Whether this is the final message in a streaming response. /// Any additional paths where this result should also be merged. 
public SourceSchemaResult( - Path path, + CompactPath path, SourceResultDocument document, FinalMessage final = FinalMessage.Undefined, - ImmutableArray additionalPaths = default) + ImmutableArray additionalPaths = default) : this(path, document, final, ownsDocument: true, additionalPaths) { } private SourceSchemaResult( - Path path, + CompactPath path, SourceResultDocument document, FinalMessage final, bool ownsDocument, - ImmutableArray additionalPaths) + ImmutableArray additionalPaths) { - ArgumentNullException.ThrowIfNull(path); ArgumentNullException.ThrowIfNull(document); _document = document; @@ -54,13 +53,13 @@ private SourceSchemaResult( /// /// The primary path in the composite result into which this source schema result will be merged. /// - public Path Path { get; } + public CompactPath Path { get; } /// /// Additional paths where this result should also be merged, used when a single source /// schema response satisfies multiple selection sets at different locations. /// - public ImmutableArray AdditionalPaths { get; } + public ImmutableArray AdditionalPaths { get; } /// /// The data element of the source schema response, or an empty element if the @@ -132,7 +131,7 @@ public SourceResultElement Extensions /// of the underlying document. Used internally when the same result needs to be referenced /// at a different location in the composite result. 
/// - internal SourceSchemaResult WithPath(Path path) + internal SourceSchemaResult WithPath(CompactPath path) => new(path, _document, Final, ownsDocument: false, additionalPaths: []); /// diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationBatchExecutionNode.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationBatchExecutionNode.cs index 29a6d3862ba..194d165d791 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationBatchExecutionNode.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationBatchExecutionNode.cs @@ -3,6 +3,7 @@ using System.Runtime.InteropServices; using HotChocolate.Execution; using HotChocolate.Fusion.Execution.Clients; +using HotChocolate.Fusion.Text.Json; namespace HotChocolate.Fusion.Execution.Nodes; @@ -294,7 +295,7 @@ private static void AddErrors( pathBufferLength += 1 + variables[i].AdditionalPaths.Length; } - var pathBuffer = ArrayPool.Shared.Rent(pathBufferLength); + var pathBuffer = ArrayPool.Shared.Rent(pathBufferLength); try { @@ -315,7 +316,7 @@ private static void AddErrors( finally { pathBuffer.AsSpan(0, pathBufferLength).Clear(); - ArrayPool.Shared.Return(pathBuffer); + ArrayPool.Shared.Return(pathBuffer); } } } diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationExecutionNode.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationExecutionNode.cs index aae561d3d1d..1e976263f18 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationExecutionNode.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationExecutionNode.cs @@ -6,6 +6,7 @@ using HotChocolate.Execution; using HotChocolate.Fusion.Diagnostics; using HotChocolate.Fusion.Execution.Clients; +using HotChocolate.Fusion.Text.Json; namespace HotChocolate.Fusion.Execution.Nodes; @@ -343,7 +344,7 @@ private static void AddErrors( pathBufferLength += 1 + 
variables[i].AdditionalPaths.Length; } - var pathBuffer = ArrayPool.Shared.Rent(pathBufferLength); + var pathBuffer = ArrayPool.Shared.Rent(pathBufferLength); try { @@ -364,7 +365,7 @@ private static void AddErrors( finally { pathBuffer.AsSpan(0, pathBufferLength).Clear(); - ArrayPool.Shared.Return(pathBuffer); + ArrayPool.Shared.Return(pathBuffer); } } } diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanFormatter.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanFormatter.cs index 956c1d4388d..e03b1d6eace 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanFormatter.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanFormatter.cs @@ -58,7 +58,7 @@ public void Format(IBufferWriter writer, OperationPlan plan, OperationPlan } jsonWriter.WritePropertyName("nodes"); - WriteNodes(jsonWriter, plan.AllNodes, trace); + WriteNodes(jsonWriter, plan.AllNodes, trace, plan.Operation); jsonWriter.WriteEndObject(); } @@ -72,7 +72,7 @@ internal void Format(IBufferWriter writer, Operation operation, ImmutableA WriteOperation(jsonWriter, operation); jsonWriter.WritePropertyName("nodes"); - WriteNodes(jsonWriter, allNodes, null); + WriteNodes(jsonWriter, allNodes, null, operation); jsonWriter.WriteEndObject(); } @@ -101,7 +101,8 @@ private static void WriteOperation( private static void WriteNodes( Utf8JsonWriter jsonWriter, ImmutableArray allNodes, - OperationPlanTrace? trace) + OperationPlanTrace? 
trace, + Operation operation) { jsonWriter.WriteStartArray(); @@ -113,19 +114,19 @@ private static void WriteNodes( switch (node) { case OperationExecutionNode operationNode: - WriteOperationNode(jsonWriter, operationNode, nodeTrace); + WriteOperationNode(jsonWriter, operationNode, nodeTrace, operation); break; case OperationBatchExecutionNode batchNode: - WriteOperationBatchNode(jsonWriter, batchNode, nodeTrace); + WriteOperationBatchNode(jsonWriter, batchNode, nodeTrace, operation); break; case IntrospectionExecutionNode introspectionNode: - WriteIntrospectionNode(jsonWriter, introspectionNode, nodeTrace); + WriteIntrospectionNode(jsonWriter, introspectionNode, nodeTrace, operation); break; case NodeFieldExecutionNode nodeExecutionNode: - WriteNodeFieldNode(jsonWriter, nodeExecutionNode, nodeTrace); + WriteNodeFieldNode(jsonWriter, nodeExecutionNode, nodeTrace, operation); break; } } @@ -136,7 +137,8 @@ private static void WriteNodes( private static void WriteOperationNode( Utf8JsonWriter jsonWriter, OperationExecutionNode node, - ExecutionNodeTrace? trace) + ExecutionNodeTrace? trace, + Operation operation) { jsonWriter.WriteStartObject(); jsonWriter.WriteNumber("id", node.Id); @@ -229,7 +231,7 @@ private static void WriteOperationNode( jsonWriter.WriteEndArray(); } - TryWriteNodeTrace(jsonWriter, trace); + TryWriteNodeTrace(jsonWriter, trace, operation); jsonWriter.WriteEndObject(); } @@ -237,7 +239,8 @@ private static void WriteOperationNode( private static void WriteOperationBatchNode( Utf8JsonWriter jsonWriter, OperationBatchExecutionNode node, - ExecutionNodeTrace? trace) + ExecutionNodeTrace? 
trace, + Operation operation) { jsonWriter.WriteStartObject(); jsonWriter.WriteNumber("id", node.Id); @@ -334,7 +337,7 @@ private static void WriteOperationBatchNode( jsonWriter.WriteEndArray(); } - TryWriteNodeTrace(jsonWriter, trace); + TryWriteNodeTrace(jsonWriter, trace, operation); jsonWriter.WriteEndObject(); } @@ -342,7 +345,8 @@ private static void WriteOperationBatchNode( private static void WriteIntrospectionNode( Utf8JsonWriter jsonWriter, IntrospectionExecutionNode node, - ExecutionNodeTrace? trace) + ExecutionNodeTrace? trace, + Operation operation) { jsonWriter.WriteStartObject(); jsonWriter.WriteNumber("id", node.Id); @@ -363,12 +367,16 @@ private static void WriteIntrospectionNode( TryWriteConditions(jsonWriter, node); - TryWriteNodeTrace(jsonWriter, trace); + TryWriteNodeTrace(jsonWriter, trace, operation); jsonWriter.WriteEndObject(); } - private static void WriteNodeFieldNode(Utf8JsonWriter jsonWriter, NodeFieldExecutionNode node, ExecutionNodeTrace? trace) + private static void WriteNodeFieldNode( + Utf8JsonWriter jsonWriter, + NodeFieldExecutionNode node, + ExecutionNodeTrace? trace, + Operation operation) { jsonWriter.WriteStartObject(); jsonWriter.WriteNumber("id", node.Id); @@ -390,12 +398,12 @@ private static void WriteNodeFieldNode(Utf8JsonWriter jsonWriter, NodeFieldExecu TryWriteConditions(jsonWriter, node); - TryWriteNodeTrace(jsonWriter, trace); + TryWriteNodeTrace(jsonWriter, trace, operation); jsonWriter.WriteEndObject(); } - private static void TryWriteNodeTrace(Utf8JsonWriter jsonWriter, ExecutionNodeTrace? trace) + private static void TryWriteNodeTrace(Utf8JsonWriter jsonWriter, ExecutionNodeTrace? 
trace, Operation operation) { if (trace is not null) { @@ -413,7 +421,7 @@ private static void TryWriteNodeTrace(Utf8JsonWriter jsonWriter, ExecutionNodeTr foreach (var variableSet in trace.VariableSets) { - jsonWriter.WritePropertyName(variableSet.Path.ToString()); + jsonWriter.WritePropertyName(variableSet.Path.ToPath(operation).Print()); WriteObjectValueNode(jsonWriter, variableSet.Values); } diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs index fe6ed344502..b9c5f7b29b4 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs @@ -13,6 +13,7 @@ using HotChocolate.Fusion.Text.Json; using HotChocolate.Fusion.Types; using HotChocolate.Language; +using HotChocolate.Types; using Microsoft.Extensions.DependencyInjection; namespace HotChocolate.Fusion.Execution; @@ -231,7 +232,7 @@ internal ImmutableArray CreateVariableValueSets( } var variableValues = GetPathThroughVariables(forwardedVariables); - return [new VariableValues(Path.Root, new ObjectValueNode(variableValues))]; + return [new VariableValues(CompactPath.Root, new ObjectValueNode(variableValues))]; } else { @@ -253,7 +254,7 @@ internal ImmutableArray CreateVariableValueSets( } var variableValues = GetPathThroughVariables(forwardedVariables); - return [new VariableValues(Path.Root, new ObjectValueNode(variableValues))]; + return [new VariableValues(CompactPath.Root, new ObjectValueNode(variableValues))]; } else { @@ -262,21 +263,56 @@ internal ImmutableArray CreateVariableValueSets( } } - private static Path ToResultPath(SelectionPath selectionSet) + private CompactPath ToResultPath(SelectionPath selectionSet) { - var resultPath = Path.Root; + if (selectionSet.IsRoot) + { + return CompactPath.Root; + } + + Span buffer = stackalloc int[32]; + var builder = new 
CompactPathBuilder(buffer); + var operation = OperationPlan.Operation; + var currentSelectionSet = operation.RootSelectionSet; + Selection? currentSelection = null; for (var i = 0; i < selectionSet.Length; i++) { var segment = selectionSet[i]; - if (segment.Kind is SelectionPathSegmentKind.Root or SelectionPathSegmentKind.Field) + if (segment.Kind is SelectionPathSegmentKind.Root) { - resultPath = resultPath.Append(segment.Name); + continue; + } + + if (segment.Kind is SelectionPathSegmentKind.InlineFragment) + { + if (currentSelection is null) + { + continue; + } + + var objectType = Schema.Types.GetType(segment.Name); + currentSelectionSet = operation.GetSelectionSet(currentSelection, objectType); + continue; + } + + if (!currentSelectionSet.TryGetSelection(segment.Name, out var selection)) + { + throw new InvalidOperationException( + $"Could not resolve selection path segment '{segment.Name}'."); + } + + builder.AppendField(selection.Id); + currentSelection = selection; + + if (selection.Type.NamedType() is IObjectTypeDefinition objectTypeForSelection) + { + currentSelectionSet = operation.GetSelectionSet(selection, objectTypeForSelection); } } - return resultPath; + return builder.ToPath(); } internal void AddPartialResults( @@ -314,6 +350,16 @@ internal void AddErrors(IError error, ReadOnlySpan responseNames, params } } + internal void AddErrors(IError error, ReadOnlySpan responseNames, ReadOnlySpan paths) + { + var canExecutionContinue = _resultStore.AddErrors(error, responseNames, paths); + + if (!canExecutionContinue) + { + ExecutionState.CancelProcessing(); + } + } + internal PooledArrayWriter CreateRentedBuffer() => _resultStore.CreateRentedBuffer(); diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs index 91b5524efe1..6e2d6b0498c 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs 
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs @@ -329,6 +329,47 @@ public bool AddErrors(IError error, ReadOnlySpan responseNames, params R ref var end = ref Unsafe.Add(ref path, paths.Length); var resultData = _result.Data; + while (Unsafe.IsAddressLessThan(ref path, ref end)) + { + if (resultData.IsInvalidated) + { + return false; + } + + var element = path.IsRoot ? resultData : GetStartObjectResult(path); + if (element.IsNullOrInvalidated) + { + goto AddErrors_Next; + } + + var canExecutionContinue = + _valueCompletion.BuildErrorResult( + element, + responseNames, + error, + element.CompactPath); + if (!canExecutionContinue) + { + resultData.Invalidate(); + return false; + } + +AddErrors_Next: + path = ref Unsafe.Add(ref path, 1)!; + } + } + + return true; + } + + public bool AddErrors(IError error, ReadOnlySpan responseNames, ReadOnlySpan paths) + { + lock (_lock) + { + ref var path = ref MemoryMarshal.GetReference(paths); + ref var end = ref Unsafe.Add(ref path, paths.Length); + var resultData = _result.Data; + while (Unsafe.IsAddressLessThan(ref path, ref end)) { if (resultData.IsInvalidated) @@ -364,8 +405,8 @@ public bool AddErrors(IError error, ReadOnlySpan responseNames, params R private bool SaveSafeResult( CompositeResultElement resultData, - Path path, - ReadOnlySpan additionalPaths, + CompactPath path, + ReadOnlySpan additionalPaths, SourceResultElement dataElement, ErrorTrie? errorTrie, ReadOnlySpan responseNames) @@ -388,7 +429,7 @@ private bool SaveSafeResult( private bool SaveSafeResult( CompositeResultElement resultData, - Path path, + CompactPath path, SourceResultElement dataElement, ErrorTrie? errorTrie, ReadOnlySpan responseNames) @@ -624,7 +665,7 @@ private ImmutableArray BuildVariableValueSets( VariableValues[]? variableValueSets = null; Dictionary? seen = null; - List?[]? additionalPaths = null; + List?[]? 
additionalPaths = null; var nextIndex = 0; foreach (var result in elements) @@ -647,15 +688,15 @@ private ImmutableArray BuildVariableValueSets( if (seen.TryGetValue(variables, out var existingIndex)) { - additionalPaths ??= new List?[elements.Length]; - (additionalPaths[existingIndex] ??= []).Add(result.Path); + additionalPaths ??= new List?[elements.Length]; + (additionalPaths[existingIndex] ??= []).Add(result.CompactPath); continue; } seen[variables] = nextIndex; } - variableValueSets[nextIndex++] = new VariableValues(result.Path, variables); + variableValueSets[nextIndex++] = new VariableValues(result.CompactPath, variables); } if (buffer is not null) @@ -692,7 +733,7 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementFa VariableValues[]? variableValueSets = null; Dictionary? seen = null; Dictionary? seenStrings = null; - List?[]? additionalPaths = null; + List?[]? additionalPaths = null; var nextIndex = 0; var isNonNullRequirement = requirement.Type.Kind is SyntaxKind.NonNullType; @@ -727,8 +768,8 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementFa if (seenStrings is not null && seenStrings.TryGetValue(stringValue, out var existingIndex)) { - additionalPaths ??= new List?[elements.Length]; - (additionalPaths[existingIndex] ??= []).Add(result.Path); + additionalPaths ??= new List?[elements.Length]; + (additionalPaths[existingIndex] ??= []).Add(result.CompactPath); continue; } @@ -743,8 +784,8 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementFa if (seen is not null && seen.TryGetValue(mappedValue, out var existingIndex)) { - additionalPaths ??= new List?[elements.Length]; - (additionalPaths[existingIndex] ??= []).Add(result.Path); + additionalPaths ??= new List?[elements.Length]; + (additionalPaths[existingIndex] ??= []).Add(result.CompactPath); continue; } @@ -753,7 +794,7 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementFa } variableValueSets[nextIndex++] = new VariableValues( - result.Path, + 
result.CompactPath, new ObjectValueNode([ new ObjectFieldNode( requirement.Key, @@ -771,7 +812,7 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementSl { VariableValues[]? variableValueSets = null; Dictionary? seen = null; - List?[]? additionalPaths = null; + List?[]? additionalPaths = null; var nextIndex = 0; foreach (var result in elements) @@ -799,8 +840,8 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementSl if (seen.TryGetValue(value, out var existingIndex)) { - additionalPaths ??= new List?[elements.Length]; - (additionalPaths[existingIndex] ??= []).Add(result.Path); + additionalPaths ??= new List?[elements.Length]; + (additionalPaths[existingIndex] ??= []).Add(result.CompactPath); continue; } @@ -808,7 +849,7 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementSl } variableValueSets[nextIndex++] = new VariableValues( - result.Path, + result.CompactPath, new ObjectValueNode([new ObjectFieldNode(requirement.Key, value)])); } @@ -850,7 +891,7 @@ private ImmutableArray BuildVariableValueSetsTwoRequirementsFast { VariableValues[]? variableValueSets = null; Dictionary? seen = null; - List?[]? additionalPaths = null; + List?[]? additionalPaths = null; var nextIndex = 0; foreach (var result in elements) @@ -887,8 +928,8 @@ [new TwoValueNodeTuple( if (seen.TryGetValue(key, out var existingIndex)) { - additionalPaths ??= new List?[elements.Length]; - (additionalPaths[existingIndex] ??= []).Add(result.Path); + additionalPaths ??= new List?[elements.Length]; + (additionalPaths[existingIndex] ??= []).Add(result.CompactPath); continue; } @@ -896,7 +937,7 @@ [new TwoValueNodeTuple( } variableValueSets[nextIndex++] = new VariableValues( - result.Path, + result.CompactPath, new ObjectValueNode([ new ObjectFieldNode(requirement1.Key, mappedValue1), new ObjectFieldNode(requirement2.Key, mappedValue2) @@ -914,7 +955,7 @@ private ImmutableArray BuildVariableValueSetsTwoRequirementsSlow { VariableValues[]? 
variableValueSets = null; Dictionary? seen = null; - List?[]? additionalPaths = null; + List?[]? additionalPaths = null; var nextIndex = 0; foreach (var result in elements) @@ -951,8 +992,8 @@ [new TwoValueNodeTuple( if (seen.TryGetValue(key, out var existingIndex)) { - additionalPaths ??= new List?[elements.Length]; - (additionalPaths[existingIndex] ??= []).Add(result.Path); + additionalPaths ??= new List?[elements.Length]; + (additionalPaths[existingIndex] ??= []).Add(result.CompactPath); continue; } @@ -960,7 +1001,7 @@ [new TwoValueNodeTuple( } variableValueSets[nextIndex++] = new VariableValues( - result.Path, + result.CompactPath, new ObjectValueNode([ new ObjectFieldNode(requirement1.Key, value1), new ObjectFieldNode(requirement2.Key, value2) @@ -1012,7 +1053,7 @@ private ImmutableArray BuildVariableValueSetsThreeRequirementsFa { VariableValues[]? variableValueSets = null; Dictionary? seen = null; - List?[]? additionalPaths = null; + List?[]? additionalPaths = null; var nextIndex = 0; foreach (var result in elements) @@ -1059,8 +1100,8 @@ [new ThreeValueNodeTuple( if (seen.TryGetValue(key, out var existingIndex)) { - additionalPaths ??= new List?[elements.Length]; - (additionalPaths[existingIndex] ??= []).Add(result.Path); + additionalPaths ??= new List?[elements.Length]; + (additionalPaths[existingIndex] ??= []).Add(result.CompactPath); continue; } @@ -1068,7 +1109,7 @@ [new ThreeValueNodeTuple( } variableValueSets[nextIndex++] = new VariableValues( - result.Path, + result.CompactPath, new ObjectValueNode([ new ObjectFieldNode(requirement1.Key, mappedValue1), new ObjectFieldNode(requirement2.Key, mappedValue2), @@ -1088,7 +1129,7 @@ private ImmutableArray BuildVariableValueSetsThreeRequirementsSl { VariableValues[]? variableValueSets = null; Dictionary? seen = null; - List?[]? additionalPaths = null; + List?[]? 
additionalPaths = null; var nextIndex = 0; foreach (var result in elements) @@ -1135,8 +1176,8 @@ [new ThreeValueNodeTuple( if (seen.TryGetValue(key, out var existingIndex)) { - additionalPaths ??= new List?[elements.Length]; - (additionalPaths[existingIndex] ??= []).Add(result.Path); + additionalPaths ??= new List?[elements.Length]; + (additionalPaths[existingIndex] ??= []).Add(result.CompactPath); continue; } @@ -1144,7 +1185,7 @@ [new ThreeValueNodeTuple( } variableValueSets[nextIndex++] = new VariableValues( - result.Path, + result.CompactPath, new ObjectValueNode([ new ObjectFieldNode(requirement1.Key, value1), new ObjectFieldNode(requirement2.Key, value2), @@ -1385,6 +1426,13 @@ private CompositeResultElement GetStartObjectResult(Path path) return result.ValueKind is JsonValueKind.Object or JsonValueKind.Null ? result : default; } + private CompositeResultElement GetStartObjectResult(CompactPath path) + { + var result = GetStartResult(path); + Debug.Assert(result.ValueKind is JsonValueKind.Object or JsonValueKind.Null or JsonValueKind.Undefined); + return result.ValueKind is JsonValueKind.Object or JsonValueKind.Null ? 
result : default; + } + private CompositeResultElement GetStartResult(Path path) { if (path.IsRoot) @@ -1421,6 +1469,45 @@ private CompositeResultElement GetStartResult(Path path) $"The path segment '{parent}' does not exist in the data."); } + private CompositeResultElement GetStartResult(CompactPath path) + { + var element = _result.Data; + + for (var i = 0; i < path.Length; i++) + { + var segment = path[i]; + + if (element.ValueKind is JsonValueKind.Null) + { + return element; + } + + if (segment >= 0) + { + var selection = _operation.GetSelectionById(segment); + + if (!element.TryGetProperty(selection.ResponseName, out element)) + { + return default; + } + } + else + { + var index = ~segment; + + if (element.GetArrayLength() <= index) + { + throw new InvalidOperationException( + $"The path segment '[{index}]' does not exist in the data."); + } + + element = element[index]; + } + } + + return element; + } + public void Dispose() { if (_disposed) @@ -1453,7 +1540,7 @@ public int GetHashCode(IValueNode obj) private static ImmutableArray FinalizeVariableValueSets( VariableValues[]? variableValueSets, - List?[]? additionalPaths, + List?[]? 
additionalPaths, int nextIndex) { if (variableValueSets is null || nextIndex == 0) diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/ValueCompletion.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/ValueCompletion.cs index 71937f634a4..3652401abc0 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/ValueCompletion.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/ValueCompletion.cs @@ -59,7 +59,7 @@ public bool BuildResult( .SetMessage("Unexpected Execution Error") .Build(); - return BuildErrorResult(target, responseNames, error, target.Path); + return BuildErrorResult(target, responseNames, error, target.CompactPath); } foreach (var property in source.EnumerateObject()) @@ -102,8 +102,11 @@ public bool BuildErrorResult( CompositeResultElement target, ReadOnlySpan responseNames, IError error, - Path path) + CompactPath path) { + var operation = target.Operation; + var errorPath = path.ToPath(operation); + foreach (var responseName in responseNames) { if (!target.TryGetProperty(responseName, out var fieldResult) @@ -114,7 +117,7 @@ public bool BuildErrorResult( var selection = fieldResult.AssertSelection(); var errorWithPath = ErrorBuilder.FromError(error) - .SetPath(path.Append(responseName)) + .SetPath(errorPath.Append(responseName)) .AddLocation(selection.SyntaxNodes[0].Node) .Build(); errorWithPath = _errorHandler.Handle(errorWithPath); @@ -179,17 +182,19 @@ private bool TryCompleteValue( IError error; if (errorTrie?.FindFirstError() is { } errorFromPath) { + var path = target.CompactPath.ToPath(target.Operation); error = ErrorBuilder.FromError(errorFromPath) - .SetPath(target.Path) + .SetPath(path) .AddLocation(selection.SyntaxNodes[0].Node) .Build(); } else { + var path = target.CompactPath.ToPath(target.Operation); error = ErrorBuilder.New() .SetMessage("Cannot return null for non-nullable field.") .SetCode(ErrorCodes.Execution.NonNullViolation) - .SetPath(target.Path) 
+ .SetPath(path) .AddLocation(selection.SyntaxNodes[0].Node) .Build(); } @@ -217,8 +222,9 @@ private bool TryCompleteValue( // or with a path below it. if (errorTrie?.FindFirstError() is { } error) { + var path = target.CompactPath.ToPath(target.Operation); var errorWithPath = ErrorBuilder.FromError(error) - .SetPath(target.Path) + .SetPath(path) .AddLocation(selection.SyntaxNodes[0].Node) .Build(); errorWithPath = _errorHandler.Handle(errorWithPath); @@ -289,7 +295,7 @@ private bool TryCompleteList( if (errorTrieForIndex?.Error is { } error) { var errorWithPath = ErrorBuilder.FromError(error) - .SetPath(target.Path.Append(i)) + .SetPath(target.CompactPath.ToPath(target.Operation, i)) .AddLocation(selection.SyntaxNodes[0].Node) .Build(); errorWithPath = _errorHandler.Handle(errorWithPath); diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/VariableValues.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/VariableValues.cs index d572bb18e22..d7dcc86a1c6 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/VariableValues.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/VariableValues.cs @@ -1,12 +1,13 @@ using System.Collections.Immutable; +using HotChocolate.Fusion.Text.Json; using HotChocolate.Language; namespace HotChocolate.Fusion.Execution; -public sealed record VariableValues(Path Path, ObjectValueNode Values) +public sealed record VariableValues(CompactPath Path, ObjectValueNode Values) { /// /// Gets the additional paths that share the same variable values as the primary . 
/// - public ImmutableArray AdditionalPaths { get; init; } = []; + public ImmutableArray AdditionalPaths { get; init; } = []; } diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs new file mode 100644 index 00000000000..0b910de5570 --- /dev/null +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs @@ -0,0 +1,81 @@ +using HotChocolate.Fusion.Execution.Nodes; + +namespace HotChocolate.Fusion.Text.Json; + +/// +/// A compact, integer-based path representation for the Fusion execution engine. +/// Each segment is either a positive Selection ID (field) or a bitwise-NOT array index (negative). +/// +public readonly struct CompactPath : IEquatable +{ + public static CompactPath Root => default; + + private readonly int[]? _segments; + + internal CompactPath(int[] segments) + => _segments = segments; + + public ReadOnlySpan Segments + => _segments ?? ReadOnlySpan.Empty; + + public int Length => _segments?.Length ?? 0; + + public bool IsRoot => _segments is null; + + public int this[int index] => _segments![index]; + + public Path ToPath(Operation operation) + { + ArgumentNullException.ThrowIfNull(operation); + + var path = Path.Root; + + if (_segments is null) + { + return path; + } + + for (var i = 0; i < _segments.Length; i++) + { + var segment = _segments[i]; + + if (segment < 0) + { + path = path.Append(~segment); + } + else + { + path = path.Append(operation.GetSelectionById(segment).ResponseName); + } + } + + return path; + } + + public Path ToPath(Operation operation, int appendIndex) + => ToPath(operation).Append(appendIndex); + + public Path ToPath(Operation operation, string appendField) + => ToPath(operation).Append(appendField); + + public bool Equals(CompactPath other) + => Segments.SequenceEqual(other.Segments); + + public override bool Equals(object? 
obj) + => obj is CompactPath other && Equals(other); + + public override int GetHashCode() + { + var hashCode = new HashCode(); + + if (_segments is not null) + { + for (var i = 0; i < _segments.Length; i++) + { + hashCode.Add(_segments[i]); + } + } + + return hashCode.ToHashCode(); + } +} diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPathBuilder.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPathBuilder.cs new file mode 100644 index 00000000000..2a7cfdfb642 --- /dev/null +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPathBuilder.cs @@ -0,0 +1,82 @@ +using System.Buffers; + +namespace HotChocolate.Fusion.Text.Json; + +internal ref struct CompactPathBuilder +{ + private Span _span; + private int[]? _arrayFromPool; + private int _pos; + + public CompactPathBuilder(Span initialBuffer) + { + if (initialBuffer.Length == 0) + { + throw new ArgumentException("The initial buffer cannot be empty.", nameof(initialBuffer)); + } + + _span = initialBuffer; + _arrayFromPool = null; + _pos = 0; + } + + public void Append(int segment) + { + if (_pos == _span.Length) + { + Grow(); + } + + _span[_pos++] = segment; + } + + public void AppendField(int selectionId) + { + ArgumentOutOfRangeException.ThrowIfNegative(selectionId); + + Append(selectionId); + } + + public void AppendIndex(int arrayIndex) + { + ArgumentOutOfRangeException.ThrowIfNegative(arrayIndex); + + Append(~arrayIndex); + } + + public CompactPath ToPath() + { + if (_pos == 0) + { + ReturnPooledArray(); + return CompactPath.Root; + } + + var result = _span[.._pos].ToArray(); + ReturnPooledArray(); + return new CompactPath(result); + } + + private void ReturnPooledArray() + { + if (_arrayFromPool is not null) + { + ArrayPool.Shared.Return(_arrayFromPool); + _arrayFromPool = null; + } + } + + private void Grow() + { + var newArray = ArrayPool.Shared.Rent(_span.Length * 2); + _span[.._pos].CopyTo(newArray); + + if (_arrayFromPool is not null) + { + 
ArrayPool.Shared.Return(_arrayFromPool); + } + + _arrayFromPool = newArray; + _span = newArray; + } +} diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.cs index 1112eb3e30c..14e5a924282 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.cs @@ -117,15 +117,14 @@ internal int GetPropertyCount(Cursor current) return _metaDb.GetSizeOrLength(current); } - internal Path CreatePath(Cursor current) + internal CompactPath CreateCompactPath(Cursor current) { // Stop at root via IsRoot flag. if ((_metaDb.GetFlags(current) & ElementFlags.IsRoot) == ElementFlags.IsRoot) { - return Path.Root; + return CompactPath.Root; } - var cursorIndex = current.Index; Span chain = stackalloc Cursor[64]; var c = current; var written = 0; @@ -148,7 +147,8 @@ internal Path CreatePath(Cursor current) } } while (true); - var path = Path.Root; + Span pathBuffer = stackalloc int[32]; + var path = new CompactPathBuilder(pathBuffer); var parentTokenType = ElementTokenType.StartObject; chain = chain[..written]; @@ -160,7 +160,7 @@ internal Path CreatePath(Cursor current) if (tokenType == ElementTokenType.PropertyName) { - path = path.Append(GetSelection(c)!.ResponseName); + path.AppendField(GetSelection(c)!.Id); i--; // skip over the actual value } else if (chain.Length - 1 > i) @@ -173,16 +173,19 @@ internal Path CreatePath(Cursor current) var absChild = (c.Chunk * Cursor.RowsPerChunk) + c.Row; var absParent = parentCursor.Chunk * Cursor.RowsPerChunk + parentCursor.Row; var arrayIndex = absChild - (absParent + 1); - path = path.Append(arrayIndex); + path.AppendIndex(arrayIndex); } } parentTokenType = tokenType; } - return path; + return path.ToPath(); } + internal Path CreatePath(Cursor current) + => CreateCompactPath(current).ToPath(_operation); + 
internal CompositeResultElement GetParent(Cursor current) { // The null cursor represents the data object, which is the utmost root. diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultElement.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultElement.cs index 271cd4a11de..652bdbe26b3 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultElement.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultElement.cs @@ -182,6 +182,19 @@ public bool IsNullOrInvalidated } } + /// + /// Gets the compact path to this element within the result document. + /// + public CompactPath CompactPath + { + get + { + CheckValidInstance(); + + return _parent.CreateCompactPath(_cursor); + } + } + /// /// Gets the path to this element within the result document. /// @@ -191,7 +204,8 @@ public Path Path { CheckValidInstance(); - return _parent.CreatePath(_cursor); + var path = _parent.CreateCompactPath(_cursor); + return path.ToPath(_parent.GetOperation()); } } From 173d9dd8ff9014d1fb35471fd3779b360680cf4b Mon Sep 17 00:00:00 2001 From: Michael Staib Date: Sat, 14 Mar 2026 00:18:29 +0100 Subject: [PATCH 02/13] Added more pooling --- .../Execution/FusionOptions.cs | 22 ++ .../Execution/OperationPlanContext.cs | 11 +- .../Results/AdditionalPathAccumulator.cs | 132 ++++++++++++ .../Execution/Results/FetchResultStore.cs | 155 ++++++++------ .../Fusion.Execution/Text/Json/CompactPath.cs | 24 ++- .../Text/Json/CompactPathBuilder.cs | 57 +++-- .../Text/Json/CompositeResultDocument.cs | 9 +- .../Text/Json/PathSegmentLocalPool.cs | 102 +++++++++ .../Text/Json/PathSegmentMemory.cs | 27 +++ .../Text/Json/PathSegmentPool.cs | 198 ++++++++++++++++++ .../src/Utilities.Buffers/JsonMemory.cs | 2 +- 11 files changed, 649 insertions(+), 90 deletions(-) create mode 100644 src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/AdditionalPathAccumulator.cs create mode 100644 
src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentLocalPool.cs create mode 100644 src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentMemory.cs create mode 100644 src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentPool.cs diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/FusionOptions.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/FusionOptions.cs index 55d7495d564..7eaca8831dd 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/FusionOptions.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/FusionOptions.cs @@ -80,6 +80,27 @@ public int OperationDocumentCacheSize } } = 256; + /// + /// Gets or sets the initial capacity of the local path segment pool used during result composition. + /// 64 by default. 1 is the minimum. + /// + public int PathSegmentLocalPoolCapacity + { + get; + set + { + ExpectMutableOptions(); + + if (value < 1) + { + throw new ArgumentException( + "The path segment local pool capacity must be at least 1."); + } + + field = value; + } + } = 64; + /// /// Gets or sets the default error handling mode. /// by default. 
@@ -160,6 +181,7 @@ public FusionOptions Clone() OperationExecutionPlanCacheSize = OperationExecutionPlanCacheSize, OperationExecutionPlanCacheDiagnostics = OperationExecutionPlanCacheDiagnostics, OperationDocumentCacheSize = OperationDocumentCacheSize, + PathSegmentLocalPoolCapacity = PathSegmentLocalPoolCapacity, DefaultErrorHandlingMode = DefaultErrorHandlingMode, LazyInitialization = LazyInitialization, NodeIdSerializerFormat = NodeIdSerializerFormat, diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs index b9c5f7b29b4..3d800c4226b 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs @@ -72,7 +72,8 @@ public OperationPlanContext( errorHandler, operationPlan.Operation, requestContext.ErrorHandlingMode(), - IncludeFlags); + IncludeFlags, + requestContext.Schema.GetOptions().PathSegmentLocalPoolCapacity); _executionState = new ExecutionState(_collectTelemetry, cancellationTokenSource); _sourceSchemaDispatcher = new SourceSchemaRequestDispatcher(this); @@ -271,7 +272,7 @@ private CompactPath ToResultPath(SelectionPath selectionSet) } Span buffer = stackalloc int[32]; - var builder = new CompactPathBuilder(buffer); + var builder = new CompactPathBuilder(buffer, _resultStore._pathPool); var operation = OperationPlan.Operation; var currentSelectionSet = operation.RootSelectionSet; Selection? 
currentSelection = null; @@ -322,7 +323,11 @@ internal void AddPartialResults( bool containsErrors = true) { var canExecutionContinue = - _resultStore.AddPartialResults(sourcePath, results, responseNames, containsErrors); + _resultStore.AddPartialResults( + sourcePath, + results, + responseNames, + containsErrors); if (!canExecutionContinue) { diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/AdditionalPathAccumulator.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/AdditionalPathAccumulator.cs new file mode 100644 index 00000000000..d5bef11394b --- /dev/null +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/AdditionalPathAccumulator.cs @@ -0,0 +1,132 @@ +using System.Buffers; +using System.Runtime.InteropServices; +using HotChocolate.Fusion.Text.Json; + +namespace HotChocolate.Fusion.Execution.Results; + +/// +/// A flat, allocation-free accumulator for additional CompactPath entries +/// that replaces per-slot List<CompactPath> with ArrayPool-rented buffers. +/// Stores (slotIndex, path) pairs and produces ImmutableArray<CompactPath> +/// per slot via counting sort in ApplyTo. +/// +internal ref struct AdditionalPathAccumulator +{ + private CompactPath[]? _paths; + private int[]? _slotIndices; + private int _count; + + public readonly bool HasEntries => _count > 0; + + public void Add(int slotIndex, CompactPath path) + { + if (_paths is null) + { + _paths = ArrayPool.Shared.Rent(16); + _slotIndices = ArrayPool.Shared.Rent(16); + } + else if (_count == _paths.Length) + { + Grow(); + } + + _paths[_count] = path; + _slotIndices![_count] = slotIndex; + _count++; + } + + public void ApplyTo(VariableValues[] variableValueSets, int slotCount) + { + if (_count == 0) + { + return; + } + + // Count paths per slot. + var counts = slotCount <= 256 + ? 
stackalloc int[slotCount] + : new int[slotCount]; + + for (var i = 0; i < _count; i++) + { + counts[_slotIndices![i]]++; + } + + // Compute start offsets (exclusive prefix sum). + var offsets = slotCount <= 256 + ? stackalloc int[slotCount] + : new int[slotCount]; + + offsets[0] = 0; + for (var i = 1; i < slotCount; i++) + { + offsets[i] = offsets[i - 1] + counts[i - 1]; + } + + // Scatter paths into sorted order. + var writePos = slotCount <= 256 + ? stackalloc int[slotCount] + : new int[slotCount]; + offsets.CopyTo(writePos); + + var sorted = ArrayPool.Shared.Rent(_count); + + for (var i = 0; i < _count; i++) + { + var idx = _slotIndices![i]; + sorted[writePos[idx]++] = _paths![i]; + } + + // Build ImmutableArray for each non-empty slot from contiguous slices. + for (var slot = 0; slot < slotCount; slot++) + { + if (counts[slot] == 0) + { + continue; + } + + var array = sorted.AsSpan(offsets[slot], counts[slot]).ToArray(); + variableValueSets[slot] = variableValueSets[slot] with + { + AdditionalPaths = ImmutableCollectionsMarshal.AsImmutableArray(array) + }; + } + + sorted.AsSpan(0, _count).Clear(); + ArrayPool.Shared.Return(sorted); + } + + private void Grow() + { + var newSize = _paths!.Length * 2; + + var newPaths = ArrayPool.Shared.Rent(newSize); + _paths.AsSpan(0, _count).CopyTo(newPaths); + _paths.AsSpan(0, _count).Clear(); + ArrayPool.Shared.Return(_paths); + _paths = newPaths; + + var newIndices = ArrayPool.Shared.Rent(newSize); + _slotIndices.AsSpan(0, _count).CopyTo(newIndices); + ArrayPool.Shared.Return(_slotIndices!); + _slotIndices = newIndices; + } + + public void Dispose() + { + if (_paths is not null) + { + _paths.AsSpan(0, _count).Clear(); + ArrayPool.Shared.Return(_paths); + _paths = null; + } + + if (_slotIndices is not null) + { + ArrayPool.Shared.Return(_slotIndices); + _slotIndices = null; + } + + _count = 0; + } +} diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs 
b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs index 6e2d6b0498c..1cbdf302ef1 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs @@ -33,6 +33,7 @@ internal sealed class FetchResultStore : IDisposable private CompositeResultElement[] _collectTargetA = ArrayPool.Shared.Rent(64); private CompositeResultElement[] _collectTargetB = ArrayPool.Shared.Rent(64); private CompositeResultElement[] _collectTargetCombined = ArrayPool.Shared.Rent(64); + internal readonly PathSegmentLocalPool _pathPool; private CompositeResultDocument _result; private ValueCompletion _valueCompletion; private List? _errors; @@ -43,7 +44,8 @@ public FetchResultStore( IErrorHandler errorHandler, Operation operation, ErrorHandlingMode errorHandlingMode, - ulong includeFlags) + ulong includeFlags, + int pathSegmentLocalPoolCapacity) { ArgumentNullException.ThrowIfNull(schema); ArgumentNullException.ThrowIfNull(operation); @@ -53,8 +55,9 @@ public FetchResultStore( _operation = operation; _errorHandlingMode = errorHandlingMode; _includeFlags = includeFlags; + _pathPool = new PathSegmentLocalPool(pathSegmentLocalPoolCapacity); - _result = new CompositeResultDocument(operation, includeFlags); + _result = new CompositeResultDocument(operation, includeFlags, _pathPool); _valueCompletion = new ValueCompletion( this, @@ -70,7 +73,7 @@ public void Reset() { ObjectDisposedException.ThrowIf(_disposed, this); - _result = new CompositeResultDocument(_operation, _includeFlags); + _result = new CompositeResultDocument(_operation, _includeFlags, _pathPool); _errors?.Clear(); _valueCompletion = new ValueCompletion( @@ -164,14 +167,22 @@ public bool AddPartialResults( { var result = results[i]; - if (!SaveSafeResult( - resultData, - result.Path, - result.AdditionalPaths.AsSpan(), - dataElementsSpan[i], - errorTriesSpan[i], - responseNames)) + var 
success = SaveSafeResult( + resultData, + result.Path, + result.AdditionalPaths.AsSpan(), + dataElementsSpan[i], + errorTriesSpan[i], + responseNames); + ReturnPathSegments(result); + + if (!success) { + for (var j = i + 1; j < results.Length; j++) + { + ReturnPathSegments(results[j]); + } + return false; } } @@ -213,14 +224,22 @@ private bool AddPartialResultsNoErrors( { var result = results[i]; - if (!SaveSafeResult( - resultData, - result.Path, - result.AdditionalPaths.AsSpan(), - dataElementsSpan[i], - errorTrie: null, - responseNames)) + var success = SaveSafeResult( + resultData, + result.Path, + result.AdditionalPaths.AsSpan(), + dataElementsSpan[i], + errorTrie: null, + responseNames); + ReturnPathSegments(result); + + if (!success) { + for (var j = i + 1; j < results.Length; j++) + { + ReturnPathSegments(results[j]); + } + return false; } } @@ -254,13 +273,15 @@ private bool AddSinglePartialResult( _errors.AddRange(rootErrors); } - return SaveSafeResult( + var success = SaveSafeResult( _result.Data, result.Path, result.AdditionalPaths.AsSpan(), dataElement, errorTrie, responseNames); + ReturnPathSegments(result); + return success; } } @@ -274,13 +295,15 @@ private bool AddSinglePartialResultNoErrors( lock (_lock) { - return SaveSafeResult( + var success = SaveSafeResult( _result.Data, result.Path, result.AdditionalPaths.AsSpan(), dataElement, errorTrie: null, responseNames); + ReturnPathSegments(result); + return success; } } @@ -611,7 +634,7 @@ private ReadOnlySpan CollectTargetElements(SelectionPath // Store potentially grown arrays back. _collectTargetA = current; _collectTargetB = next; - return ReadOnlySpan.Empty; + return []; } } @@ -665,7 +688,7 @@ private ImmutableArray BuildVariableValueSets( VariableValues[]? variableValueSets = null; Dictionary? seen = null; - List?[]? 
additionalPaths = null; + var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; foreach (var result in elements) @@ -688,8 +711,7 @@ private ImmutableArray BuildVariableValueSets( if (seen.TryGetValue(variables, out var existingIndex)) { - additionalPaths ??= new List?[elements.Length]; - (additionalPaths[existingIndex] ??= []).Add(result.CompactPath); + additionalPaths.Add(existingIndex, result.CompactPath); continue; } @@ -704,7 +726,7 @@ private ImmutableArray BuildVariableValueSets( _memory.Push(buffer); } - return FinalizeVariableValueSets(variableValueSets, additionalPaths, nextIndex); + return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } private ImmutableArray BuildVariableValueSetsSingleRequirement( @@ -733,7 +755,7 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementFa VariableValues[]? variableValueSets = null; Dictionary? seen = null; Dictionary? seenStrings = null; - List?[]? additionalPaths = null; + var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; var isNonNullRequirement = requirement.Type.Kind is SyntaxKind.NonNullType; @@ -768,8 +790,7 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementFa if (seenStrings is not null && seenStrings.TryGetValue(stringValue, out var existingIndex)) { - additionalPaths ??= new List?[elements.Length]; - (additionalPaths[existingIndex] ??= []).Add(result.CompactPath); + additionalPaths.Add(existingIndex, result.CompactPath); continue; } @@ -784,8 +805,7 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementFa if (seen is not null && seen.TryGetValue(mappedValue, out var existingIndex)) { - additionalPaths ??= new List?[elements.Length]; - (additionalPaths[existingIndex] ??= []).Add(result.CompactPath); + additionalPaths.Add(existingIndex, result.CompactPath); continue; } @@ -802,7 +822,7 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementFa ])); } - return 
FinalizeVariableValueSets(variableValueSets, additionalPaths, nextIndex); + return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } private ImmutableArray BuildVariableValueSetsSingleRequirementSlowPath( @@ -812,7 +832,7 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementSl { VariableValues[]? variableValueSets = null; Dictionary? seen = null; - List?[]? additionalPaths = null; + var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; foreach (var result in elements) @@ -840,8 +860,7 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementSl if (seen.TryGetValue(value, out var existingIndex)) { - additionalPaths ??= new List?[elements.Length]; - (additionalPaths[existingIndex] ??= []).Add(result.CompactPath); + additionalPaths.Add(existingIndex, result.CompactPath); continue; } @@ -853,7 +872,7 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementSl new ObjectValueNode([new ObjectFieldNode(requirement.Key, value)])); } - return FinalizeVariableValueSets(variableValueSets, additionalPaths, nextIndex); + return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } private ImmutableArray BuildVariableValueSetsTwoRequirements( @@ -891,7 +910,7 @@ private ImmutableArray BuildVariableValueSetsTwoRequirementsFast { VariableValues[]? variableValueSets = null; Dictionary? seen = null; - List?[]? 
additionalPaths = null; + var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; foreach (var result in elements) @@ -928,8 +947,7 @@ [new TwoValueNodeTuple( if (seen.TryGetValue(key, out var existingIndex)) { - additionalPaths ??= new List?[elements.Length]; - (additionalPaths[existingIndex] ??= []).Add(result.CompactPath); + additionalPaths.Add(existingIndex, result.CompactPath); continue; } @@ -944,7 +962,7 @@ [new TwoValueNodeTuple( ])); } - return FinalizeVariableValueSets(variableValueSets, additionalPaths, nextIndex); + return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } private ImmutableArray BuildVariableValueSetsTwoRequirementsSlowPath( @@ -955,7 +973,7 @@ private ImmutableArray BuildVariableValueSetsTwoRequirementsSlow { VariableValues[]? variableValueSets = null; Dictionary? seen = null; - List?[]? additionalPaths = null; + var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; foreach (var result in elements) @@ -992,8 +1010,7 @@ [new TwoValueNodeTuple( if (seen.TryGetValue(key, out var existingIndex)) { - additionalPaths ??= new List?[elements.Length]; - (additionalPaths[existingIndex] ??= []).Add(result.CompactPath); + additionalPaths.Add(existingIndex, result.CompactPath); continue; } @@ -1008,7 +1025,7 @@ [new TwoValueNodeTuple( ])); } - return FinalizeVariableValueSets(variableValueSets, additionalPaths, nextIndex); + return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } private ImmutableArray BuildVariableValueSetsThreeRequirements( @@ -1053,7 +1070,7 @@ private ImmutableArray BuildVariableValueSetsThreeRequirementsFa { VariableValues[]? variableValueSets = null; Dictionary? seen = null; - List?[]? 
additionalPaths = null; + var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; foreach (var result in elements) @@ -1100,8 +1117,7 @@ [new ThreeValueNodeTuple( if (seen.TryGetValue(key, out var existingIndex)) { - additionalPaths ??= new List?[elements.Length]; - (additionalPaths[existingIndex] ??= []).Add(result.CompactPath); + additionalPaths.Add(existingIndex, result.CompactPath); continue; } @@ -1117,7 +1133,7 @@ [new ThreeValueNodeTuple( ])); } - return FinalizeVariableValueSets(variableValueSets, additionalPaths, nextIndex); + return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } private ImmutableArray BuildVariableValueSetsThreeRequirementsSlowPath( @@ -1129,7 +1145,7 @@ private ImmutableArray BuildVariableValueSetsThreeRequirementsSl { VariableValues[]? variableValueSets = null; Dictionary? seen = null; - List?[]? additionalPaths = null; + var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; foreach (var result in elements) @@ -1176,8 +1192,7 @@ [new ThreeValueNodeTuple( if (seen.TryGetValue(key, out var existingIndex)) { - additionalPaths ??= new List?[elements.Length]; - (additionalPaths[existingIndex] ??= []).Add(result.CompactPath); + additionalPaths.Add(existingIndex, result.CompactPath); continue; } @@ -1193,7 +1208,7 @@ [new ThreeValueNodeTuple( ])); } - return FinalizeVariableValueSets(variableValueSets, additionalPaths, nextIndex); + return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } private ObjectValueNode? 
MapRequirements( @@ -1525,6 +1540,28 @@ public void Dispose() { memory.Dispose(); } + + _pathPool.Dispose(); + } + + private void ReturnPathSegments(SourceSchemaResult result) + { + ReturnPathSegments(result.Path); + + foreach (var additionalPath in result.AdditionalPaths) + { + ReturnPathSegments(additionalPath); + } + } + + private void ReturnPathSegments(CompactPath path) + { + var array = path.UnsafeGetBackingArray(); + + if (array is not null) + { + _pathPool.Return(array); + } } private sealed class SingleValueNodeComparer : IEqualityComparer @@ -1540,27 +1577,17 @@ public int GetHashCode(IValueNode obj) private static ImmutableArray FinalizeVariableValueSets( VariableValues[]? variableValueSets, - List?[]? additionalPaths, + ref AdditionalPathAccumulator additionalPaths, int nextIndex) { if (variableValueSets is null || nextIndex == 0) { + additionalPaths.Dispose(); return []; } - if (additionalPaths is not null) - { - for (var i = 0; i < nextIndex; i++) - { - if (additionalPaths[i] is { } paths) - { - variableValueSets[i] = variableValueSets[i] with - { - AdditionalPaths = [.. paths] - }; - } - } - } + additionalPaths.ApplyTo(variableValueSets, nextIndex); + additionalPaths.Dispose(); if (variableValueSets.Length != nextIndex) { diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs index 0b910de5570..862f0600df9 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs @@ -5,6 +5,8 @@ namespace HotChocolate.Fusion.Text.Json; /// /// A compact, integer-based path representation for the Fusion execution engine. /// Each segment is either a positive Selection ID (field) or a bitwise-NOT array index (negative). +/// The backing array uses [0] = length encoding: _segments[0] holds the number of segments, +/// and _segments[1..length] hold the actual path segments. 
/// public readonly struct CompactPath : IEquatable { @@ -16,13 +18,17 @@ internal CompactPath(int[] segments) => _segments = segments; public ReadOnlySpan Segments - => _segments ?? ReadOnlySpan.Empty; + => _segments is null + ? ReadOnlySpan.Empty + : _segments.AsSpan(1, _segments[0]); - public int Length => _segments?.Length ?? 0; + public int Length => _segments?[0] ?? 0; public bool IsRoot => _segments is null; - public int this[int index] => _segments![index]; + public int this[int index] => _segments![index + 1]; + + internal int[]? UnsafeGetBackingArray() => _segments; public Path ToPath(Operation operation) { @@ -35,7 +41,8 @@ public Path ToPath(Operation operation) return path; } - for (var i = 0; i < _segments.Length; i++) + var length = _segments[0]; + for (var i = 1; i <= length; i++) { var segment = _segments[i]; @@ -70,7 +77,8 @@ public override int GetHashCode() if (_segments is not null) { - for (var i = 0; i < _segments.Length; i++) + var length = _segments[0]; + for (var i = 1; i <= length; i++) { hashCode.Add(_segments[i]); } @@ -78,4 +86,10 @@ public override int GetHashCode() return hashCode.ToHashCode(); } + + public static bool operator ==(CompactPath left, CompactPath right) + => left.Equals(right); + + public static bool operator !=(CompactPath left, CompactPath right) + => !left.Equals(right); } diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPathBuilder.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPathBuilder.cs index 2a7cfdfb642..f99ccc01325 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPathBuilder.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPathBuilder.cs @@ -1,21 +1,25 @@ using System.Buffers; +using System.Diagnostics; namespace HotChocolate.Fusion.Text.Json; +/// +/// Stack-based builder for . Starts on a caller-supplied +/// stack buffer and spills to if the path exceeds it. 
+/// internal ref struct CompactPathBuilder { + private readonly PathSegmentLocalPool? _pool; private Span _span; private int[]? _arrayFromPool; private int _pos; - public CompactPathBuilder(Span initialBuffer) + public CompactPathBuilder(Span initialBuffer, PathSegmentLocalPool? pool) { - if (initialBuffer.Length == 0) - { - throw new ArgumentException("The initial buffer cannot be empty.", nameof(initialBuffer)); - } + Debug.Assert(initialBuffer.Length > 0); _span = initialBuffer; + _pool = pool; _arrayFromPool = null; _pos = 0; } @@ -30,21 +34,42 @@ public void Append(int segment) _span[_pos++] = segment; } - public void AppendField(int selectionId) - { - ArgumentOutOfRangeException.ThrowIfNegative(selectionId); + public void AppendField(int selectionId) => Append(selectionId); - Append(selectionId); - } + public void AppendIndex(int arrayIndex) => Append(~arrayIndex); - public void AppendIndex(int arrayIndex) + public CompactPath ToPath() { - ArgumentOutOfRangeException.ThrowIfNegative(arrayIndex); + if (_pos == 0) + { + ReturnPooledArray(); + return CompactPath.Root; + } - Append(~arrayIndex); + if (_pool is null) + { + return ToPathNoPool(); + } + + // -1 because [0] is reserved for the length + if (_pos <= PathSegmentMemory.SegmentArraySize - 1) + { + var array = _pool.Rent(); + array[0] = _pos; + _span[.._pos].CopyTo(array.AsSpan(1)); + ReturnPooledArray(); + return new CompactPath(array); + } + + // Overflow: path deeper than 31 — allocate exact-sized array (extremely rare) + var overflow = new int[_pos + 1]; + overflow[0] = _pos; + _span[.._pos].CopyTo(overflow.AsSpan(1)); + ReturnPooledArray(); + return new CompactPath(overflow); } - public CompactPath ToPath() + private CompactPath ToPathNoPool() { if (_pos == 0) { @@ -52,7 +77,9 @@ public CompactPath ToPath() return CompactPath.Root; } - var result = _span[.._pos].ToArray(); + var result = new int[_pos + 1]; + result[0] = _pos; + _span[.._pos].CopyTo(result.AsSpan(1)); ReturnPooledArray(); return new 
CompactPath(result); } diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.cs index 14e5a924282..1d655dbd993 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.cs @@ -13,14 +13,19 @@ public sealed partial class CompositeResultDocument : IDisposable private readonly List _sources = []; private readonly Operation _operation; private readonly ulong _includeFlags; + private readonly PathSegmentLocalPool? _pathPool; internal MetaDb _metaDb; private bool _disposed; - public CompositeResultDocument(Operation operation, ulong includeFlags) + internal CompositeResultDocument( + Operation operation, + ulong includeFlags, + PathSegmentLocalPool? pathPool = null) { _metaDb = MetaDb.CreateForEstimatedRows(Cursor.RowsPerChunk * 8); _operation = operation; _includeFlags = includeFlags; + _pathPool = pathPool; Data = CreateObject(Cursor.Zero, operation.RootSelectionSet); } @@ -148,7 +153,7 @@ internal CompactPath CreateCompactPath(Cursor current) } while (true); Span pathBuffer = stackalloc int[32]; - var path = new CompactPathBuilder(pathBuffer); + var path = new CompactPathBuilder(pathBuffer, _pathPool); var parentTokenType = ElementTokenType.StartObject; chain = chain[..written]; diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentLocalPool.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentLocalPool.cs new file mode 100644 index 00000000000..1e1c2c238d8 --- /dev/null +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentLocalPool.cs @@ -0,0 +1,102 @@ +using System.Buffers; + +namespace HotChocolate.Fusion.Text.Json; + +internal sealed class PathSegmentLocalPool : IDisposable +{ + private int[]?[] _buffers; + private int _index; + private int[]?[] _allRented; + 
private int _allRentedCount; + private bool _disposed; + + public PathSegmentLocalPool(int initialCapacity = 64) + { + var capacity = Math.Max(32, initialCapacity); + + _buffers = ArrayPool.Shared.Rent(capacity); + _index = 0; + _allRented = ArrayPool.Shared.Rent(capacity * 2); + _allRentedCount = 0; + } + + public int[] Rent() + { + if (_index > 0) + { + var array = _buffers[--_index]!; + _buffers[_index] = null; + return array; + } + + var rented = PathSegmentMemory.Rent(); + TrackRented(rented); + return rented; + } + + public void Return(int[] array) + { + if (array.Length != PathSegmentMemory.SegmentArraySize) + { + return; + } + + if (_index == _buffers.Length) + { + GrowBuffers(); + } + + _buffers[_index++] = array; + } + + private void TrackRented(int[] array) + { + if (_allRentedCount == _allRented.Length) + { + GrowAllRented(); + } + + _allRented[_allRentedCount++] = array; + } + + private void GrowBuffers() + { + var newBuffers = ArrayPool.Shared.Rent(_buffers.Length * 2); + _buffers.AsSpan(0, _index).CopyTo(newBuffers); + ArrayPool.Shared.Return(_buffers, clearArray: true); + _buffers = newBuffers; + } + + private void GrowAllRented() + { + var newAllRented = ArrayPool.Shared.Rent(_allRented.Length * 2); + _allRented.AsSpan(0, _allRentedCount).CopyTo(newAllRented); + ArrayPool.Shared.Return(_allRented, clearArray: true); + _allRented = newAllRented; + } + + public void Dispose() + { + if (_disposed) + { + return; + } + + _disposed = true; + + for (var i = 0; i < _allRentedCount; i++) + { + PathSegmentMemory.Return(_allRented[i]!); + _allRented[i] = null; + } + + _allRentedCount = 0; + _index = 0; + + ArrayPool.Shared.Return(_buffers, clearArray: true); + ArrayPool.Shared.Return(_allRented, clearArray: true); + + _buffers = []; + _allRented = []; + } +} diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentMemory.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentMemory.cs new file mode 100644 index 
00000000000..5bc028182a1 --- /dev/null +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentMemory.cs @@ -0,0 +1,27 @@ +namespace HotChocolate.Fusion.Text.Json; + +internal static class PathSegmentMemory +{ + private static PathSegmentPool s_pool = new( + segmentArraySize: 64, + levels: [64, 256, 1024], + trimInterval: TimeSpan.FromMinutes(5), + preAllocate: false); + + public static int SegmentArraySize => s_pool._segmentArraySize; + + public static void Reconfigure(Func factory) + { + ArgumentNullException.ThrowIfNull(factory); + + var oldPool = Interlocked.Exchange( + ref s_pool, + factory() ?? throw new InvalidOperationException( + "The factory must create a valid pool.")); + oldPool.Dispose(); + } + + public static int[] Rent() => s_pool.Rent(); + + public static void Return(int[] array) => s_pool.Return(array); +} diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentPool.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentPool.cs new file mode 100644 index 00000000000..eace6ebd8fa --- /dev/null +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentPool.cs @@ -0,0 +1,198 @@ +using System.Diagnostics; + +namespace HotChocolate.Fusion.Text.Json; + +internal sealed class PathSegmentPool : IDisposable +{ + internal readonly int _segmentArraySize; + private readonly Bucket _bucket; + + public PathSegmentPool(int segmentArraySize, int[] levels, TimeSpan trimInterval, bool preAllocate) + { + Debug.Assert(segmentArraySize >= 32); + Debug.Assert( + levels.Length > 0, + "Levels must be a non-empty array."); + Debug.Assert( + trimInterval.TotalSeconds > 10, + "Trim interval should be greater than 10 seconds to avoid excessive trimming."); + + _segmentArraySize = segmentArraySize; + _bucket = new Bucket(segmentArraySize, levels, trimInterval, preAllocate); + } + + public int[] Rent() + { + return _bucket.Rent() ?? 
new int[_segmentArraySize]; + } + + public void Return(int[] array) + { + if (array.Length != _segmentArraySize) + { + return; + } + + _bucket.Return(array); + } + + public void Dispose() => _bucket.Dispose(); + + private sealed class Bucket : IDisposable + { + private readonly int _segmentArraySize; + private readonly int[]?[] _buffers; + private readonly int[] _levels; + private readonly Timer _trimTimer; + private int _currentLevel; + private int _inUse; + private SpinLock _lock; + private int _index; + + internal Bucket( + int segmentArraySize, + int[] levels, + TimeSpan trimInterval, + bool preAllocate) + { + var numberOfBuffers = levels[levels.Length - 1]; + + _segmentArraySize = segmentArraySize; + _buffers = new int[numberOfBuffers][]; + _levels = levels; + _currentLevel = _levels.Length - 1; + + if (preAllocate) + { + var stableLevel = levels[0]; + for (var i = 0; i < stableLevel; i++) + { + _buffers[i] = new int[_segmentArraySize]; + } + } + + _lock = new SpinLock(Debugger.IsAttached); + _index = 0; + _inUse = 0; + + _trimTimer = new Timer(static b => ((Bucket)b!).Trim(), this, trimInterval, trimInterval); + } + + internal int[]? Rent() + { + Interlocked.Increment(ref _inUse); + + var buffers = _buffers; + int[]? 
buffer = null; + + var lockTaken = false; + var allocateBuffer = false; + + try + { + _lock.Enter(ref lockTaken); + + if (_index < buffers.Length) + { + buffer = buffers[_index]; + buffers[_index++] = null; + allocateBuffer = buffer == null; + } + } + finally + { + if (lockTaken) + { + _lock.Exit(false); + } + } + + if (allocateBuffer) + { + buffer = new int[_segmentArraySize]; + } + + return buffer; + } + + internal void Return(int[] array) + { + Interlocked.Decrement(ref _inUse); + + if (array.Length != _segmentArraySize) + { + return; + } + + var lockTaken = false; + + try + { + _lock.Enter(ref lockTaken); + + if (_index > 0) + { + _buffers[--_index] = array; + } + } + finally + { + if (lockTaken) + { + _lock.Exit(false); + } + } + } + + private void Trim() + { + var currentLevel = _currentLevel; + + if (currentLevel == 0) + { + return; + } + + var previousLevel = currentLevel - 1; + var previousLimit = _levels[previousLevel]; + + if (_inUse > previousLimit) + { + return; + } + + var lockTaken = false; + + try + { + var currentLimit = _levels[currentLevel]; + + _lock.Enter(ref lockTaken); + + for (var i = previousLimit; i < currentLimit; i++) + { + if (_buffers[i] != null) + { + _buffers[i] = null; + } + } + + if (_index > previousLimit) + { + _index = previousLimit; + } + } + finally + { + if (lockTaken) + { + _lock.Exit(false); + } + } + + _currentLevel = previousLevel; + } + + public void Dispose() => _trimTimer.Dispose(); + } +} diff --git a/src/HotChocolate/Utilities/src/Utilities.Buffers/JsonMemory.cs b/src/HotChocolate/Utilities/src/Utilities.Buffers/JsonMemory.cs index af1e01d783a..e7a996d8c19 100644 --- a/src/HotChocolate/Utilities/src/Utilities.Buffers/JsonMemory.cs +++ b/src/HotChocolate/Utilities/src/Utilities.Buffers/JsonMemory.cs @@ -19,7 +19,7 @@ internal static class JsonMemory arraySize: BufferSize, [128, 768, 3072], trimInterval: TimeSpan.FromMinutes(5), - preAllocate: true); + preAllocate: false); private static readonly ArrayPool s_chunkPool 
= ArrayPool.Shared; public static void Reconfigure(Func factory) From 5791170e6fefcfa4c77aed6e1f324aa03bdeff90 Mon Sep 17 00:00:00 2001 From: Michael Staib Date: Sat, 14 Mar 2026 08:47:56 +0100 Subject: [PATCH 03/13] Fix pool return --- .../Execution/Results/FetchResultStore.cs | 97 +++++++++++-------- ...onTests.Source_Schema_Transport_Error.snap | 2 +- 2 files changed, 60 insertions(+), 39 deletions(-) diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs index 1cbdf302ef1..ebead55f991 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs @@ -174,15 +174,9 @@ public bool AddPartialResults( dataElementsSpan[i], errorTriesSpan[i], responseNames); - ReturnPathSegments(result); if (!success) { - for (var j = i + 1; j < results.Length; j++) - { - ReturnPathSegments(results[j]); - } - return false; } } @@ -192,6 +186,7 @@ public bool AddPartialResults( } finally { + ReturnPathSegments(results); dataElementsSpan.Clear(); errorTriesSpan.Clear(); ArrayPool.Shared.Return(dataElements); @@ -231,15 +226,9 @@ private bool AddPartialResultsNoErrors( dataElementsSpan[i], errorTrie: null, responseNames); - ReturnPathSegments(result); if (!success) { - for (var j = i + 1; j < results.Length; j++) - { - ReturnPathSegments(results[j]); - } - return false; } } @@ -249,6 +238,7 @@ private bool AddPartialResultsNoErrors( } finally { + ReturnPathSegments(results); dataElementsSpan.Clear(); ArrayPool.Shared.Return(dataElements); } @@ -265,23 +255,28 @@ private bool AddSinglePartialResult( var dataElement = GetDataElement(sourcePath, result.Data); var errorTrie = GetErrorTrie(sourcePath, errors?.Trie); - lock (_lock) + try { - if (errors?.RootErrors is { Length: > 0 } rootErrors) + lock (_lock) { - _errors ??= []; - 
_errors.AddRange(rootErrors); - } + if (errors?.RootErrors is { Length: > 0 } rootErrors) + { + _errors ??= []; + _errors.AddRange(rootErrors); + } - var success = SaveSafeResult( - _result.Data, - result.Path, - result.AdditionalPaths.AsSpan(), - dataElement, - errorTrie, - responseNames); + return SaveSafeResult( + _result.Data, + result.Path, + result.AdditionalPaths.AsSpan(), + dataElement, + errorTrie, + responseNames); + } + } + finally + { ReturnPathSegments(result); - return success; } } @@ -293,17 +288,22 @@ private bool AddSinglePartialResultNoErrors( _memory.Push(result); var dataElement = GetDataElement(sourcePath, result.Data); - lock (_lock) + try + { + lock (_lock) + { + return SaveSafeResult( + _result.Data, + result.Path, + result.AdditionalPaths.AsSpan(), + dataElement, + errorTrie: null, + responseNames); + } + } + finally { - var success = SaveSafeResult( - _result.Data, - result.Path, - result.AdditionalPaths.AsSpan(), - dataElement, - errorTrie: null, - responseNames); ReturnPathSegments(result); - return success; } } @@ -1544,23 +1544,44 @@ public void Dispose() _pathPool.Dispose(); } + private void ReturnPathSegments(ReadOnlySpan results) + { + HashSet? seen = null; + + for (var i = 0; i < results.Length; i++) + { + ReturnPathSegments(results[i], ref seen); + } + } + private void ReturnPathSegments(SourceSchemaResult result) { - ReturnPathSegments(result.Path); + HashSet? seen = null; + ReturnPathSegments(result, ref seen); + } + + private void ReturnPathSegments(SourceSchemaResult result, ref HashSet? seen) + { + ReturnPathSegments(result.Path, ref seen); foreach (var additionalPath in result.AdditionalPaths) { - ReturnPathSegments(additionalPath); + ReturnPathSegments(additionalPath, ref seen); } } - private void ReturnPathSegments(CompactPath path) + private void ReturnPathSegments(CompactPath path, ref HashSet? 
seen) { var array = path.UnsafeGetBackingArray(); if (array is not null) { - _pathPool.Return(array); + seen ??= new HashSet(System.Collections.Generic.ReferenceEqualityComparer.Instance); + + if (seen.Add(array)) + { + _pathPool.Return(array); + } } } diff --git a/src/HotChocolate/Fusion/test/Fusion.Diagnostics.Tests/__snapshots__/QueryInstrumentationTests.Source_Schema_Transport_Error.snap b/src/HotChocolate/Fusion/test/Fusion.Diagnostics.Tests/__snapshots__/QueryInstrumentationTests.Source_Schema_Transport_Error.snap index 4a93931c9fa..2432344201c 100644 --- a/src/HotChocolate/Fusion/test/Fusion.Diagnostics.Tests/__snapshots__/QueryInstrumentationTests.Source_Schema_Transport_Error.snap +++ b/src/HotChocolate/Fusion/test/Fusion.Diagnostics.Tests/__snapshots__/QueryInstrumentationTests.Source_Schema_Transport_Error.snap @@ -112,7 +112,7 @@ }, { "Key": "exception.stacktrace", - "Value": "System.Net.Http.HttpRequestException: Response status code does not indicate success: 500 (Internal Server Error).\n at System.Net.Http.HttpResponseMessage.EnsureSuccessStatusCode()\n at HotChocolate.Fusion.Transport.Http.GraphQLHttpResponse.ReadAsResultAsync(CancellationToken cancellationToken) in GraphQLHttpResponse.cs:line 292\n at HotChocolate.Fusion.Execution.Clients.SourceSchemaHttpClient.Response.ReadAsResultStreamAsync(CancellationToken cancellationToken)+MoveNext() in SourceSchemaHttpClient.cs:line 577\n at HotChocolate.Fusion.Execution.Clients.SourceSchemaHttpClient.Response.ReadAsResultStreamAsync(CancellationToken cancellationToken)+System.Threading.Tasks.Sources.IValueTaskSource.GetResult()\n at HotChocolate.Fusion.Execution.Nodes.OperationExecutionNode.OnExecuteAsync(OperationPlanContext context, CancellationToken cancellationToken) in OperationExecutionNode.cs:line 159\n at HotChocolate.Fusion.Execution.Nodes.OperationExecutionNode.OnExecuteAsync(OperationPlanContext context, CancellationToken cancellationToken) in OperationExecutionNode.cs:line 159" + "Value": 
"System.Net.Http.HttpRequestException: Response status code does not indicate success: 500 (Internal Server Error).\n at System.Net.Http.HttpResponseMessage.EnsureSuccessStatusCode()\n at HotChocolate.Fusion.Transport.Http.GraphQLHttpResponse.ReadAsResultAsync(CancellationToken cancellationToken) in GraphQLHttpResponse.cs:line 292\n at HotChocolate.Fusion.Execution.Clients.SourceSchemaHttpClient.Response.ReadAsResultStreamAsync(CancellationToken cancellationToken)+MoveNext() in SourceSchemaHttpClient.cs:line 577\n at HotChocolate.Fusion.Execution.Clients.SourceSchemaHttpClient.Response.ReadAsResultStreamAsync(CancellationToken cancellationToken)+System.Threading.Tasks.Sources.IValueTaskSource.GetResult()\n at HotChocolate.Fusion.Execution.Nodes.OperationExecutionNode.OnExecuteAsync(OperationPlanContext context, CancellationToken cancellationToken) in OperationExecutionNode.cs:line 160\n at HotChocolate.Fusion.Execution.Nodes.OperationExecutionNode.OnExecuteAsync(OperationPlanContext context, CancellationToken cancellationToken) in OperationExecutionNode.cs:line 160" }, { "Key": "exception.message", From 4b6aa9776b980f4a00e43dcf4a4557044ca77b33 Mon Sep 17 00:00:00 2001 From: Michael Staib Date: Sun, 15 Mar 2026 11:49:33 +0100 Subject: [PATCH 04/13] optimization and fixes --- .../Execution/OperationPlanContext.cs | 4 +- .../Execution/Results/FetchResultStore.cs | 51 ++++++++++++------- 2 files changed, 35 insertions(+), 20 deletions(-) diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs index 3d800c4226b..a1ce73034ec 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs @@ -272,7 +272,9 @@ private CompactPath ToResultPath(SelectionPath selectionSet) } Span buffer = stackalloc int[32]; - var builder = new 
CompactPathBuilder(buffer, _resultStore._pathPool); + // This helper can run concurrently across nodes; avoid using the request-local + // pool here since that pool is synchronized through FetchResultStore's lock. + var builder = new CompactPathBuilder(buffer, pool: null); var operation = OperationPlan.Operation; var currentSelectionSet = operation.RootSelectionSet; Selection? currentSelection = null; diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs index ebead55f991..e32a03d3dcf 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs @@ -34,6 +34,7 @@ internal sealed class FetchResultStore : IDisposable private CompositeResultElement[] _collectTargetB = ArrayPool.Shared.Rent(64); private CompositeResultElement[] _collectTargetCombined = ArrayPool.Shared.Rent(64); internal readonly PathSegmentLocalPool _pathPool; + private HashSet? _seenPaths; private CompositeResultDocument _result; private ValueCompletion _valueCompletion; private List? 
_errors; @@ -186,7 +187,11 @@ public bool AddPartialResults( } finally { - ReturnPathSegments(results); + lock (_lock) + { + ReturnPathSegments(results); + } + dataElementsSpan.Clear(); errorTriesSpan.Clear(); ArrayPool.Shared.Return(dataElements); @@ -238,7 +243,11 @@ private bool AddPartialResultsNoErrors( } finally { - ReturnPathSegments(results); + lock (_lock) + { + ReturnPathSegments(results); + } + dataElementsSpan.Clear(); ArrayPool.Shared.Return(dataElements); } @@ -276,7 +285,10 @@ private bool AddSinglePartialResult( } finally { - ReturnPathSegments(result); + lock (_lock) + { + ReturnPathSegments(result); + } } } @@ -303,7 +315,10 @@ private bool AddSinglePartialResultNoErrors( } finally { - ReturnPathSegments(result); + lock (_lock) + { + ReturnPathSegments(result); + } } } @@ -1546,42 +1561,40 @@ public void Dispose() private void ReturnPathSegments(ReadOnlySpan results) { - HashSet? seen = null; + _seenPaths ??= new HashSet(ReferenceEqualityComparer.Instance); for (var i = 0; i < results.Length; i++) { - ReturnPathSegments(results[i], ref seen); + ReturnPathSegments(results[i], _seenPaths); } + + _seenPaths.Clear(); } private void ReturnPathSegments(SourceSchemaResult result) { - HashSet? seen = null; - ReturnPathSegments(result, ref seen); + _seenPaths ??= new HashSet(ReferenceEqualityComparer.Instance); + ReturnPathSegments(result, _seenPaths); + _seenPaths.Clear(); } - private void ReturnPathSegments(SourceSchemaResult result, ref HashSet? seen) + private void ReturnPathSegments(SourceSchemaResult result, HashSet seen) { - ReturnPathSegments(result.Path, ref seen); + ReturnPathSegments(result.Path, seen); foreach (var additionalPath in result.AdditionalPaths) { - ReturnPathSegments(additionalPath, ref seen); + ReturnPathSegments(additionalPath, seen); } } - private void ReturnPathSegments(CompactPath path, ref HashSet? 
seen) + private void ReturnPathSegments(CompactPath path, HashSet seen) { var array = path.UnsafeGetBackingArray(); - if (array is not null) + if (array is not null && seen.Add(array)) { - seen ??= new HashSet(System.Collections.Generic.ReferenceEqualityComparer.Instance); - - if (seen.Add(array)) - { - _pathPool.Return(array); - } + _pathPool.Return(array); } } From 5db84dbe312b6603ad39049b7248d172476fb23b Mon Sep 17 00:00:00 2001 From: Michael Staib Date: Sun, 15 Mar 2026 13:43:44 +0100 Subject: [PATCH 05/13] Result Store Pooling --- ...colateFusionServiceCollectionExtensions.cs | 9 + .../Execution/OperationPlanContext.cs | 9 +- .../Results/FetchResultStore.Pooling.cs | 145 +++++++++++++ .../Execution/Results/FetchResultStore.cs | 168 ++++++--------- .../Execution/Results/FetchResultStorePool.cs | 192 ++++++++++++++++++ .../FetchResultStorePoolEventSource.cs | 47 +++++ .../SourceSchemaRequestDispatcherTests.cs | 4 + 7 files changed, 468 insertions(+), 106 deletions(-) create mode 100644 src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.Pooling.cs create mode 100644 src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStorePool.cs create mode 100644 src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStorePoolEventSource.cs diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/DependencyInjection/HotChocolateFusionServiceCollectionExtensions.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/DependencyInjection/HotChocolateFusionServiceCollectionExtensions.cs index 847f14a71ce..3a1fafc422b 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/DependencyInjection/HotChocolateFusionServiceCollectionExtensions.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/DependencyInjection/HotChocolateFusionServiceCollectionExtensions.cs @@ -3,6 +3,7 @@ using HotChocolate.Fusion.Configuration; using HotChocolate.Fusion.Execution; using HotChocolate.Fusion.Execution.Clients; +using 
HotChocolate.Fusion.Execution.Results; using HotChocolate.Language; using Microsoft.Extensions.DependencyInjection.Extensions; using Microsoft.Extensions.ObjectPool; @@ -89,10 +90,18 @@ private static DefaultFusionGatewayBuilder CreateBuilder( var builder = new DefaultFusionGatewayBuilder(services, name); builder.AddDocumentCache(); + builder.AddFetchResultStorePool(); builder.UseDefaultPipeline(); return builder; } + private static void AddFetchResultStorePool(this IFusionGatewayBuilder builder) + => builder.ConfigureSchemaServices( + static (_, s) => s.TryAddSingleton( + new FetchResultStorePool( + levels: [4, 16, 64], + trimInterval: TimeSpan.FromMinutes(5)))); + private static IFusionGatewayBuilder AddDocumentCache(this IFusionGatewayBuilder builder) { builder.Services.TryAddKeyedSingleton( diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs index a1ce73034ec..0355b794873 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs @@ -28,6 +28,7 @@ public sealed class OperationPlanContext : IFeatureProvider, IAsyncDisposable private readonly Uri?[] _transportUris; private readonly string?[] _transportContentTypes; private readonly IFusionExecutionDiagnosticEvents _diagnosticEvents; + private readonly FetchResultStorePool _resultStorePool; private readonly FetchResultStore _resultStore; private readonly ExecutionState _executionState; private readonly SourceSchemaRequestDispatcher _sourceSchemaDispatcher; @@ -67,7 +68,9 @@ public OperationPlanContext( _diagnosticEvents = requestContext.Schema.Services.GetRequiredService(); var errorHandler = requestContext.Schema.Services.GetRequiredService(); - _resultStore = new FetchResultStore( + _resultStorePool = requestContext.Schema.Services.GetRequiredService(); + _resultStore = 
_resultStorePool.Rent(); + _resultStore.Initialize( requestContext.Schema, errorHandler, operationPlan.Operation, @@ -499,7 +502,9 @@ public async ValueTask DisposeAsync() _disposed = true; DisposeNodeState(); _sourceSchemaDispatcher.Abort(); - _resultStore.Dispose(); + + _resultStorePool.Return(_resultStore); + await _clientScope.DisposeAsync(); } } diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.Pooling.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.Pooling.cs new file mode 100644 index 00000000000..acb2c2f1f2d --- /dev/null +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.Pooling.cs @@ -0,0 +1,145 @@ +using System.Buffers; +using HotChocolate.Execution; +using HotChocolate.Fusion.Execution.Nodes; +using HotChocolate.Fusion.Text.Json; +using HotChocolate.Language; + +namespace HotChocolate.Fusion.Execution.Results; + +internal sealed partial class FetchResultStore +{ + /// + /// Initializes the for a new request. 
+ /// + public void Initialize( + ISchemaDefinition schema, + IErrorHandler errorHandler, + Operation operation, + ErrorHandlingMode errorHandlingMode, + ulong includeFlags, + int pathSegmentLocalPoolCapacity) + { + ArgumentNullException.ThrowIfNull(schema); + ArgumentNullException.ThrowIfNull(operation); + + _schema = schema; + _errorHandler = errorHandler; + _operation = operation; + _errorHandlingMode = errorHandlingMode; + _includeFlags = includeFlags; + _disposed = false; + + _pathPool ??= new PathSegmentLocalPool(pathSegmentLocalPoolCapacity); + _result = new CompositeResultDocument(operation, includeFlags, _pathPool); + + _valueCompletion = new ValueCompletion( + this, + _schema, + _errorHandler, + _errorHandlingMode, + maxDepth: 32); + + _memory.Push(_result); + } + + public void Reset() + { + ObjectDisposedException.ThrowIf(_disposed, this); + + _result = new CompositeResultDocument(_operation, _includeFlags, _pathPool); + _errors?.Clear(); + + _valueCompletion = new ValueCompletion( + this, + _schema, + _errorHandler, + _errorHandlingMode, + maxDepth: 32); + + _memory.Push(_result); + } + + /// + /// Cleans the store for return to the pool. + /// Releases per-request state while retaining reusable buffers. + /// + internal void Clean(int maxCollectTargetRetainLength, int maxDictionaryRetainCapacity) + { + // drain and dispose per-request memory + while (_memory.TryPop(out var memory)) + { + memory.Dispose(); + } + + // return path segments to global pool and reset local pool + _pathPool.Dispose(); + _pathPool = null!; + + // clear errors + _errors?.Clear(); + + // clear collect target arrays to unroot CompositeResultDocument references; + // if they grew too large during a burst, swap them for smaller ones. 
+ TrimOrClearBuffer(ref _collectTargetA, maxCollectTargetRetainLength); + TrimOrClearBuffer(ref _collectTargetB, maxCollectTargetRetainLength); + TrimOrClearBuffer(ref _collectTargetCombined, maxCollectTargetRetainLength); + + // clear dictionaries/hashsets; drop oversized ones. + TrimOrClear(ref _seenPaths, maxDictionaryRetainCapacity, ReferenceEqualityComparer.Instance); + TrimOrClear(ref _seenStrings, maxDictionaryRetainCapacity, StringComparer.Ordinal); + TrimOrClear(ref _seenValueNodes, maxDictionaryRetainCapacity, SingleValueNodeComparer.Instance); + TrimOrClear(ref _seenTwoValueTuples, maxDictionaryRetainCapacity, TwoValueNodeTupleComparer.Instance); + TrimOrClear(ref _seenThreeValueTuples, maxDictionaryRetainCapacity, ThreeValueNodeTupleComparer.Instance); + + // null out per-request references + _result = default!; + _valueCompletion = default!; + _schema = default!; + _errorHandler = default!; + _operation = default!; + } + + private static void TrimOrClearBuffer(ref CompositeResultElement[] buffer, int maxRetainLength) + { + if (buffer.Length > maxRetainLength) + { + ArrayPool.Shared.Return(buffer, clearArray: true); + buffer = ArrayPool.Shared.Rent(64); + } + else + { + buffer.AsSpan().Clear(); + } + } + + private static void TrimOrClear( + ref HashSet set, + int maxRetainCapacity, + IEqualityComparer comparer) + { + if (set.Count > maxRetainCapacity) + { + set = new HashSet(comparer); + } + else + { + set.Clear(); + } + } + + private static void TrimOrClear( + ref Dictionary dict, + int maxRetainCapacity, + IEqualityComparer comparer) + where TKey : notnull + { + if (dict.Count > maxRetainCapacity) + { + dict = new Dictionary(comparer); + } + else + { + dict.Clear(); + } + } +} diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs index e32a03d3dcf..af0902d5a39 100644 --- 
a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs @@ -17,76 +17,33 @@ namespace HotChocolate.Fusion.Execution.Results; -internal sealed class FetchResultStore : IDisposable +internal sealed partial class FetchResultStore : IDisposable { #if NET9_0_OR_GREATER private readonly Lock _lock = new(); #else private readonly object _lock = new(); #endif - private readonly ISchemaDefinition _schema; - private readonly IErrorHandler _errorHandler; - private readonly Operation _operation; - private readonly ErrorHandlingMode _errorHandlingMode; - private readonly ulong _includeFlags; private readonly ConcurrentStack _memory = []; + private ISchemaDefinition _schema = default!; + private IErrorHandler _errorHandler = default!; + private Operation _operation = default!; + private ErrorHandlingMode _errorHandlingMode; + private ulong _includeFlags; private CompositeResultElement[] _collectTargetA = ArrayPool.Shared.Rent(64); private CompositeResultElement[] _collectTargetB = ArrayPool.Shared.Rent(64); private CompositeResultElement[] _collectTargetCombined = ArrayPool.Shared.Rent(64); - internal readonly PathSegmentLocalPool _pathPool; - private HashSet? _seenPaths; - private CompositeResultDocument _result; - private ValueCompletion _valueCompletion; + private PathSegmentLocalPool _pathPool = default!; + private HashSet _seenPaths = new(ReferenceEqualityComparer.Instance); + private Dictionary _seenStrings = new(StringComparer.Ordinal); + private Dictionary _seenValueNodes = new(SingleValueNodeComparer.Instance); + private Dictionary _seenTwoValueTuples = new(TwoValueNodeTupleComparer.Instance); + private Dictionary _seenThreeValueTuples = new(ThreeValueNodeTupleComparer.Instance); + private CompositeResultDocument _result = default!; + private ValueCompletion _valueCompletion = default!; private List? 
_errors; private bool _disposed; - public FetchResultStore( - ISchemaDefinition schema, - IErrorHandler errorHandler, - Operation operation, - ErrorHandlingMode errorHandlingMode, - ulong includeFlags, - int pathSegmentLocalPoolCapacity) - { - ArgumentNullException.ThrowIfNull(schema); - ArgumentNullException.ThrowIfNull(operation); - - _schema = schema; - _errorHandler = errorHandler; - _operation = operation; - _errorHandlingMode = errorHandlingMode; - _includeFlags = includeFlags; - _pathPool = new PathSegmentLocalPool(pathSegmentLocalPoolCapacity); - - _result = new CompositeResultDocument(operation, includeFlags, _pathPool); - - _valueCompletion = new ValueCompletion( - this, - _schema, - _errorHandler, - _errorHandlingMode, - maxDepth: 32); - - _memory.Push(_result); - } - - public void Reset() - { - ObjectDisposedException.ThrowIf(_disposed, this); - - _result = new CompositeResultDocument(_operation, _includeFlags, _pathPool); - _errors?.Clear(); - - _valueCompletion = new ValueCompletion( - this, - _schema, - _errorHandler, - _errorHandlingMode, - maxDepth: 32); - - _memory.Push(_result); - } - public CompositeResultDocument Result => _result; public IReadOnlyList? Errors => _errors; @@ -768,8 +725,6 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementFa ref PooledArrayWriter? buffer) { VariableValues[]? variableValueSets = null; - Dictionary? seen = null; - Dictionary? 
seenStrings = null; var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; var isNonNullRequirement = requirement.Type.Kind is SyntaxKind.NonNullType; @@ -802,30 +757,26 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementFa { var stringValue = value.AssertString(); - if (seenStrings is not null - && seenStrings.TryGetValue(stringValue, out var existingIndex)) + if (_seenStrings.TryGetValue(stringValue, out var existingIndex)) { additionalPaths.Add(existingIndex, result.CompactPath); continue; } mappedValue = ResultDataMapper.GetStringValueNode(stringValue); - seenStrings ??= new Dictionary(elements.Length, StringComparer.Ordinal); - seenStrings[stringValue] = nextIndex; + _seenStrings[stringValue] = nextIndex; } else { mappedValue = ResultDataMapper.MapLeafValue(value, ref buffer); - if (seen is not null - && seen.TryGetValue(mappedValue, out var existingIndex)) + if (_seenValueNodes.TryGetValue(mappedValue, out var existingIndex)) { additionalPaths.Add(existingIndex, result.CompactPath); continue; } - seen ??= new Dictionary(elements.Length, SingleValueNodeComparer.Instance); - seen[mappedValue] = nextIndex; + _seenValueNodes[mappedValue] = nextIndex; } variableValueSets[nextIndex++] = new VariableValues( @@ -837,6 +788,8 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementFa ])); } + _seenStrings.Clear(); + _seenValueNodes.Clear(); return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } @@ -846,9 +799,9 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementSl ref PooledArrayWriter? buffer) { VariableValues[]? variableValueSets = null; - Dictionary? 
seen = null; var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; + var seeded = false; foreach (var result in elements) { @@ -868,18 +821,19 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementSl if (nextIndex > 0) { - seen ??= new Dictionary(elements.Length, SingleValueNodeComparer.Instance) + if (!seeded) { - [variableValueSets[0].Values.Fields[0].Value] = 0 - }; + _seenValueNodes[variableValueSets[0].Values.Fields[0].Value] = 0; + seeded = true; + } - if (seen.TryGetValue(value, out var existingIndex)) + if (_seenValueNodes.TryGetValue(value, out var existingIndex)) { additionalPaths.Add(existingIndex, result.CompactPath); continue; } - seen[value] = nextIndex; + _seenValueNodes[value] = nextIndex; } variableValueSets[nextIndex++] = new VariableValues( @@ -887,6 +841,7 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementSl new ObjectValueNode([new ObjectFieldNode(requirement.Key, value)])); } + _seenValueNodes.Clear(); return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } @@ -924,9 +879,9 @@ private ImmutableArray BuildVariableValueSetsTwoRequirementsFast ref PooledArrayWriter? buffer) { VariableValues[]? variableValueSets = null; - Dictionary? 
seen = null; var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; + var seeded = false; foreach (var result in elements) { @@ -953,20 +908,21 @@ private ImmutableArray BuildVariableValueSetsTwoRequirementsFast if (nextIndex > 0) { - seen ??= new Dictionary(elements.Length, TwoValueNodeTupleComparer.Instance) + if (!seeded) { - [new TwoValueNodeTuple( + _seenTwoValueTuples[new TwoValueNodeTuple( variableValueSets[0].Values.Fields[0].Value, - variableValueSets[0].Values.Fields[1].Value)] = 0 - }; + variableValueSets[0].Values.Fields[1].Value)] = 0; + seeded = true; + } - if (seen.TryGetValue(key, out var existingIndex)) + if (_seenTwoValueTuples.TryGetValue(key, out var existingIndex)) { additionalPaths.Add(existingIndex, result.CompactPath); continue; } - seen[key] = nextIndex; + _seenTwoValueTuples[key] = nextIndex; } variableValueSets[nextIndex++] = new VariableValues( @@ -977,6 +933,7 @@ [new TwoValueNodeTuple( ])); } + _seenTwoValueTuples.Clear(); return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } @@ -987,9 +944,9 @@ private ImmutableArray BuildVariableValueSetsTwoRequirementsSlow ref PooledArrayWriter? buffer) { VariableValues[]? variableValueSets = null; - Dictionary? 
seen = null; var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; + var seeded = false; foreach (var result in elements) { @@ -1016,20 +973,21 @@ private ImmutableArray BuildVariableValueSetsTwoRequirementsSlow if (nextIndex > 0) { - seen ??= new Dictionary(elements.Length, TwoValueNodeTupleComparer.Instance) + if (!seeded) { - [new TwoValueNodeTuple( + _seenTwoValueTuples[new TwoValueNodeTuple( variableValueSets[0].Values.Fields[0].Value, - variableValueSets[0].Values.Fields[1].Value)] = 0 - }; + variableValueSets[0].Values.Fields[1].Value)] = 0; + seeded = true; + } - if (seen.TryGetValue(key, out var existingIndex)) + if (_seenTwoValueTuples.TryGetValue(key, out var existingIndex)) { additionalPaths.Add(existingIndex, result.CompactPath); continue; } - seen[key] = nextIndex; + _seenTwoValueTuples[key] = nextIndex; } variableValueSets[nextIndex++] = new VariableValues( @@ -1040,6 +998,7 @@ [new TwoValueNodeTuple( ])); } + _seenTwoValueTuples.Clear(); return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } @@ -1084,9 +1043,9 @@ private ImmutableArray BuildVariableValueSetsThreeRequirementsFa ref PooledArrayWriter? buffer) { VariableValues[]? variableValueSets = null; - Dictionary? 
seen = null; var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; + var seeded = false; foreach (var result in elements) { @@ -1122,21 +1081,22 @@ private ImmutableArray BuildVariableValueSetsThreeRequirementsFa if (nextIndex > 0) { - seen ??= new Dictionary(elements.Length, ThreeValueNodeTupleComparer.Instance) + if (!seeded) { - [new ThreeValueNodeTuple( + _seenThreeValueTuples[new ThreeValueNodeTuple( variableValueSets[0].Values.Fields[0].Value, variableValueSets[0].Values.Fields[1].Value, - variableValueSets[0].Values.Fields[2].Value)] = 0 - }; + variableValueSets[0].Values.Fields[2].Value)] = 0; + seeded = true; + } - if (seen.TryGetValue(key, out var existingIndex)) + if (_seenThreeValueTuples.TryGetValue(key, out var existingIndex)) { additionalPaths.Add(existingIndex, result.CompactPath); continue; } - seen[key] = nextIndex; + _seenThreeValueTuples[key] = nextIndex; } variableValueSets[nextIndex++] = new VariableValues( @@ -1148,6 +1108,7 @@ [new ThreeValueNodeTuple( ])); } + _seenThreeValueTuples.Clear(); return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } @@ -1159,9 +1120,9 @@ private ImmutableArray BuildVariableValueSetsThreeRequirementsSl ref PooledArrayWriter? buffer) { VariableValues[]? variableValueSets = null; - Dictionary? 
seen = null; var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; + var seeded = false; foreach (var result in elements) { @@ -1197,21 +1158,22 @@ private ImmutableArray BuildVariableValueSetsThreeRequirementsSl if (nextIndex > 0) { - seen ??= new Dictionary(elements.Length, ThreeValueNodeTupleComparer.Instance) + if (!seeded) { - [new ThreeValueNodeTuple( + _seenThreeValueTuples[new ThreeValueNodeTuple( variableValueSets[0].Values.Fields[0].Value, variableValueSets[0].Values.Fields[1].Value, - variableValueSets[0].Values.Fields[2].Value)] = 0 - }; + variableValueSets[0].Values.Fields[2].Value)] = 0; + seeded = true; + } - if (seen.TryGetValue(key, out var existingIndex)) + if (_seenThreeValueTuples.TryGetValue(key, out var existingIndex)) { additionalPaths.Add(existingIndex, result.CompactPath); continue; } - seen[key] = nextIndex; + _seenThreeValueTuples[key] = nextIndex; } variableValueSets[nextIndex++] = new VariableValues( @@ -1223,6 +1185,7 @@ [new ThreeValueNodeTuple( ])); } + _seenThreeValueTuples.Clear(); return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } @@ -1561,8 +1524,6 @@ public void Dispose() private void ReturnPathSegments(ReadOnlySpan results) { - _seenPaths ??= new HashSet(ReferenceEqualityComparer.Instance); - for (var i = 0; i < results.Length; i++) { ReturnPathSegments(results[i], _seenPaths); @@ -1573,7 +1534,6 @@ private void ReturnPathSegments(ReadOnlySpan results) private void ReturnPathSegments(SourceSchemaResult result) { - _seenPaths ??= new HashSet(ReferenceEqualityComparer.Instance); ReturnPathSegments(result, _seenPaths); _seenPaths.Clear(); } diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStorePool.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStorePool.cs new file mode 100644 index 00000000000..97728ff7996 --- /dev/null +++ 
b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStorePool.cs @@ -0,0 +1,192 @@ +using System.Diagnostics; +using static HotChocolate.Fusion.Execution.Results.FetchResultStorePoolEventSource; + +namespace HotChocolate.Fusion.Execution.Results; + +internal sealed class FetchResultStorePool : IDisposable +{ + private const int MaxCollectTargetRetainLength = 256; + private const int MaxDictionaryRetainCapacity = 256; + + private readonly Bucket _bucket; + + public FetchResultStorePool(int[] levels, TimeSpan trimInterval) + { + Debug.Assert( + levels.Length > 0, + "Levels must be a non-empty array."); + Debug.Assert( + trimInterval.TotalSeconds > 10, + "Trim interval should be greater than 10 seconds to avoid excessive trimming."); + + _bucket = new Bucket(levels, trimInterval); + } + + public FetchResultStore Rent() + { + var store = _bucket.Rent(); + + if (store is null) + { + store = new FetchResultStore(); + Log.StoreMiss(); + } + else + { + Log.StoreHit(); + } + + return store; + } + + public void Return(FetchResultStore store) + { + store.Clean(MaxCollectTargetRetainLength, MaxDictionaryRetainCapacity); + + if (!_bucket.Return(store)) + { + store.Dispose(); + Log.StoreDropped(); + } + } + + public void Dispose() => _bucket.Dispose(); + + private sealed class Bucket : IDisposable + { + private readonly FetchResultStore?[] _stores; + private readonly int[] _levels; + private readonly Timer _trimTimer; + private int _currentLevel; + private int _inUse; + private SpinLock _lock; + private int _index; + + internal Bucket(int[] levels, TimeSpan trimInterval) + { + _stores = new FetchResultStore?[levels[levels.Length - 1]]; + _levels = levels; + _currentLevel = levels.Length - 1; + _lock = new SpinLock(Debugger.IsAttached); + _trimTimer = new Timer(static b => ((Bucket)b!).Trim(), this, trimInterval, trimInterval); + } + + internal FetchResultStore? Rent() + { + Interlocked.Increment(ref _inUse); + + FetchResultStore? 
store = null; + var lockTaken = false; + + try + { + _lock.Enter(ref lockTaken); + + if (_index < _stores.Length) + { + store = _stores[_index]; + _stores[_index++] = null; + } + } + finally + { + if (lockTaken) + { + _lock.Exit(false); + } + } + + return store; + } + + internal bool Return(FetchResultStore store) + { + Interlocked.Decrement(ref _inUse); + + var lockTaken = false; + var accepted = false; + + try + { + _lock.Enter(ref lockTaken); + + if (_index > 0) + { + _stores[--_index] = store; + accepted = true; + } + } + finally + { + if (lockTaken) + { + _lock.Exit(false); + } + } + + return accepted; + } + + private void Trim() + { + var currentLevel = _currentLevel; + + if (currentLevel == 0) + { + return; + } + + var previousLevel = currentLevel - 1; + var previousLimit = _levels[previousLevel]; + + if (_inUse > previousLimit) + { + return; + } + + var lockTaken = false; + + try + { + var currentLimit = _levels[currentLevel]; + + _lock.Enter(ref lockTaken); + + for (var i = previousLimit; i < currentLimit; i++) + { + if (_stores[i] is { } store) + { + store.Dispose(); + _stores[i] = null; + } + } + + if (_index > previousLimit) + { + _index = previousLimit; + } + } + finally + { + if (lockTaken) + { + _lock.Exit(false); + } + } + + _currentLevel = previousLevel; + Log.PoolTrimmed(previousLevel, previousLimit); + } + + public void Dispose() + { + _trimTimer.Dispose(); + + for (var i = 0; i < _stores.Length; i++) + { + _stores[i]?.Dispose(); + _stores[i] = null; + } + } + } +} diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStorePoolEventSource.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStorePoolEventSource.cs new file mode 100644 index 00000000000..6edd588112d --- /dev/null +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStorePoolEventSource.cs @@ -0,0 +1,47 @@ +using System.Diagnostics.Tracing; + +namespace HotChocolate.Fusion.Execution.Results; + 
+[EventSource(Name = "HotChocolate-Fusion-FetchResultStorePool")] +internal sealed class FetchResultStorePoolEventSource : EventSource +{ + public static readonly FetchResultStorePoolEventSource Log = new(); + + private FetchResultStorePoolEventSource() { } + + [Event(1, Level = EventLevel.Verbose, Message = "Store rented from pool (hit)")] + public void StoreHit() + { + if (IsEnabled()) + { + WriteEvent(1); + } + } + + [Event(2, Level = EventLevel.Informational, Message = "Pool empty, new store allocated (miss)")] + public void StoreMiss() + { + if (IsEnabled()) + { + WriteEvent(2); + } + } + + [Event(3, Level = EventLevel.Warning, Message = "Pool full, store disposed on return (dropped)")] + public void StoreDropped() + { + if (IsEnabled()) + { + WriteEvent(3); + } + } + + [Event(4, Level = EventLevel.Informational, Message = "Pool trimmed to level {0} (limit={1})")] + public void PoolTrimmed(int level, int limit) + { + if (IsEnabled()) + { + WriteEvent(4, level, limit); + } + } +} diff --git a/src/HotChocolate/Fusion/test/Fusion.Execution.Tests/Execution/Clients/SourceSchemaRequestDispatcherTests.cs b/src/HotChocolate/Fusion/test/Fusion.Execution.Tests/Execution/Clients/SourceSchemaRequestDispatcherTests.cs index f360c342e76..64ed1a6c05d 100644 --- a/src/HotChocolate/Fusion/test/Fusion.Execution.Tests/Execution/Clients/SourceSchemaRequestDispatcherTests.cs +++ b/src/HotChocolate/Fusion/test/Fusion.Execution.Tests/Execution/Clients/SourceSchemaRequestDispatcherTests.cs @@ -8,6 +8,7 @@ using HotChocolate.Fusion.Execution; using HotChocolate.Fusion.Execution.Clients; using HotChocolate.Fusion.Execution.Nodes; +using HotChocolate.Fusion.Execution.Results; using HotChocolate.Fusion.Types; using HotChocolate.Language; using Microsoft.Extensions.DependencyInjection; @@ -234,6 +235,9 @@ private static OperationPlanContext CreateContext(ISourceSchemaClient client) .AddSingleton( NoopFusionExecutionDiagnosticEvents.Instance) .AddSingleton(new DefaultErrorHandler([])) + 
.AddSingleton(new FetchResultStorePool( + levels: [4, 16, 64], + trimInterval: TimeSpan.FromMinutes(5))) .BuildServiceProvider(); var schemaFeatures = new FeatureCollection(); From bf5180f41039fcf64e5e0b721842b7f1b142ddf7 Mon Sep 17 00:00:00 2001 From: Michael Staib Date: Mon, 16 Mar 2026 09:44:00 +0100 Subject: [PATCH 06/13] Optimized client usage --- ...ests.Send_Start_ReceiveDataOnMutation.snap | 11 +++ .../Clients/SourceSchemaHttpClient.cs | 94 ++++++++++++++----- 2 files changed, 83 insertions(+), 22 deletions(-) create mode 100644 src/HotChocolate/AspNetCore/test/AspNetCore.Tests/Subscriptions/Apollo/__snapshots__/WebSocketProtocolTests.Send_Start_ReceiveDataOnMutation.snap diff --git a/src/HotChocolate/AspNetCore/test/AspNetCore.Tests/Subscriptions/Apollo/__snapshots__/WebSocketProtocolTests.Send_Start_ReceiveDataOnMutation.snap b/src/HotChocolate/AspNetCore/test/AspNetCore.Tests/Subscriptions/Apollo/__snapshots__/WebSocketProtocolTests.Send_Start_ReceiveDataOnMutation.snap new file mode 100644 index 00000000000..3fe244bd15b --- /dev/null +++ b/src/HotChocolate/AspNetCore/test/AspNetCore.Tests/Subscriptions/Apollo/__snapshots__/WebSocketProtocolTests.Send_Start_ReceiveDataOnMutation.snap @@ -0,0 +1,11 @@ +{ + "id": "abc", + "type": "data", + "payload": { + "data": { + "onReview": { + "stars": 5 + } + } + } +} diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaHttpClient.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaHttpClient.cs index 077349e73b2..3ad2525303a 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaHttpClient.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaHttpClient.cs @@ -1,3 +1,4 @@ +using System.Buffers; using System.Collections.Concurrent; using System.Collections.Immutable; using System.Diagnostics; @@ -714,30 +715,33 @@ public override async IAsyncEnumerable ReadAsResultStreamAsy /// /// A streaming 
response for a single execution node within a batched HTTP request. - /// Results are pushed into a by the background stream + /// Results are pushed into a under lock by the background stream /// reader and signalled via a lightweight . /// The execution node reads lazily via . /// - private sealed class NodeResponse : SourceSchemaClientResponse + private sealed class NodeResponse(Uri uri, string contentType, bool isSuccessful) : SourceSchemaClientResponse { - private readonly ConcurrentQueue _results = new(); +#if NET9_0_OR_GREATER + private readonly Lock _sync = new(); +#else + private readonly object _sync = new(); +#endif + private const int InitialCapacity = 32; + private static readonly ArrayPool s_pool = ArrayPool.Shared; private readonly AsyncAutoResetEvent _signal = new(); + private SourceSchemaResult[] _results = s_pool.Rent(InitialCapacity); + private int _resultsCount; + private SourceSchemaResult[] _drain = s_pool.Rent(InitialCapacity); + private int _drainCount; private volatile bool _completed; private Exception? _error; private bool _disposed; - public NodeResponse(Uri uri, string contentType, bool isSuccessful) - { - Uri = uri; - ContentType = contentType; - IsSuccessful = isSuccessful; - } - - public override Uri Uri { get; } + public override Uri Uri { get; } = uri; - public override string ContentType { get; } + public override string ContentType { get; } = contentType; - public override bool IsSuccessful { get; } + public override bool IsSuccessful { get; } = isSuccessful; /// /// Gets whether at least one result has been written to this response. 
@@ -752,7 +756,19 @@ internal bool TryWrite(SourceSchemaResult result) return false; } - _results.Enqueue(result); + lock (_sync) + { + if (_resultsCount == _results.Length) + { + var newArray = s_pool.Rent(_results.Length * 2); + _results.AsSpan(0, _resultsCount).CopyTo(newArray); + s_pool.Return(_results, clearArray: true); + _results = newArray; + } + + _results[_resultsCount++] = result; + } + _signal.Set(); return true; } @@ -771,18 +787,20 @@ public override async IAsyncEnumerable ReadAsResultStreamAsy { cancellationToken.ThrowIfCancellationRequested(); - while (_results.TryDequeue(out var result)) + var (buffer, count) = Drain(); + for (var i = 0; i < count; i++) { - yield return result; + yield return buffer[i]; } if (_completed) { - // Final drain — writer may have enqueued between our last - // TryDequeue and the completion flag becoming visible. - while (_results.TryDequeue(out var result)) + // Final drain, writer may have enqueued between our last + // drain and the completion flag becoming visible. + (buffer, count) = Drain(); + for (var i = 0; i < count; i++) { - yield return result; + yield return buffer[i]; } if (_error is not null) @@ -797,6 +815,29 @@ public override async IAsyncEnumerable ReadAsResultStreamAsy } } + private (SourceSchemaResult[] Buffer, int Count) Drain() + { + lock (_sync) + { + if (_resultsCount == 0) + { + return (Array.Empty(), 0); + } + + // Clear the previous drain buffer so it's ready + // to become the next write target. + _drain.AsSpan(0, _drainCount).Clear(); + _drainCount = 0; + + // Swap the buffers so the writer can keep adding + // while we drain outside the lock. 
+ (_results, _drain) = (_drain, _results); + (_resultsCount, _drainCount) = (0, _resultsCount); + } + + return (_drain, _drainCount); + } + public override void Dispose() { if (_disposed) @@ -808,9 +849,18 @@ public override void Dispose() Complete(); - while (_results.TryDequeue(out var result)) + var (buffer, count) = Drain(); + for (var i = 0; i < count; i++) + { + buffer[i].Dispose(); + } + + lock (_sync) { - result.Dispose(); + s_pool.Return(_results, clearArray: true); + s_pool.Return(_drain, clearArray: true); + _results = []; + _drain = []; } } } From 9ce5e291c897e23c475286fecb61b6e884ee3775 Mon Sep 17 00:00:00 2001 From: Michael Staib Date: Mon, 16 Mar 2026 11:01:35 +0100 Subject: [PATCH 07/13] Added diagnostic --- .../Fusion/benchmarks/k6/deep-recursion.js | 2 +- .../PathSegmentPoolDiagnostics.cs | 180 ++++++++++++++++++ .../benchmarks/k6/eShop.Gateway/Program.cs | 5 +- .../Text/Json/PathSegmentMemory.cs | 2 +- .../Text/Json/PathSegmentPool.cs | 78 +++++++- .../Text/Json/PathSegmentPoolEventSource.cs | 95 +++++++++ 6 files changed, 352 insertions(+), 10 deletions(-) create mode 100644 src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/PathSegmentPoolDiagnostics.cs create mode 100644 src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentPoolEventSource.cs diff --git a/src/HotChocolate/Fusion/benchmarks/k6/deep-recursion.js b/src/HotChocolate/Fusion/benchmarks/k6/deep-recursion.js index accba207fc0..bc804d5a1e5 100644 --- a/src/HotChocolate/Fusion/benchmarks/k6/deep-recursion.js +++ b/src/HotChocolate/Fusion/benchmarks/k6/deep-recursion.js @@ -3,7 +3,7 @@ import { check } from "k6"; import { Rate } from "k6/metrics"; import { textSummary } from "https://jslib.k6.io/k6-summary/0.0.1/index.js"; -const GRAPHQL_URL = 'http://localhost:5220/graphql'; +const GRAPHQL_URL = 'http://localhost:5000/graphql'; const endpoint = __ENV.GATEWAY_ENDPOINT || GRAPHQL_URL; const mode = __ENV.MODE || "constant"; const isConstant = mode === "constant"; diff --git 
a/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/PathSegmentPoolDiagnostics.cs b/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/PathSegmentPoolDiagnostics.cs new file mode 100644 index 00000000000..9ff7d13a30f --- /dev/null +++ b/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/PathSegmentPoolDiagnostics.cs @@ -0,0 +1,180 @@ +using System.Diagnostics.Tracing; + +namespace eShop.Gateway; + +/// +/// Listens to PathSegmentPool ETW events and logs aggregated usage metrics. +/// +internal sealed class PathSegmentPoolDiagnostics : EventListener, IHostedService +{ + private readonly Timer _timer; + + private int _poolId; + private int _segmentSize; + private int _maxArrays; + private long _maxBytes; + + private long _rented; + private long _returned; + private long _exhausted; + private long _dropped; + private long _allocated; + private long _trimmedEvents; + private int _lastTrimRemaining; + private int _lastTrimInUse; + private int _peakInUse; + + public PathSegmentPoolDiagnostics() + { + _timer = new Timer(LogSnapshot, null, Timeout.Infinite, Timeout.Infinite); + } + + protected override void OnEventSourceCreated(EventSource eventSource) + { + if (eventSource.Name == "HotChocolate-Fusion-PathSegmentPool") + { + EnableEvents(eventSource, EventLevel.Verbose); + } + } + + protected override void OnEventWritten(EventWrittenEventArgs e) + { + // Event IDs from PathSegmentPoolEventSource: + // 1 = PoolCreated (PoolId, SegmentSize, Arrays, TotalBytes) + // 2 = SegmentRented (ArrayId, Length, PoolId, InUse) + // 3 = SegmentReturned (ArrayId, Length, PoolId, InUse) + // 4 = PoolExhausted (PoolId, MaxArrays) + // 5 = SegmentDropped (ArrayId, Length, PoolId) + // 6 = SegmentAllocated(ArrayId, Length, PoolId) + // 7 = PoolTrimmed (PoolId, Trimmed, Remaining, InUse) + switch (e.EventId) + { + case 1: + if (e.Payload is { Count: >= 4 }) + { + if (e.Payload[0] is int poolId) + { + _poolId = poolId; + } + if (e.Payload[1] is int segmentSize) + { + _segmentSize = 
segmentSize; + } + if (e.Payload[2] is int maxArrays) + { + _maxArrays = maxArrays; + } + if (e.Payload[3] is long maxBytes) + { + _maxBytes = maxBytes; + } + } + break; + + case 2: + Interlocked.Increment(ref _rented); + if (e.Payload is { Count: >= 4 } && e.Payload[3] is int inUseRent) + { + UpdatePeakInUse(inUseRent); + } + break; + + case 3: + Interlocked.Increment(ref _returned); + break; + + case 4: + Interlocked.Increment(ref _exhausted); + break; + + case 5: + Interlocked.Increment(ref _dropped); + break; + + case 6: + Interlocked.Increment(ref _allocated); + break; + + case 7: + Interlocked.Increment(ref _trimmedEvents); + if (e.Payload is { Count: >= 4 }) + { + if (e.Payload[2] is int remaining) + { + _lastTrimRemaining = remaining; + } + if (e.Payload[3] is int inUseTrim) + { + _lastTrimInUse = inUseTrim; + } + } + break; + } + } + + private void UpdatePeakInUse(int inUse) + { + int current; + do + { + current = _peakInUse; + if (inUse <= current) + { + return; + } + } + while (Interlocked.CompareExchange(ref _peakInUse, inUse, current) != current); + } + + private void LogSnapshot(object? 
state) + { + var rented = Interlocked.Read(ref _rented); + var returned = Interlocked.Read(ref _returned); + var exhausted = Interlocked.Read(ref _exhausted); + var dropped = Interlocked.Read(ref _dropped); + var allocated = Interlocked.Read(ref _allocated); + var trimmedEvents = Interlocked.Read(ref _trimmedEvents); + var outstanding = rented - returned; + + Console.WriteLine( + "[PathSegmentPool] PoolId={0}, SegmentSize={1}, MaxArrays={2}, MaxBytes={3}, " + + "Rented={4}, Returned={5}, Outstanding={6}, PeakInUse={7}, " + + "Exhausted={8}, Allocated={9}, Dropped={10}, " + + "TrimmedEvents={11}, LastTrimRemaining={12}, LastTrimInUse={13}", + _poolId, + _segmentSize, + _maxArrays, + _maxBytes, + rented, + returned, + outstanding, + _peakInUse, + exhausted, + allocated, + dropped, + trimmedEvents, + _lastTrimRemaining, + _lastTrimInUse); + } + + public Task StartAsync(CancellationToken cancellationToken) + { + Console.WriteLine("[PathSegmentPool] Diagnostics started"); + _timer.Change(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(5)); + return Task.CompletedTask; + } + + public Task StopAsync(CancellationToken cancellationToken) + { + _timer.Change(Timeout.Infinite, Timeout.Infinite); + LogSnapshot(null); + Console.WriteLine("[PathSegmentPool] Diagnostics stopped - final snapshot logged above"); + return Task.CompletedTask; + } + + public override void Dispose() + { + _timer.Dispose(); + base.Dispose(); + } +} diff --git a/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/Program.cs b/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/Program.cs index e14429a81db..154ff932956 100644 --- a/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/Program.cs +++ b/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/Program.cs @@ -1,4 +1,4 @@ -ThreadPool.SetMinThreads(1024, 1024); +ThreadPool.SetMinThreads(512, 512); var builder = WebApplication.CreateBuilder(args); @@ -11,8 +11,7 @@ builder .AddGraphQLGateway() - .ModifyPlannerOptions(o => o.EnableRequestGrouping = true) 
- .AddFileSystemConfiguration("./gateway.far"); + .AddFileSystemConfiguration("gateway.far"); var app = builder.Build(); diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentMemory.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentMemory.cs index 5bc028182a1..194a511a4e0 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentMemory.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentMemory.cs @@ -4,7 +4,7 @@ internal static class PathSegmentMemory { private static PathSegmentPool s_pool = new( segmentArraySize: 64, - levels: [64, 256, 1024], + levels: [4096, 8192, 16384], trimInterval: TimeSpan.FromMinutes(5), preAllocate: false); diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentPool.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentPool.cs index eace6ebd8fa..4a1a77569aa 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentPool.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentPool.cs @@ -1,10 +1,14 @@ using System.Diagnostics; +using static HotChocolate.Fusion.Text.Json.PathSegmentPoolEventSource; namespace HotChocolate.Fusion.Text.Json; internal sealed class PathSegmentPool : IDisposable { + private static int s_nextPoolId; internal readonly int _segmentArraySize; + private readonly int _poolId; + private readonly int _numberOfArrays; private readonly Bucket _bucket; public PathSegmentPool(int segmentArraySize, int[] levels, TimeSpan trimInterval, bool preAllocate) @@ -18,12 +22,42 @@ public PathSegmentPool(int segmentArraySize, int[] levels, TimeSpan trimInterval "Trim interval should be greater than 10 seconds to avoid excessive trimming."); _segmentArraySize = segmentArraySize; - _bucket = new Bucket(segmentArraySize, levels, trimInterval, preAllocate); + _poolId = Interlocked.Increment(ref s_nextPoolId); + _numberOfArrays = levels[levels.Length - 1]; + _bucket = new 
Bucket(_poolId, segmentArraySize, levels, trimInterval, preAllocate); + + var log = Log; + if (log.IsEnabled()) + { + log.PoolCreated( + _poolId, + _segmentArraySize, + _numberOfArrays, + (long)_numberOfArrays * _segmentArraySize * sizeof(int)); + } } public int[] Rent() { - return _bucket.Rent() ?? new int[_segmentArraySize]; + var log = Log; + var buffer = _bucket.Rent(); + + if (buffer is null) + { + buffer = new int[_segmentArraySize]; + + if (log.IsEnabled()) + { + log.PoolExhausted(_poolId, _numberOfArrays); + } + } + + if (log.IsEnabled()) + { + log.SegmentRented(buffer.GetHashCode(), buffer.Length, _poolId, _bucket.InUse); + } + + return buffer; } public void Return(int[] array) @@ -33,13 +67,25 @@ public void Return(int[] array) return; } - _bucket.Return(array); + var log = Log; + var returned = _bucket.Return(array); + + if (log.IsEnabled()) + { + log.SegmentReturned(array.GetHashCode(), array.Length, _poolId, _bucket.InUse); + } + + if (!returned && log.IsEnabled()) + { + log.SegmentDropped(array.GetHashCode(), array.Length, _poolId); + } } public void Dispose() => _bucket.Dispose(); private sealed class Bucket : IDisposable { + private readonly int _poolId; private readonly int _segmentArraySize; private readonly int[]?[] _buffers; private readonly int[] _levels; @@ -50,6 +96,7 @@ private sealed class Bucket : IDisposable private int _index; internal Bucket( + int poolId, int segmentArraySize, int[] levels, TimeSpan trimInterval, @@ -57,6 +104,7 @@ internal Bucket( { var numberOfBuffers = levels[levels.Length - 1]; + _poolId = poolId; _segmentArraySize = segmentArraySize; _buffers = new int[numberOfBuffers][]; _levels = levels; @@ -78,6 +126,8 @@ internal Bucket( _trimTimer = new Timer(static b => ((Bucket)b!).Trim(), this, trimInterval, trimInterval); } + internal int InUse => _inUse; + internal int[]? 
Rent() { Interlocked.Increment(ref _inUse); @@ -110,20 +160,27 @@ internal Bucket( if (allocateBuffer) { buffer = new int[_segmentArraySize]; + + var log = Log; + if (log.IsEnabled()) + { + log.SegmentAllocated(buffer.GetHashCode(), _segmentArraySize, _poolId); + } } return buffer; } - internal void Return(int[] array) + internal bool Return(int[] array) { Interlocked.Decrement(ref _inUse); if (array.Length != _segmentArraySize) { - return; + return false; } + var returned = false; var lockTaken = false; try @@ -133,6 +190,7 @@ internal void Return(int[] array) if (_index > 0) { _buffers[--_index] = array; + returned = true; } } finally @@ -142,6 +200,8 @@ internal void Return(int[] array) _lock.Exit(false); } } + + return returned; } private void Trim() @@ -161,6 +221,7 @@ private void Trim() return; } + var trimmed = 0; var lockTaken = false; try @@ -174,6 +235,7 @@ private void Trim() if (_buffers[i] != null) { _buffers[i] = null; + trimmed++; } } @@ -191,6 +253,12 @@ private void Trim() } _currentLevel = previousLevel; + + var log = Log; + if (log.IsEnabled()) + { + log.PoolTrimmed(_poolId, trimmed, previousLimit, _inUse); + } } public void Dispose() => _trimTimer.Dispose(); diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentPoolEventSource.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentPoolEventSource.cs new file mode 100644 index 00000000000..286bf765ab0 --- /dev/null +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/PathSegmentPoolEventSource.cs @@ -0,0 +1,95 @@ +using System.Diagnostics.Tracing; + +namespace HotChocolate.Fusion.Text.Json; + +[EventSource(Name = "HotChocolate-Fusion-PathSegmentPool")] +internal sealed class PathSegmentPoolEventSource : EventSource +{ + public static readonly PathSegmentPoolEventSource Log = new(); + + private PathSegmentPoolEventSource() { } + + [Event( + eventId: 1, + Level = EventLevel.Informational, + Message = "Path segment pool created (PoolId={0}, 
SegmentSize={1}, Arrays={2}, TotalBytes={3})")] + public void PoolCreated(int poolId, int segmentSize, int totalArrays, long totalBytes) + { + if (IsEnabled()) + { + WriteEvent(1, poolId, segmentSize, totalArrays, totalBytes); + } + } + + [Event( + eventId: 2, + Level = EventLevel.Verbose, + Message = "Segment rented (ArrayId={0}, Length={1}, PoolId={2}, InUse={3})")] + public void SegmentRented(int arrayId, int arrayLength, int poolId, int inUse) + { + if (IsEnabled(EventLevel.Verbose, EventKeywords.None)) + { + WriteEvent(2, arrayId, arrayLength, poolId, inUse); + } + } + + [Event( + eventId: 3, + Level = EventLevel.Verbose, + Message = "Segment returned (ArrayId={0}, Length={1}, PoolId={2}, InUse={3})")] + public void SegmentReturned(int arrayId, int arrayLength, int poolId, int inUse) + { + if (IsEnabled(EventLevel.Verbose, EventKeywords.None)) + { + WriteEvent(3, arrayId, arrayLength, poolId, inUse); + } + } + + [Event( + eventId: 4, + Level = EventLevel.Warning, + Message = "Path segment pool exhausted (PoolId={0}, MaxArrays={1})")] + public void PoolExhausted(int poolId, int maxArrays) + { + if (IsEnabled()) + { + WriteEvent(4, poolId, maxArrays); + } + } + + [Event( + eventId: 5, + Level = EventLevel.Informational, + Message = "Segment dropped - pool full (ArrayId={0}, Length={1}, PoolId={2})")] + public void SegmentDropped(int arrayId, int arrayLength, int poolId) + { + if (IsEnabled()) + { + WriteEvent(5, arrayId, arrayLength, poolId); + } + } + + [Event( + eventId: 6, + Level = EventLevel.Informational, + Message = "Segment allocated (ArrayId={0}, Length={1}, PoolId={2})")] + public void SegmentAllocated(int arrayId, int arrayLength, int poolId) + { + if (IsEnabled()) + { + WriteEvent(6, arrayId, arrayLength, poolId); + } + } + + [Event( + eventId: 7, + Level = EventLevel.Informational, + Message = "Path segment pool trimmed (PoolId={0}, Trimmed={1}, Remaining={2}, InUse={3})")] + public void PoolTrimmed(int poolId, int trimmed, int remaining, int inUse) 
+ { + if (IsEnabled()) + { + WriteEvent(7, poolId, trimmed, remaining, inUse); + } + } +} From 33180d3a681992477b76fa4640662cb43b58b927 Mon Sep 17 00:00:00 2001 From: Michael Staib Date: Mon, 16 Mar 2026 11:02:40 +0100 Subject: [PATCH 08/13] align ports --- .../k6/eShop.Gateway/Properties/launchSettings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/Properties/launchSettings.json b/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/Properties/launchSettings.json index fd34c7bfd5c..7981999c10a 100644 --- a/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/Properties/launchSettings.json +++ b/src/HotChocolate/Fusion/benchmarks/k6/eShop.Gateway/Properties/launchSettings.json @@ -4,8 +4,8 @@ "commandName": "Project", "dotnetRunMessages": true, "launchBrowser": true, - "launchUrl": "http://localhost:5220/graphql", - "applicationUrl": "http://localhost:5220", + "launchUrl": "http://localhost:5000/graphql", + "applicationUrl": "http://localhost:5000", "environmentVariables": { "ASPNETCORE_ENVIRONMENT": "Production" } From 44cbb9e33d234c99d9aae312d3ade489ea83a454 Mon Sep 17 00:00:00 2001 From: Michael Staib Date: Mon, 16 Mar 2026 11:08:33 +0100 Subject: [PATCH 09/13] polish --- .../JsonOperationPlanFormatter.cs | 58 ++++++++++++------- .../Serialization/JsonOperationPlanParser.cs | 11 ++++ .../Serialization/OperationPlanFormatter.cs | 10 ++++ .../Serialization/OperationPlanParser.cs | 9 +++ .../YamlOperationPlanFormatter.cs | 6 ++ 5 files changed, 73 insertions(+), 21 deletions(-) diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanFormatter.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanFormatter.cs index e03b1d6eace..754909a140c 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanFormatter.cs +++ 
b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanFormatter.cs @@ -9,6 +9,14 @@ namespace HotChocolate.Fusion.Execution.Nodes.Serialization; +/// +/// Formats an as a JSON document, +/// including its operation metadata, execution nodes, and optional trace information. +/// +/// +/// Optional to control JSON formatting. +/// Defaults to compact (non-indented) output with relaxed encoding. +/// public sealed class JsonOperationPlanFormatter(JsonWriterOptions? options = null) : OperationPlanFormatter { private readonly JsonWriterOptions _writerOptions = options ?? new JsonWriterOptions @@ -17,6 +25,7 @@ public sealed class JsonOperationPlanFormatter(JsonWriterOptions? options = null Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping }; + /// public override string Format(OperationPlan plan, OperationPlanTrace? trace = null) { using var writer = new PooledArrayWriter(); @@ -24,6 +33,13 @@ public override string Format(OperationPlan plan, OperationPlanTrace? trace = nu return Encoding.UTF8.GetString(writer.WrittenSpan); } + /// + /// Formats the specified as JSON and writes the + /// UTF-8 encoded output to . + /// + /// The buffer writer to receive the JSON output. + /// The operation plan to format. + /// Optional trace information to include in the output. public void Format(IBufferWriter writer, OperationPlan plan, OperationPlanTrace? 
trace = null) { using var jsonWriter = new Utf8JsonWriter(writer, _writerOptions); @@ -58,7 +74,7 @@ public void Format(IBufferWriter writer, OperationPlan plan, OperationPlan } jsonWriter.WritePropertyName("nodes"); - WriteNodes(jsonWriter, plan.AllNodes, trace, plan.Operation); + WriteNodes(jsonWriter, plan.Operation, plan.AllNodes, trace); jsonWriter.WriteEndObject(); } @@ -72,7 +88,7 @@ internal void Format(IBufferWriter writer, Operation operation, ImmutableA WriteOperation(jsonWriter, operation); jsonWriter.WritePropertyName("nodes"); - WriteNodes(jsonWriter, allNodes, null, operation); + WriteNodes(jsonWriter, operation, allNodes, null); jsonWriter.WriteEndObject(); } @@ -100,9 +116,9 @@ private static void WriteOperation( private static void WriteNodes( Utf8JsonWriter jsonWriter, + Operation operation, ImmutableArray allNodes, - OperationPlanTrace? trace, - Operation operation) + OperationPlanTrace? trace) { jsonWriter.WriteStartArray(); @@ -114,19 +130,19 @@ private static void WriteNodes( switch (node) { case OperationExecutionNode operationNode: - WriteOperationNode(jsonWriter, operationNode, nodeTrace, operation); + WriteOperationNode(jsonWriter, operation, operationNode, nodeTrace); break; case OperationBatchExecutionNode batchNode: - WriteOperationBatchNode(jsonWriter, batchNode, nodeTrace, operation); + WriteOperationBatchNode(jsonWriter, operation, batchNode, nodeTrace); break; case IntrospectionExecutionNode introspectionNode: - WriteIntrospectionNode(jsonWriter, introspectionNode, nodeTrace, operation); + WriteIntrospectionNode(jsonWriter, operation, introspectionNode, nodeTrace); break; case NodeFieldExecutionNode nodeExecutionNode: - WriteNodeFieldNode(jsonWriter, nodeExecutionNode, nodeTrace, operation); + WriteNodeFieldNode(jsonWriter, operation, nodeExecutionNode, nodeTrace); break; } } @@ -136,9 +152,9 @@ private static void WriteNodes( private static void WriteOperationNode( Utf8JsonWriter jsonWriter, + Operation operation, 
OperationExecutionNode node, - ExecutionNodeTrace? trace, - Operation operation) + ExecutionNodeTrace? trace) { jsonWriter.WriteStartObject(); jsonWriter.WriteNumber("id", node.Id); @@ -231,16 +247,16 @@ private static void WriteOperationNode( jsonWriter.WriteEndArray(); } - TryWriteNodeTrace(jsonWriter, trace, operation); + TryWriteNodeTrace(jsonWriter, operation, trace); jsonWriter.WriteEndObject(); } private static void WriteOperationBatchNode( Utf8JsonWriter jsonWriter, + Operation operation, OperationBatchExecutionNode node, - ExecutionNodeTrace? trace, - Operation operation) + ExecutionNodeTrace? trace) { jsonWriter.WriteStartObject(); jsonWriter.WriteNumber("id", node.Id); @@ -337,16 +353,16 @@ private static void WriteOperationBatchNode( jsonWriter.WriteEndArray(); } - TryWriteNodeTrace(jsonWriter, trace, operation); + TryWriteNodeTrace(jsonWriter, operation, trace); jsonWriter.WriteEndObject(); } private static void WriteIntrospectionNode( Utf8JsonWriter jsonWriter, + Operation operation, IntrospectionExecutionNode node, - ExecutionNodeTrace? trace, - Operation operation) + ExecutionNodeTrace? trace) { jsonWriter.WriteStartObject(); jsonWriter.WriteNumber("id", node.Id); @@ -367,16 +383,16 @@ private static void WriteIntrospectionNode( TryWriteConditions(jsonWriter, node); - TryWriteNodeTrace(jsonWriter, trace, operation); + TryWriteNodeTrace(jsonWriter, operation, trace); jsonWriter.WriteEndObject(); } private static void WriteNodeFieldNode( Utf8JsonWriter jsonWriter, + Operation operation, NodeFieldExecutionNode node, - ExecutionNodeTrace? trace, - Operation operation) + ExecutionNodeTrace? 
trace) { jsonWriter.WriteStartObject(); jsonWriter.WriteNumber("id", node.Id); @@ -398,12 +414,12 @@ private static void WriteNodeFieldNode( TryWriteConditions(jsonWriter, node); - TryWriteNodeTrace(jsonWriter, trace, operation); + TryWriteNodeTrace(jsonWriter, operation, trace); jsonWriter.WriteEndObject(); } - private static void TryWriteNodeTrace(Utf8JsonWriter jsonWriter, ExecutionNodeTrace? trace, Operation operation) + private static void TryWriteNodeTrace(Utf8JsonWriter jsonWriter, Operation operation, ExecutionNodeTrace? trace) { if (trace is not null) { diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanParser.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanParser.cs index 64ae99b087f..34448429b22 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanParser.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanParser.cs @@ -6,10 +6,20 @@ namespace HotChocolate.Fusion.Execution.Nodes.Serialization; +/// +/// Parses a JSON-encoded operation plan into an , +/// reconstructing the operation, execution nodes, and their dependency graph. +/// public sealed class JsonOperationPlanParser : OperationPlanParser { private readonly OperationCompiler _operationCompiler; + /// + /// Initializes a new instance of . + /// + /// + /// The compiler used to compile parsed operation definitions. 
+ /// public JsonOperationPlanParser(OperationCompiler operationCompiler) { ArgumentNullException.ThrowIfNull(operationCompiler); @@ -17,6 +27,7 @@ public JsonOperationPlanParser(OperationCompiler operationCompiler) _operationCompiler = operationCompiler; } + /// public override OperationPlan Parse(ReadOnlyMemory planSourceText) { using var document = JsonDocument.Parse(planSourceText); diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/OperationPlanFormatter.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/OperationPlanFormatter.cs index f21bf65c656..6d2c54602ca 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/OperationPlanFormatter.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/OperationPlanFormatter.cs @@ -1,6 +1,16 @@ namespace HotChocolate.Fusion.Execution.Nodes.Serialization; +/// +/// Base class for formatters that serialize an +/// into a human- or machine-readable string representation. +/// public abstract class OperationPlanFormatter { + /// + /// Formats the specified as a string. + /// + /// The operation plan to format. + /// Optional trace information to include in the output. + /// A string representation of the operation plan. public abstract string Format(OperationPlan plan, OperationPlanTrace? 
trace = null); } diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/OperationPlanParser.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/OperationPlanParser.cs index 5ac1d452789..1018f1cb07a 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/OperationPlanParser.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/OperationPlanParser.cs @@ -1,6 +1,15 @@ namespace HotChocolate.Fusion.Execution.Nodes.Serialization; +/// +/// Base class for parsers that deserialize an +/// from its serialized representation. +/// public abstract class OperationPlanParser { + /// + /// Parses the specified into an . + /// + /// The serialized operation plan bytes to parse. + /// The deserialized . public abstract OperationPlan Parse(ReadOnlyMemory planSourceText); } diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/YamlOperationPlanFormatter.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/YamlOperationPlanFormatter.cs index e8499be2cc9..d028ab0cef5 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/YamlOperationPlanFormatter.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/YamlOperationPlanFormatter.cs @@ -2,8 +2,14 @@ namespace HotChocolate.Fusion.Execution.Nodes.Serialization; +/// +/// Formats an as a YAML document. +/// This formatter is intended for testing purposes and is primarily used +/// to produce human-readable test snapshots. +/// public sealed class YamlOperationPlanFormatter : OperationPlanFormatter { + /// public override string Format(OperationPlan plan, OperationPlanTrace? 
trace = null) { var sb = new StringBuilder(); From 490f84226bb9fe08c89d10d9fb58ac20c273a175 Mon Sep 17 00:00:00 2001 From: Michael Staib Date: Mon, 16 Mar 2026 11:13:21 +0100 Subject: [PATCH 10/13] formatting --- .../src/Fusion.Execution/Execution/OperationPlanContext.cs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs index 0355b794873..12eb771ee89 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs @@ -274,9 +274,9 @@ private CompactPath ToResultPath(SelectionPath selectionSet) return CompactPath.Root; } - Span buffer = stackalloc int[32]; - // This helper can run concurrently across nodes; avoid using the request-local + // CompactPathBuilder can run concurrently across nodes; avoid using the request-local // pool here since that pool is synchronized through FetchResultStore's lock. 
+ Span buffer = stackalloc int[32]; var builder = new CompactPathBuilder(buffer, pool: null); var operation = OperationPlan.Operation; var currentSelectionSet = operation.RootSelectionSet; @@ -502,9 +502,7 @@ public async ValueTask DisposeAsync() _disposed = true; DisposeNodeState(); _sourceSchemaDispatcher.Abort(); - _resultStorePool.Return(_resultStore); - await _clientScope.DisposeAsync(); } } From 1f566c66f53c1cd46d0fb287a0ddfacf379ec068 Mon Sep 17 00:00:00 2001 From: Michael Staib Date: Mon, 16 Mar 2026 11:16:44 +0100 Subject: [PATCH 11/13] polished docs --- .../Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs index 862f0600df9..3a34272e562 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs @@ -3,10 +3,10 @@ namespace HotChocolate.Fusion.Text.Json; /// -/// A compact, integer-based path representation for the Fusion execution engine. -/// Each segment is either a positive Selection ID (field) or a bitwise-NOT array index (negative). -/// The backing array uses [0] = length encoding: _segments[0] holds the number of segments, -/// and _segments[1..length] hold the actual path segments. +/// Represents a path through a GraphQL result tree using integer segments. +/// The sign bit distinguishes between the two segment kinds: +/// positive values are field selection IDs, and negative values are array indices +/// (stored as the bitwise complement of the index).
/// public readonly struct CompactPath : IEquatable { From 2753c97e84f29f5ec9f3800f6bb771e3771735e7 Mon Sep 17 00:00:00 2001 From: Michael Staib Date: Mon, 16 Mar 2026 11:18:03 +0100 Subject: [PATCH 12/13] docs --- .../Fusion.Execution/Text/Json/CompactPath.cs | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs index 3a34272e562..bbdee5ffaa3 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompactPath.cs @@ -10,6 +10,9 @@ namespace HotChocolate.Fusion.Text.Json; /// public readonly struct CompactPath : IEquatable { + /// + /// Gets the empty root path. + /// public static CompactPath Root => default; private readonly int[]? _segments; @@ -17,19 +20,37 @@ namespace HotChocolate.Fusion.Text.Json; internal CompactPath(int[] segments) => _segments = segments; + /// + /// Gets the path segments as a read-only span. + /// public ReadOnlySpan Segments => _segments is null ? ReadOnlySpan.Empty : _segments.AsSpan(1, _segments[0]); + /// + /// Gets the number of segments in the path. + /// public int Length => _segments?[0] ?? 0; + /// + /// Gets a value indicating whether this is the root path (i.e. has no segments). + /// public bool IsRoot => _segments is null; + /// + /// Gets the segment at the specified index. + /// + /// The zero-based segment index. public int this[int index] => _segments![index + 1]; internal int[]? UnsafeGetBackingArray() => _segments; + /// + /// Converts this compact path into a by resolving + /// selection IDs to their response names using the given operation. + /// + /// The operation used to resolve selection IDs. 
public Path ToPath(Operation operation) { ArgumentNullException.ThrowIfNull(operation); @@ -59,18 +80,31 @@ public Path ToPath(Operation operation) return path; } + /// + /// Converts this compact path into a and appends an array index segment. + /// + /// The operation used to resolve selection IDs. + /// The array index to append. public Path ToPath(Operation operation, int appendIndex) => ToPath(operation).Append(appendIndex); + /// + /// Converts this compact path into a and appends a field name segment. + /// + /// The operation used to resolve selection IDs. + /// The field name to append. public Path ToPath(Operation operation, string appendField) => ToPath(operation).Append(appendField); + /// public bool Equals(CompactPath other) => Segments.SequenceEqual(other.Segments); + /// public override bool Equals(object? obj) => obj is CompactPath other && Equals(other); + /// public override int GetHashCode() { var hashCode = new HashCode(); From c55339e8abe2ac0fcdbd047adf1fe2638fb5da06 Mon Sep 17 00:00:00 2001 From: Michael Staib Date: Mon, 16 Mar 2026 11:20:58 +0100 Subject: [PATCH 13/13] snapshot --- ...onDiagnosticListenerTests.Source_Schema_Transport_Error.snap | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/HotChocolate/Fusion/test/Fusion.Diagnostics.Tests/__snapshots__/FusionActivityExecutionDiagnosticListenerTests.Source_Schema_Transport_Error.snap b/src/HotChocolate/Fusion/test/Fusion.Diagnostics.Tests/__snapshots__/FusionActivityExecutionDiagnosticListenerTests.Source_Schema_Transport_Error.snap index 420bf564db8..e2812d61e7e 100644 --- a/src/HotChocolate/Fusion/test/Fusion.Diagnostics.Tests/__snapshots__/FusionActivityExecutionDiagnosticListenerTests.Source_Schema_Transport_Error.snap +++ b/src/HotChocolate/Fusion/test/Fusion.Diagnostics.Tests/__snapshots__/FusionActivityExecutionDiagnosticListenerTests.Source_Schema_Transport_Error.snap @@ -153,7 +153,7 @@ }, { "Key": "exception.stacktrace", - "Value": 
"System.Net.Http.HttpRequestException: Response status code does not indicate success: 500 (Internal Server Error).\n at System.Net.Http.HttpResponseMessage.EnsureSuccessStatusCode()\n at HotChocolate.Fusion.Transport.Http.GraphQLHttpResponse.ReadAsResultAsync(CancellationToken cancellationToken) in GraphQLHttpResponse.cs:line 292\n at HotChocolate.Fusion.Execution.Clients.SourceSchemaHttpClient.Response.ReadAsResultStreamAsync(CancellationToken cancellationToken)+MoveNext() in SourceSchemaHttpClient.cs:line 577\n at HotChocolate.Fusion.Execution.Clients.SourceSchemaHttpClient.Response.ReadAsResultStreamAsync(CancellationToken cancellationToken)+System.Threading.Tasks.Sources.IValueTaskSource.GetResult()\n at HotChocolate.Fusion.Execution.Nodes.OperationExecutionNode.OnExecuteAsync(OperationPlanContext context, CancellationToken cancellationToken) in OperationExecutionNode.cs:line 160\n at HotChocolate.Fusion.Execution.Nodes.OperationExecutionNode.OnExecuteAsync(OperationPlanContext context, CancellationToken cancellationToken) in OperationExecutionNode.cs:line 160" + "Value": "System.Net.Http.HttpRequestException: Response status code does not indicate success: 500 (Internal Server Error).\n at System.Net.Http.HttpResponseMessage.EnsureSuccessStatusCode()\n at HotChocolate.Fusion.Transport.Http.GraphQLHttpResponse.ReadAsResultAsync(CancellationToken cancellationToken) in GraphQLHttpResponse.cs:line 292\n at HotChocolate.Fusion.Execution.Clients.SourceSchemaHttpClient.Response.ReadAsResultStreamAsync(CancellationToken cancellationToken)+MoveNext() in SourceSchemaHttpClient.cs:line 578\n at HotChocolate.Fusion.Execution.Clients.SourceSchemaHttpClient.Response.ReadAsResultStreamAsync(CancellationToken cancellationToken)+System.Threading.Tasks.Sources.IValueTaskSource.GetResult()\n at HotChocolate.Fusion.Execution.Nodes.OperationExecutionNode.OnExecuteAsync(OperationPlanContext context, CancellationToken cancellationToken) in OperationExecutionNode.cs:line 160\n at 
HotChocolate.Fusion.Execution.Nodes.OperationExecutionNode.OnExecuteAsync(OperationPlanContext context, CancellationToken cancellationToken) in OperationExecutionNode.cs:line 160" }, { "Key": "exception.type",