diff --git a/src/HotChocolate/AspNetCore/src/Transport.Formatters/EventStreamResultFormatter.cs b/src/HotChocolate/AspNetCore/src/Transport.Formatters/EventStreamResultFormatter.cs index bcc8a3c24ec..0c68d6852b4 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Formatters/EventStreamResultFormatter.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Formatters/EventStreamResultFormatter.cs @@ -76,6 +76,7 @@ private async ValueTask FormatOperationResultAsync( } finally { + formatContext?.Dispose(); scope?.Dispose(); } } @@ -105,14 +106,21 @@ private async ValueTask FormatResultBatchAsync( try { OperationResultFormatterContext? formatContext = null; - MessageHelper.FormatNextMessage( - _payloadFormatter, - operationResult, - writer, - useIncrementalRfc1, - ref formatContext); - await writer.FlushAsync(ct).ConfigureAwait(false); - keepAlive?.Reset(); + try + { + MessageHelper.FormatNextMessage( + _payloadFormatter, + operationResult, + writer, + useIncrementalRfc1, + ref formatContext); + await writer.FlushAsync(ct).ConfigureAwait(false); + keepAlive?.Reset(); + } + finally + { + formatContext?.Dispose(); + } } catch (Exception ex) { @@ -318,6 +326,7 @@ public async Task ProcessAsync(CancellationToken ct) } finally { + _formatContext?.Dispose(); await responseStream.DisposeAsync().ConfigureAwait(false); } } diff --git a/src/HotChocolate/AspNetCore/src/Transport.Formatters/IncrementalResultFormatAdapters.cs b/src/HotChocolate/AspNetCore/src/Transport.Formatters/IncrementalResultFormatAdapters.cs index ac9bd217c85..6dc97b774c6 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Formatters/IncrementalResultFormatAdapters.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Formatters/IncrementalResultFormatAdapters.cs @@ -1,5 +1,7 @@ using System.Text.Json; +using HotChocolate.Buffers; using HotChocolate.Execution; +using HotChocolate.Language; using HotChocolate.Text.Json; using static HotChocolate.Execution.JsonValueFormatter; @@ -7,14 +9,30 @@ namespace HotChocolate.Transport.Formatters; internal static class IncrementalRfc1ResultFormatAdapter { + private static readonly JsonWriterOptions s_jsonWriterOptions = new() { SkipValidation = true }; + public static void WriteIncremental( JsonWriter writer, OperationResult result, JsonSerializerOptions options, OperationResultFormatterContext context) { + // We capture the defragmentized document from the first result that carries it. + // In a stream of incremental results, the document is the same on every result. + context.InitializeDocument(result.Document); + var deferLookup = context.DeferSelectionLookup; var pending = context.PendingResults; + if (deferLookup is not null) + { + // Next, we cache the initial result's data for later merge with deferred payloads. + // The v0.1 format of the incremental delivery spec did not deduplicate data, + // so this adapter needs to reverse the deduplication by capturing individual field values + // from the initial payload and splicing them back into deferred payloads as needed + // to reconstruct the full selection for RFC-1 clients. + CaptureResultData(context, Path.Root, result.Data); + } + if (result.Pending is { Count: > 0 } pendingResults) { for (var i = 0; i < pendingResults.Count; i++) @@ -40,13 +58,61 @@ public static void WriteIncremental( if (item is IncrementalObjectResult objectResult) { - var path = CombinePath(pendingResult.Path ?? 
Path.Root, objectResult.SubPath); - (entries ??= []).Add( - LegacyIncrementalEntry.ForData( - path, - pendingResult.Label, - objectResult.Data, - objectResult.Errors)); + var pendingPath = pendingResult.Path ?? Path.Root; + var path = CombinePath(pendingPath, objectResult.SubPath); + + if (deferLookup is not null + && objectResult.Data.HasValue + && !objectResult.Data.Value.IsValueNull) + { + // We serialize the deferred patch data into the cache buffer so the + // backing bytes stay stable for the lifetime of the parsed document. + // The same document is then used for both caching field values and + // building the merged payload, avoiding a second serialization pass. + using var document = SerializeDataToCache(context, objectResult.Data.Value); + + // Next, we walk every field in the parsed JSON and store it by its path. + // Later, when we build the merged payload, we pull these cached values + // back in to fill fields that the server did not repeat in the patch. + CaptureElement(context, path, document.RootElement); + + // Try to combine the patch with cached fields into a complete object + // for RFC-1 clients; if there is no overlap, we emit the patch as-is. + if (TryCreateMergedData( + context, + pendingPath, + path, + pendingResult.Label, + document, + out var mergedData)) + { + (entries ??= []).Add( + LegacyIncrementalEntry.ForData( + path, + pendingResult.Label, + data: null, + objectResult.Errors, + mergedData)); + } + else + { + (entries ??= []).Add( + LegacyIncrementalEntry.ForData( + path, + pendingResult.Label, + objectResult.Data, + objectResult.Errors)); + } + } + else + { + (entries ??= []).Add( + LegacyIncrementalEntry.ForData( + path, + pendingResult.Label, + objectResult.Data, + objectResult.Errors)); + } } else if (item is IIncrementalListResult listResult) { @@ -106,6 +172,390 @@ public static void WriteIncremental( } } + private static bool TryCreateMergedData( + OperationResultFormatterContext context, + Path pendingPath, + Path path, + string? label, + JsonDocument patchDocument, + out ReadOnlyMemorySegment mergedData) + { + mergedData = default; + + var deferLookup = context.DeferSelectionLookup; + + if (deferLookup is null + || !TryResolveSelectionForPath(deferLookup, pendingPath, path, label, out var selection) + || !selection.HasFields) + { + return false; + } + + if (patchDocument.RootElement.ValueKind is not JsonValueKind.Object) + { + return false; + } + + var cacheBuffer = context.CacheBuffer; + var start = cacheBuffer.Length; + var mergedWriter = new JsonWriter(cacheBuffer, s_jsonWriterOptions); + WriteMergedObject(mergedWriter, context, path, patchDocument.RootElement, selection); + mergedData = cacheBuffer.GetWrittenMemorySegment(start, cacheBuffer.Length - start); + return true; + } + + /// + /// Finds the defer selection tree that corresponds to the given path. + /// First resolves the root selection by label or path, then walks any + /// remaining path segments to drill into child selections. + /// + private static bool TryResolveSelectionForPath( + DeferSelectionLookup deferLookup, + Path pendingPath, + Path path, + string? 
label, + out DeferSelectionTree selection) + { + if (!deferLookup.TryResolveRoot(pendingPath, label, out var selectionPath, out selection) + && !deferLookup.TryResolveRoot(path, label, out selectionPath, out selection)) + { + return false; + } + + if (selectionPath.Equals(path)) + { + return true; + } + + var pendingSegments = selectionPath.ToList(); + var pathSegments = path.ToList(); + + if (pathSegments.Count < pendingSegments.Count) + { + return false; + } + + for (var i = 0; i < pendingSegments.Count; i++) + { + if (!Equals(pendingSegments[i], pathSegments[i])) + { + return false; + } + } + + for (var i = pendingSegments.Count; i < pathSegments.Count; i++) + { + switch (pathSegments[i]) + { + case string fieldName when selection.TryGetField(fieldName, out var childSelection): + selection = childSelection; + break; + + case int: + continue; + + default: + return false; + } + } + + return true; + } + + /// + /// Writes a JSON object that merges the deferred patch with previously cached fields. + /// For each field the defer selection expects, we check whether the patch contains it. + /// If yes, we write it (possibly merging recursively). If not, we try to fill it from the cache. + /// + private static void WriteMergedObject( + JsonWriter writer, + OperationResultFormatterContext context, + Path path, + JsonElement patchObject, + DeferSelectionTree selection) + { + writer.WriteStartObject(); + + for (var i = 0; i < selection.Fields.Count; i++) + { + var field = selection.Fields[i]; + var fieldPath = path.Append(field.Name); + + // The patch contains this field, so we write it, merging with cached data if needed. + if (patchObject.TryGetProperty(field.Name, out var patchValue)) + { + writer.WritePropertyName(field.Name); + WriteMergedFieldValue(writer, context, fieldPath, patchValue, field.Selection); + } + else + { + // The patch does not contain this field, so we try to fill it from the cache. + TryWriteMissingField(writer, context, field.Name, fieldPath, field.Selection); + } + } + + writer.WriteEndObject(); + } + + /// + /// Writes a single field value during the merge. + /// For leaf fields (no sub-selections), we prefer the cached value from the initial result + /// because the server may have deduplicated it from the patch. + /// For composite fields (objects or arrays), we recurse to merge nested data. + /// + private static void WriteMergedFieldValue( + JsonWriter writer, + OperationResultFormatterContext context, + Path path, + JsonElement patchValue, + DeferSelectionTree selection) + { + // Leaf field: prefer the cached value; fall back to the patch value. 
+        if (!selection.HasFields)
+        {
+            if (!TryWriteCachedValue(writer, context, path))
+            {
+                WriteRawElement(writer, context, patchValue);
+            }
+
+            return;
+        }
+
+        switch (patchValue.ValueKind)
+        {
+            case JsonValueKind.Object:
+                WriteMergedObject(writer, context, path, patchValue, selection);
+                break;
+
+            case JsonValueKind.Array:
+                WriteMergedArray(writer, context, path, patchValue, selection);
+                break;
+
+            default:
+                if (!TryWriteCachedValue(writer, context, path))
+                {
+                    WriteRawElement(writer, context, patchValue);
+                }
+
+                break;
+        }
+    }
+
+    private static void WriteMergedArray(
+        JsonWriter writer,
+        OperationResultFormatterContext context,
+        Path path,
+        JsonElement patchArray,
+        DeferSelectionTree selection)
+    {
+        writer.WriteStartArray();
+
+        var index = 0;
+        foreach (var item in patchArray.EnumerateArray())
+        {
+            var itemPath = path.Append(index++);
+
+            if (item.ValueKind is JsonValueKind.Object)
+            {
+                WriteMergedObject(writer, context, itemPath, item, selection);
+            }
+            else if (item.ValueKind is JsonValueKind.Array)
+            {
+                WriteMergedArray(writer, context, itemPath, item, selection);
+            }
+            else if (!TryWriteCachedValue(writer, context, itemPath))
+            {
+                WriteRawElement(writer, context, item);
+            }
+        }
+
+        writer.WriteEndArray();
+    }
+
+    /// <summary>
+    /// Tries to write a field that was not present in the patch by looking it up in the cache.
+    /// For leaf fields it writes the cached value directly. For arrays it writes the whole
+    /// cached array. For objects it recursively rebuilds from individually cached child fields.
+    /// </summary>
+    private static bool TryWriteMissingField(
+        JsonWriter writer,
+        OperationResultFormatterContext context,
+        string fieldName,
+        Path fieldPath,
+        DeferSelectionTree fieldSelection)
+    {
+        // For leaf fields we write the cached value directly.
+        if (!fieldSelection.HasFields)
+        {
+            if (!context.CachedDataByPath.TryGetValue(fieldPath, out var cachedValue))
+            {
+                return false;
+            }
+
+            writer.WritePropertyName(fieldName);
+            writer.WriteRawValue(cachedValue.Segment.Span);
+            return true;
+        }
+
+        // For array fields we write the whole cached array as raw bytes.
+        if (context.CachedDataByPath.TryGetValue(fieldPath, out var cached)
+            && cached.ValueKind is JsonValueKind.Array)
+        {
+            writer.WritePropertyName(fieldName);
+            writer.WriteRawValue(cached.Segment.Span);
+            return true;
+        }
+
+        // For object fields we recursively rebuild from individually cached child fields.
+        if (!HasAnyCachedFieldData(context, fieldPath, fieldSelection))
+        {
+            return false;
+        }
+
+        writer.WritePropertyName(fieldName);
+        WriteMergedObjectFromCache(writer, context, fieldPath, fieldSelection);
+        return true;
+    }
+
+    private static void WriteMergedObjectFromCache(
+        JsonWriter writer,
+        OperationResultFormatterContext context,
+        Path path,
+        DeferSelectionTree selection)
+    {
+        writer.WriteStartObject();
+
+        for (var i = 0; i < selection.Fields.Count; i++)
+        {
+            var field = selection.Fields[i];
+            var fieldPath = path.Append(field.Name);
+            TryWriteMissingField(writer, context, field.Name, fieldPath, field.Selection);
+        }
+
+        writer.WriteEndObject();
+    }
+
+    private static bool HasAnyCachedFieldData(
+        OperationResultFormatterContext context,
+        Path path,
+        DeferSelectionTree selection)
+    {
+        if (context.CachedDataByPath.ContainsKey(path))
+        {
+            return true;
+        }
+
+        for (var i = 0; i < selection.Fields.Count; i++)
+        {
+            var field = selection.Fields[i];
+            var fieldPath = path.Append(field.Name);
+
+            if (context.CachedDataByPath.ContainsKey(fieldPath)
+                || (field.Selection.HasFields && HasAnyCachedFieldData(context, fieldPath, field.Selection)))
+            {
+                return true;
+            }
+        }
+
+        return false;
+    }
+
+    private static bool TryWriteCachedValue(
+        JsonWriter writer,
+        OperationResultFormatterContext context,
+        Path path)
+    {
+        if (context.CachedDataByPath.TryGetValue(path, out var cachedValue))
+        {
+            writer.WriteRawValue(cachedValue.Segment.Span);
+            return true;
+        }
+
+        return false;
+    }
+
+    private static void CaptureResultData(
+        OperationResultFormatterContext context,
+        Path path,
+        OperationResultData? data)
+    {
+        if (!data.HasValue || data.Value.IsValueNull)
+        {
+            return;
+        }
+
+        using var document = SerializeDataToCache(context, data.Value);
+        CaptureElement(context, path, document.RootElement);
+    }
+
+    private static void CaptureElement(
+        OperationResultFormatterContext context,
+        Path path,
+        JsonElement element)
+    {
+        var segment = WriteElementToCache(context.CacheBuffer, element);
+        context.CachedDataByPath[path] = new CachedJsonValue(segment, element.ValueKind);
+
+        if (element.ValueKind is JsonValueKind.Object)
+        {
+            foreach (var property in element.EnumerateObject())
+            {
+                CaptureElement(context, path.Append(property.Name), property.Value);
+            }
+        }
+        else if (element.ValueKind is JsonValueKind.Array && !path.IsRoot)
+        {
+            var index = 0;
+            foreach (var item in element.EnumerateArray())
+            {
+                CaptureElement(context, path.Append(index++), item);
+            }
+        }
+    }
+
+    private static JsonDocument SerializeDataToCache(OperationResultFormatterContext context, OperationResultData data)
+    {
+        var cacheBuffer = context.CacheBuffer;
+        var start = cacheBuffer.Length;
+        var writer = new JsonWriter(cacheBuffer, s_jsonWriterOptions);
+        data.Formatter.WriteDataTo(writer);
+        var segment = cacheBuffer.GetWrittenMemorySegment(start, cacheBuffer.Length - start);
+        return JsonDocument.Parse(segment.Memory);
+    }
+
+    private static ReadOnlyMemorySegment WriteElementToCache(
+        PooledArrayWriter cacheBuffer,
+        JsonElement element)
+    {
+        var start = cacheBuffer.Length;
+
+        using var writer = new Utf8JsonWriter(cacheBuffer);
+        element.WriteTo(writer);
+        writer.Flush();
+
+        return cacheBuffer.GetWrittenMemorySegment(start, cacheBuffer.Length - start);
+    }
+
+    ///
+    /// Converts a <see cref="JsonElement"/> into raw bytes and writes them to the output.
+    /// Uses the scratch buffer as temporary space; it is reset before each use.
+ /// + private static void WriteRawElement( + JsonWriter writer, + OperationResultFormatterContext context, + JsonElement element) + { + var buffer = context.ScratchBuffer; + buffer.Reset(); + + using (var utf8Writer = new Utf8JsonWriter(buffer)) + { + element.WriteTo(utf8Writer); + } + + writer.WriteRawValue(buffer.WrittenSpan); + } + private static void WriteLegacyIncrementalEntry( JsonWriter writer, LegacyIncrementalEntry entry, @@ -123,7 +573,11 @@ private static void WriteLegacyIncrementalEntry( case LegacyIncrementalEntryKind.Data: writer.WritePropertyName(ResultFieldNames.Data); - if (entry.Data.HasValue) + if (entry.RawData.HasValue) + { + writer.WriteRawValue(entry.RawData.Value.Span); + } + else if (entry.Data.HasValue) { entry.Data.Value.Formatter.WriteDataTo(writer); } @@ -194,6 +648,7 @@ private readonly record struct LegacyIncrementalEntry( Path Path, string? Label, OperationResultData? Data, + ReadOnlyMemorySegment? RawData, IReadOnlyList? Items, IReadOnlyList? Errors) { @@ -201,14 +656,262 @@ public static LegacyIncrementalEntry ForData( Path path, string? label, OperationResultData? data, - IReadOnlyList? errors) - => new(LegacyIncrementalEntryKind.Data, path, label, data, null, errors); + IReadOnlyList? errors, + ReadOnlyMemorySegment? rawData = null) + => new(LegacyIncrementalEntryKind.Data, path, label, data, rawData, null, errors); public static LegacyIncrementalEntry ForItems( Path path, string? label, IReadOnlyList? items, IReadOnlyList? errors) - => new(LegacyIncrementalEntryKind.Items, path, label, null, items, errors); + => new(LegacyIncrementalEntryKind.Items, path, label, null, null, items, errors); + } +} + +internal sealed class DeferSelectionLookup +{ + private readonly Dictionary _selectionsByLabel; + private readonly Dictionary> _selectionsByPath; + + private DeferSelectionLookup( + Dictionary selectionsByLabel, + Dictionary> selectionsByPath) + { + _selectionsByLabel = selectionsByLabel; + _selectionsByPath = selectionsByPath; + } + + /// + /// Walks the query document (after fragment inlining) and collects every @defer directive + /// together with its selected fields. The result is indexed by label and by path + /// so the format adapter can quickly look up which fields a deferred fragment expects. + /// + public static DeferSelectionLookup Create(DocumentNode document) + { + ArgumentNullException.ThrowIfNull(document); + + var selectionsByLabel = new Dictionary(StringComparer.Ordinal); + var selectionsByPath = new Dictionary>(); + + for (var i = 0; i < document.Definitions.Count; i++) + { + if (document.Definitions[i] is not OperationDefinitionNode operation) + { + continue; + } + + CollectDeferredSelections(operation.SelectionSet, Path.Root, selectionsByLabel, selectionsByPath); + } + + return new DeferSelectionLookup(selectionsByLabel, selectionsByPath); + } + + public bool TryResolveRoot( + Path path, + string? 
label, + out Path selectionPath, + out DeferSelectionTree selection) + { + if (!string.IsNullOrEmpty(label) + && _selectionsByLabel.TryGetValue(label!, out var labeledSelection)) + { + selectionPath = labeledSelection.Path; + selection = labeledSelection.Selection; + return true; + } + + if (_selectionsByPath.TryGetValue(path, out var selections)) + { + if (!string.IsNullOrEmpty(label)) + { + for (var i = 0; i < selections.Count; i++) + { + var candidate = selections[i]; + + if (string.Equals(candidate.Label, label, StringComparison.Ordinal)) + { + selectionPath = candidate.Path; + selection = candidate.Selection; + return true; + } + } + } + else + { + for (var i = 0; i < selections.Count; i++) + { + var candidate = selections[i]; + + if (candidate.Label is null) + { + selectionPath = candidate.Path; + selection = candidate.Selection; + return true; + } + } + + if (selections.Count == 1) + { + selectionPath = selections[0].Path; + selection = selections[0].Selection; + return true; + } + } + } + + selectionPath = Path.Root; + selection = default!; + return false; + } + + private static void CollectDeferredSelections( + SelectionSetNode selectionSet, + Path currentPath, + Dictionary selectionsByLabel, + Dictionary> selectionsByPath) + { + for (var i = 0; i < selectionSet.Selections.Count; i++) + { + switch (selectionSet.Selections[i]) + { + case FieldNode fieldNode when fieldNode.SelectionSet is not null: + CollectDeferredSelections( + fieldNode.SelectionSet, + currentPath.Append(GetResponseName(fieldNode)), + selectionsByLabel, + selectionsByPath); + break; + + case InlineFragmentNode inlineFragmentNode: + if (TryGetDeferLabel(inlineFragmentNode, out var label)) + { + var selection = new DeferSelectionTree(); + CollectFields(inlineFragmentNode.SelectionSet, selection); + + if (selection.HasFields) + { + if (!string.IsNullOrEmpty(label)) + { + selectionsByLabel[label] = new DeferredSelection(currentPath, label, selection); + } + + if (!selectionsByPath.TryGetValue(currentPath, out var selections)) + { + selections = []; + selectionsByPath.Add(currentPath, selections); + } + + selections.Add(new DeferredSelection(currentPath, label, selection)); + } + } + + CollectDeferredSelections( + inlineFragmentNode.SelectionSet, + currentPath, + selectionsByLabel, + selectionsByPath); + break; + } + } + } + + private static void CollectFields(SelectionSetNode selectionSet, DeferSelectionTree selection) + { + for (var i = 0; i < selectionSet.Selections.Count; i++) + { + switch (selectionSet.Selections[i]) + { + case FieldNode fieldNode: + var childSelection = selection.GetOrAddField(GetResponseName(fieldNode)); + + if (fieldNode.SelectionSet is not null) + { + CollectFields(fieldNode.SelectionSet, childSelection); + } + + break; + + case InlineFragmentNode inlineFragmentNode when !HasDeferDirective(inlineFragmentNode.Directives): + CollectFields(inlineFragmentNode.SelectionSet, selection); + break; + } + } + } + + private static string GetResponseName(FieldNode field) + => field.Alias?.Value ?? field.Name.Value; + + private static bool TryGetDeferLabel(InlineFragmentNode node, out string? 
label) + { + label = null; + + for (var i = 0; i < node.Directives.Count; i++) + { + var directive = node.Directives[i]; + + if (!directive.Name.Value.Equals("defer", StringComparison.Ordinal)) + { + continue; + } + + for (var j = 0; j < directive.Arguments.Count; j++) + { + var argument = directive.Arguments[j]; + + if (argument.Name.Value.Equals("label", StringComparison.Ordinal) + && argument.Value is StringValueNode stringValue) + { + label = stringValue.Value; + break; + } + } + + return true; + } + + return false; + } + + private static bool HasDeferDirective(IReadOnlyList directives) + { + for (var i = 0; i < directives.Count; i++) + { + if (directives[i].Name.Value.Equals("defer", StringComparison.Ordinal)) + { + return true; + } + } + + return false; + } + + private readonly record struct DeferredSelection(Path Path, string? Label, DeferSelectionTree Selection); +} + +internal sealed class DeferSelectionTree +{ + private readonly Dictionary _fieldsByName = new(StringComparer.Ordinal); + private readonly List _fields = []; + + public IReadOnlyList Fields => _fields; + + public bool HasFields => _fields.Count > 0; + + public bool TryGetField(string responseName, out DeferSelectionTree selection) + => _fieldsByName.TryGetValue(responseName, out selection!); + + public DeferSelectionTree GetOrAddField(string responseName) + { + if (!_fieldsByName.TryGetValue(responseName, out var selection)) + { + selection = new DeferSelectionTree(); + _fieldsByName[responseName] = selection; + _fields.Add(new DeferSelectionField(responseName, selection)); + } + + return selection; } } + +internal readonly record struct DeferSelectionField(string Name, DeferSelectionTree Selection); diff --git a/src/HotChocolate/AspNetCore/src/Transport.Formatters/JsonLinesResultFormatter.cs b/src/HotChocolate/AspNetCore/src/Transport.Formatters/JsonLinesResultFormatter.cs index 935d6d39115..9234eaeb545 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Formatters/JsonLinesResultFormatter.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Formatters/JsonLinesResultFormatter.cs @@ -67,6 +67,7 @@ private async ValueTask FormatOperationResultAsync( } finally { + formatContext?.Dispose(); scope?.Dispose(); } } @@ -100,14 +101,21 @@ private async ValueTask FormatResultBatchAsync( try { OperationResultFormatterContext? formatContext = null; - MessageHelper.FormatNextMessage( - _payloadFormatter, - operationResult, - writer, - useIncrementalRfc1, - ref formatContext); - await writer.FlushAsync(ct).ConfigureAwait(false); - keepAlive?.Reset(); + try + { + MessageHelper.FormatNextMessage( + _payloadFormatter, + operationResult, + writer, + useIncrementalRfc1, + ref formatContext); + await writer.FlushAsync(ct).ConfigureAwait(false); + keepAlive?.Reset(); + } + finally + { + formatContext?.Dispose(); + } } catch (Exception ex) { @@ -272,6 +280,7 @@ public async Task ProcessAsync(CancellationToken ct) } finally { + _formatContext?.Dispose(); await responseStream.DisposeAsync().ConfigureAwait(false); } } diff --git a/src/HotChocolate/AspNetCore/src/Transport.Formatters/JsonResultFormatter.cs b/src/HotChocolate/AspNetCore/src/Transport.Formatters/JsonResultFormatter.cs index dcccfa95c40..58deb34f8c8 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Formatters/JsonResultFormatter.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Formatters/JsonResultFormatter.cs @@ -92,7 +92,14 @@ public void Format(OperationResult result, IBufferWriter writer) ArgumentNullException.ThrowIfNull(writer); OperationResultFormatterContext? 
context = null; - FormatInternal(result, writer, useIncrementalRfc1: false, ref context); + try + { + FormatInternal(result, writer, useIncrementalRfc1: false, ref context); + } + finally + { + context?.Dispose(); + } } internal void Format( @@ -100,9 +107,7 @@ internal void Format( IBufferWriter writer, bool useIncrementalRfc1, ref OperationResultFormatterContext? context) - { - FormatInternal(result, writer, useIncrementalRfc1, ref context); - } + => FormatInternal(result, writer, useIncrementalRfc1, ref context); public ValueTask FormatAsync( OperationResult result, @@ -112,8 +117,7 @@ public ValueTask FormatAsync( ArgumentNullException.ThrowIfNull(result); ArgumentNullException.ThrowIfNull(writer); - OperationResultFormatterContext? context = null; - return FormatInternalAsync(result, writer, useIncrementalRfc1: false, ref context, cancellationToken); + return FormatSingleResultAsync(result, writer, cancellationToken); } internal ValueTask FormatAsync( @@ -200,14 +204,29 @@ private ValueTask FormatInternalAsync( PipeWriter writer, bool useIncrementalRfc1, CancellationToken cancellationToken) + => FormatSingleResultAsync(result, writer, cancellationToken, useIncrementalRfc1); + + private async ValueTask FormatSingleResultAsync( + OperationResult result, + PipeWriter writer, + CancellationToken cancellationToken, + bool useIncrementalRfc1 = false) { OperationResultFormatterContext? context = null; - return FormatInternalAsync( - result, - writer, - useIncrementalRfc1, - ref context, - cancellationToken); + + try + { + await FormatInternalAsync( + result, + writer, + useIncrementalRfc1, + ref context, + cancellationToken).ConfigureAwait(false); + } + finally + { + context?.Dispose(); + } } private ValueTask FormatInternalAsync( @@ -236,26 +255,41 @@ private async ValueTask FormatInternalAsync( { case OperationResult singleResult: OperationResultFormatterContext? singleContext = null; - FormatInternal(singleResult, writer, useIncrementalRfc1, ref singleContext); + + try + { + FormatInternal(singleResult, writer, useIncrementalRfc1, ref singleContext); + } + finally + { + singleContext?.Dispose(); + } + break; case IResponseStream batchResult: OperationResultFormatterContext? streamContext = null; - - await foreach (var partialResult in batchResult.ReadResultsAsync() - .WithCancellation(cancellationToken) - .ConfigureAwait(false)) + try { - try + await foreach (var partialResult in batchResult.ReadResultsAsync() + .WithCancellation(cancellationToken) + .ConfigureAwait(false)) { - FormatInternal(partialResult, writer, useIncrementalRfc1, ref streamContext); - await writer.FlushAsync(cancellationToken).ConfigureAwait(false); - } - finally - { - await partialResult.DisposeAsync().ConfigureAwait(false); + try + { + FormatInternal(partialResult, writer, useIncrementalRfc1, ref streamContext); + await writer.FlushAsync(cancellationToken).ConfigureAwait(false); + } + finally + { + await partialResult.DisposeAsync().ConfigureAwait(false); + } } } + finally + { + streamContext?.Dispose(); + } break; } @@ -271,20 +305,26 @@ private async ValueTask FormatInternalAsync( CancellationToken cancellationToken = default) { OperationResultFormatterContext? 
context = null; - - await foreach (var partialResult in responseStream.ReadResultsAsync() - .WithCancellation(cancellationToken) - .ConfigureAwait(false)) + try { - try + await foreach (var partialResult in responseStream.ReadResultsAsync() + .WithCancellation(cancellationToken) + .ConfigureAwait(false)) { - FormatInternal(partialResult, writer, useIncrementalRfc1, ref context); - await writer.FlushAsync(cancellationToken).ConfigureAwait(false); - } - finally - { - await partialResult.DisposeAsync().ConfigureAwait(false); + try + { + FormatInternal(partialResult, writer, useIncrementalRfc1, ref context); + await writer.FlushAsync(cancellationToken).ConfigureAwait(false); + } + finally + { + await partialResult.DisposeAsync().ConfigureAwait(false); + } } } + finally + { + context?.Dispose(); + } } } diff --git a/src/HotChocolate/AspNetCore/src/Transport.Formatters/MultiPartResultFormatter.cs b/src/HotChocolate/AspNetCore/src/Transport.Formatters/MultiPartResultFormatter.cs index e27ff3b5861..5c2a1147101 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Formatters/MultiPartResultFormatter.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Formatters/MultiPartResultFormatter.cs @@ -82,23 +82,30 @@ private async ValueTask FormatOperationResultAsync( CancellationToken ct = default) { OperationResultFormatterContext? formatContext = null; - MessageHelper.WriteNext(writer); + try + { + MessageHelper.WriteNext(writer); - // First, we write the header of the part. - MessageHelper.WriteResultHeader(writer); + // First, we write the header of the part. + MessageHelper.WriteResultHeader(writer); - // Next, we write the payload of the part. - MessageHelper.WritePayload( - writer, - result, - _payloadFormatter, - useIncrementalRfc1, - ref formatContext); + // Next, we write the payload of the part. + MessageHelper.WritePayload( + writer, + result, + _payloadFormatter, + useIncrementalRfc1, + ref formatContext); - // Last we write the end of the part. - MessageHelper.WriteEnd(writer); + // Last we write the end of the part. + MessageHelper.WriteEnd(writer); - await writer.FlushAsync(ct).ConfigureAwait(false); + await writer.FlushAsync(ct).ConfigureAwait(false); + } + finally + { + formatContext?.Dispose(); + } } private async ValueTask FormatResultBatchAsync( @@ -115,16 +122,23 @@ private async ValueTask FormatResultBatchAsync( try { OperationResultFormatterContext? formatContext = null; - MessageHelper.WriteNext(writer); - MessageHelper.WriteResultHeader(writer); - MessageHelper.WritePayload( - writer, - operationResult, - _payloadFormatter, - useIncrementalRfc1, - ref formatContext); - MessageHelper.WriteEnd(writer); - await writer.FlushAsync(ct).ConfigureAwait(false); + try + { + MessageHelper.WriteNext(writer); + MessageHelper.WriteResultHeader(writer); + MessageHelper.WritePayload( + writer, + operationResult, + _payloadFormatter, + useIncrementalRfc1, + ref formatContext); + MessageHelper.WriteEnd(writer); + await writer.FlushAsync(ct).ConfigureAwait(false); + } + finally + { + formatContext?.Dispose(); + } } finally { @@ -155,46 +169,53 @@ private async ValueTask FormatResponseStreamAsync( OperationResultFormatterContext? 
formatContext = null; var first = true; - while (await enumerator.MoveNextAsync().ConfigureAwait(false)) + try { - var current = enumerator.Current; - - try + while (await enumerator.MoveNextAsync().ConfigureAwait(false)) { - if (first || responseStream.Kind is not DeferredResult) + var current = enumerator.Current; + + try { - MessageHelper.WriteNext(writer); - first = false; - } + if (first || responseStream.Kind is not DeferredResult) + { + MessageHelper.WriteNext(writer); + first = false; + } - // First, we write the header of the part. - MessageHelper.WriteResultHeader(writer); + // First, we write the header of the part. + MessageHelper.WriteResultHeader(writer); - // Next, we write the payload of the part. - MessageHelper.WritePayload( - writer, - current, - _payloadFormatter, - useIncrementalRfc1, - ref formatContext); + // Next, we write the payload of the part. + MessageHelper.WritePayload( + writer, + current, + _payloadFormatter, + useIncrementalRfc1, + ref formatContext); + + if (responseStream.Kind is DeferredResult && (current.HasNext ?? false)) + { + // If the result is a deferred result and has a next result, we need to + // write a new part so that the client knows that there is more to come. + MessageHelper.WriteNext(writer); + } - if (responseStream.Kind is DeferredResult && (current.HasNext ?? false)) + // Now we can write the part to the output stream and flush this chunk. + await writer.FlushAsync(ct).ConfigureAwait(false); + } + finally { - // If the result is a deferred result and has a next result, we need to - // write a new part so that the client knows that there is more to come. - MessageHelper.WriteNext(writer); + // The result objects use pooled memory, so we need to ensure that they + // return the memory by disposing them. + await current.DisposeAsync().ConfigureAwait(false); } - - // Now we can write the part to the output stream and flush this chunk. - await writer.FlushAsync(ct).ConfigureAwait(false); - } - finally - { - // The result objects use pooled memory, so we need to ensure that they - // return the memory by disposing them. - await current.DisposeAsync().ConfigureAwait(false); } } + finally + { + formatContext?.Dispose(); + } // After all parts have been written, we need to write the final boundary. MessageHelper.WriteEnd(writer); diff --git a/src/HotChocolate/AspNetCore/src/Transport.Formatters/OperationResultFormatterContext.cs b/src/HotChocolate/AspNetCore/src/Transport.Formatters/OperationResultFormatterContext.cs index 8c3313968d3..01225a8af8e 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Formatters/OperationResultFormatterContext.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Formatters/OperationResultFormatterContext.cs @@ -1,11 +1,60 @@ +using HotChocolate.Buffers; +using HotChocolate.Language; +using System.Text.Json; + namespace HotChocolate.Transport.Formatters; -internal sealed class OperationResultFormatterContext +internal sealed class OperationResultFormatterContext : IDisposable { private Dictionary? _pendingResults; + private Dictionary? _cachedDataByPath; + private PooledArrayWriter? _cacheBuffer; + private PooledArrayWriter? _scratchBuffer; + private DeferSelectionLookup? 
_deferSelectionLookup; + private bool _disposed; public Dictionary PendingResults => _pendingResults ??= []; + + public Dictionary CachedDataByPath + => _cachedDataByPath ??= []; + + public PooledArrayWriter CacheBuffer + => _cacheBuffer ??= new(); + + public PooledArrayWriter ScratchBuffer + => _scratchBuffer ??= new(); + + public DeferSelectionLookup? DeferSelectionLookup + => _deferSelectionLookup; + + public void InitializeDocument(DocumentNode? document) + { + if (_deferSelectionLookup is not null || document is null) + { + return; + } + + _deferSelectionLookup = DeferSelectionLookup.Create(document); + } + + public void Dispose() + { + if (_disposed) + { + return; + } + + _cacheBuffer?.Dispose(); + _cacheBuffer = null; + _scratchBuffer?.Dispose(); + _scratchBuffer = null; + _disposed = true; + } } internal readonly record struct PendingResultState(Path? Path, string? Label); + +internal readonly record struct CachedJsonValue( + ReadOnlyMemorySegment Segment, + JsonValueKind ValueKind); diff --git a/src/HotChocolate/AspNetCore/test/AspNetCore.Tests/DeferOverHttpTests.cs b/src/HotChocolate/AspNetCore/test/AspNetCore.Tests/DeferOverHttpTests.cs index a62e5802486..84f65573e0f 100644 --- a/src/HotChocolate/AspNetCore/test/AspNetCore.Tests/DeferOverHttpTests.cs +++ b/src/HotChocolate/AspNetCore/test/AspNetCore.Tests/DeferOverHttpTests.cs @@ -1091,6 +1091,208 @@ ... on Droid @defer(label: "droid_details") { """); } + [Fact] + public async Task Defer_Overlap_Multipart_Legacy_Format() + { + // arrange + using var server = CreateDeferServer(); + var client = server.CreateClient(); + + // act + using var request = new HttpRequestMessage(HttpMethod.Post, "/graphql"); + request.Content = JsonContent.Create(new + { + query = """ + { + product { + name + description + } + ... @defer(label: "foo") { + product { + name + description + reviews { + rating + } + } + } + } + """ + }); + request.Headers.Add("Accept", "multipart/mixed; incrementalSpec=v0.1"); + + using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead); + + // assert + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + Assert.Equal("multipart/mixed", response.Content.Headers.ContentType?.MediaType); + + var content = await response.Content.ReadAsStringAsync(); + + Snapshot + .Create() + .Add(content, "Response") + .MatchInline( + """ + + --- + Content-Type: application/json; charset=utf-8 + + {"data":{"product":{"name":"Abc","description":"Abc desc"}},"hasNext":true} + --- + Content-Type: application/json; charset=utf-8 + + {"incremental":[{"data":{"name":"Abc","description":"Abc desc","reviews":[{"rating":5}]},"path":["product"],"label":"foo"}],"hasNext":false} + ----- + + """); + } + + [Fact] + public async Task Defer_Overlap_EventStream_Legacy_Format() + { + // arrange + using var server = CreateDeferServer(); + var client = server.CreateClient(); + + // act + using var request = new HttpRequestMessage(HttpMethod.Post, "/graphql"); + request.Content = JsonContent.Create(new + { + query = """ + { + product { + name + description + } + ... 
@defer(label: "foo") { + product { + name + description + reviews { + rating + } + } + } + } + """ + }); + request.Headers.Add("Accept", "text/event-stream; incrementalSpec=v0.1"); + + using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead); + + // assert + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + Assert.Equal("text/event-stream", response.Content.Headers.ContentType?.MediaType); + + var content = await response.Content.ReadAsStringAsync(); + + Assert.Contains( + "\"data\":{\"product\":{\"name\":\"Abc\",\"description\":\"Abc desc\"}}", + content, + StringComparison.Ordinal); + Assert.Contains( + "\"incremental\":[{\"data\":{\"name\":\"Abc\",\"description\":\"Abc desc\",\"reviews\":[{\"rating\":5}]},\"path\":[\"product\"],\"label\":\"foo\"}]", + content, + StringComparison.Ordinal); + Assert.Contains("event: complete", content, StringComparison.Ordinal); + } + + [Fact] + public async Task Defer_Overlap_JsonLines_Legacy_Format() + { + // arrange + using var server = CreateDeferServer(); + var client = server.CreateClient(); + + // act + using var request = new HttpRequestMessage(HttpMethod.Post, "/graphql"); + request.Content = JsonContent.Create(new + { + query = """ + { + product { + name + description + } + ... @defer(label: "foo") { + product { + name + description + reviews { + rating + } + } + } + } + """ + }); + request.Headers.Add("Accept", "application/jsonl; incrementalSpec=v0.1"); + + using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead); + + // assert + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + Assert.Equal("application/jsonl", response.Content.Headers.ContentType?.MediaType); + + var content = await response.Content.ReadAsStringAsync(); + + Snapshot + .Create() + .Add(content, "Response") + .MatchInline( + """ + {"data":{"product":{"name":"Abc","description":"Abc desc"}},"hasNext":true} + {"incremental":[{"data":{"name":"Abc","description":"Abc desc","reviews":[{"rating":5}]},"path":["product"],"label":"foo"}],"hasNext":false} + + """); + } + + [Fact] + public async Task Defer_Two_Labels_Shared_Field_Multipart_Legacy_Format() + { + // arrange + using var server = CreateDeferServer(); + var client = server.CreateClient(); + + // act + using var request = new HttpRequestMessage(HttpMethod.Post, "/graphql"); + request.Content = JsonContent.Create(new + { + query = """ + { + ... @defer(label: "a") { + product { + name + } + } + ... 
@defer(label: "b") { + product { + name + } + } + } + """ + }); + request.Headers.Add("Accept", "multipart/mixed; incrementalSpec=v0.1"); + + using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead); + + // assert + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + Assert.Equal("multipart/mixed", response.Content.Headers.ContentType?.MediaType); + + var content = await response.Content.ReadAsStringAsync(); + + Assert.Contains("\"label\":\"a\"", content, StringComparison.Ordinal); + Assert.Contains("\"label\":\"b\"", content, StringComparison.Ordinal); + Assert.Contains("\"data\":{\"product\":{\"name\":\"Abc\"}}", content, StringComparison.Ordinal); + Assert.True( + content.Split("\"name\":\"Abc\"", StringSplitOptions.None).Length - 1 >= 2, + "Expected both labeled deferred payloads to include product.name."); + } + private TestServer CreateDeferServer( HttpTransportVersion serverTransportVersion = HttpTransportVersion.Latest) { @@ -1149,5 +1351,13 @@ public async Task GetDescriptionAsync() await Task.Delay(1000); return Name + " desc"; } + + public async Task> GetReviewsAsync() + { + await Task.Delay(1000); + return new[] { new Review(5) }; + } } + + public sealed record Review(int Rating); } diff --git a/src/HotChocolate/AspNetCore/test/AspNetCore.Tests/Serialization/DeferredResultFormatterTests.cs b/src/HotChocolate/AspNetCore/test/AspNetCore.Tests/Serialization/DeferredResultFormatterTests.cs index 3c97ee2ba56..bd13e2e350e 100644 --- a/src/HotChocolate/AspNetCore/test/AspNetCore.Tests/Serialization/DeferredResultFormatterTests.cs +++ b/src/HotChocolate/AspNetCore/test/AspNetCore.Tests/Serialization/DeferredResultFormatterTests.cs @@ -3,6 +3,7 @@ using System.Text.Json; using HotChocolate.Collections.Immutable; using HotChocolate.Execution; +using HotChocolate.Language; using HotChocolate.Text.Json; using HotChocolate.Transport.Formatters; @@ -139,6 +140,405 @@ public async Task JsonLines_Formats_Legacy_Defer_Structure() content); } + [Fact] + public async Task Legacy_Merges_Parent_And_Defer_Overlap() + { + var document = ParseDocument( + """ + { + product { + name + description + } + ... @defer(label: "foo") { + product { + name + description + reviews { + rating + } + } + } + } + """); + + var initial = CreateInitialResult( + document, + new Dictionary + { + ["product"] = new Dictionary + { + ["name"] = "Abc", + ["description"] = "Abc desc" + } + }, + hasNext: true, + new PendingResult(2, Path.Root, "foo")); + + var incremental = CreateIncrementalEnvelope( + document, + new IncrementalObjectResult( + 2, + data: CreateData( + new Dictionary + { + ["product"] = new Dictionary + { + ["reviews"] = new[] + { + new Dictionary + { + ["rating"] = 5 + } + } + } + })), + hasNext: false, + completedId: 2); + + var lines = await FormatLegacyJsonLinesAsync(initial, incremental); + + Assert.Equal( + [ + """{"data":{"product":{"name":"Abc","description":"Abc desc"}},"hasNext":true}""", + """{"incremental":[{"data":{"product":{"name":"Abc","description":"Abc desc","reviews":[{"rating":5}]}},"path":[],"label":"foo"}],"hasNext":false}""" + ], + lines); + } + + [Fact] + public async Task Legacy_Merges_Two_Defers_With_Same_Fields() + { + var document = ParseDocument( + """ + { + ... @defer(label: "a") { + product { + name + } + } + ... 
@defer(label: "b") { + product { + name + } + } + } + """); + + var initial = CreateInitialResult( + document, + new Dictionary(), + hasNext: true, + new PendingResult(2, Path.Root, "a"), + new PendingResult(3, Path.Root, "b")); + + var incrementalA = CreateIncrementalEnvelope( + document, + new IncrementalObjectResult( + 2, + data: CreateData( + new Dictionary + { + ["product"] = new Dictionary + { + ["name"] = "Abc" + } + })), + hasNext: true, + completedId: 2); + + var incrementalB = CreateIncrementalEnvelope( + document, + new IncrementalObjectResult( + 3, + data: CreateData( + new Dictionary + { + ["product"] = new Dictionary() + })), + hasNext: false, + completedId: 3); + + var lines = await FormatLegacyJsonLinesAsync(initial, incrementalA, incrementalB); + + Assert.Equal( + new[] + { + """{"data":{},"hasNext":true}""", + """{"incremental":[{"data":{"product":{"name":"Abc"}},"path":[],"label":"a"}],"hasNext":true}""", + """{"incremental":[{"data":{"product":{"name":"Abc"}},"path":[],"label":"b"}],"hasNext":false}""" + }, + lines); + } + + [Fact] + public async Task Legacy_Merges_Nested_Defers() + { + var document = ParseDocument( + """ + { + ... @defer(label: "outer") { + product { + name + ... @defer(label: "inner") { + name + reviews { + rating + } + } + } + } + } + """); + + var initial = CreateInitialResult( + document, + new Dictionary(), + hasNext: true, + new PendingResult(2, Path.Root, "outer")); + + var outer = CreateIncrementalEnvelope( + document, + new IncrementalObjectResult( + 2, + data: CreateData( + new Dictionary + { + ["product"] = new Dictionary + { + ["name"] = "Abc" + } + })), + hasNext: true, + completedId: 2, + new PendingResult(3, Path.Root.Append("product"), "inner")); + + var inner = CreateIncrementalEnvelope( + document, + new IncrementalObjectResult( + 3, + data: CreateData( + new Dictionary + { + ["reviews"] = new[] + { + new Dictionary + { + ["rating"] = 5 + } + } + })), + hasNext: false, + completedId: 3); + + var lines = await FormatLegacyJsonLinesAsync(initial, outer, inner); + + Assert.Equal( + new[] + { + """{"data":{},"hasNext":true}""", + """{"incremental":[{"data":{"product":{"name":"Abc"}},"path":[],"label":"outer"}],"hasNext":true}""", + """{"incremental":[{"data":{"name":"Abc","reviews":[{"rating":5}]},"path":["product"],"label":"inner"}],"hasNext":false}""" + }, + lines); + } + + [Fact] + public async Task Legacy_Merges_Deep_Object_Overlap() + { + var document = ParseDocument( + """ + { + product { + details { + sku + } + } + ... 
@defer(label: "foo") { + product { + details { + sku + weight + } + } + } + } + """); + + var initial = CreateInitialResult( + document, + new Dictionary + { + ["product"] = new Dictionary + { + ["details"] = new Dictionary + { + ["sku"] = "SKU-1" + } + } + }, + hasNext: true, + new PendingResult(2, Path.Root, "foo")); + + var incremental = CreateIncrementalEnvelope( + document, + new IncrementalObjectResult( + 2, + data: CreateData( + new Dictionary + { + ["product"] = new Dictionary + { + ["details"] = new Dictionary + { + ["weight"] = 10 + } + } + })), + hasNext: false, + completedId: 2); + + var lines = await FormatLegacyJsonLinesAsync(initial, incremental); + + Assert.Equal( + new[] + { + """{"data":{"product":{"details":{"sku":"SKU-1"}}},"hasNext":true}""", + """{"incremental":[{"data":{"product":{"details":{"sku":"SKU-1","weight":10}}},"path":[],"label":"foo"}],"hasNext":false}""" + }, + lines); + } + + [Fact] + public async Task Legacy_Duplicates_Null_Values() + { + var document = ParseDocument( + """ + { + product { + name + description + } + ... @defer(label: "foo") { + product { + name + description + reviews { + rating + } + } + } + } + """); + + var initial = CreateInitialResult( + document, + new Dictionary + { + ["product"] = new Dictionary + { + ["name"] = "Abc", + ["description"] = null + } + }, + hasNext: true, + new PendingResult(2, Path.Root, "foo")); + + var incremental = CreateIncrementalEnvelope( + document, + new IncrementalObjectResult( + 2, + data: CreateData( + new Dictionary + { + ["product"] = new Dictionary + { + ["reviews"] = new[] + { + new Dictionary + { + ["rating"] = 5 + } + } + } + })), + hasNext: false, + completedId: 2); + + var lines = await FormatLegacyJsonLinesAsync(initial, incremental); + + Assert.Equal( + new[] + { + """{"data":{"product":{"name":"Abc","description":null}},"hasNext":true}""", + """{"incremental":[{"data":{"product":{"name":"Abc","description":null,"reviews":[{"rating":5}]}},"path":[],"label":"foo"}],"hasNext":false}""" + }, + lines); + } + + [Fact] + public async Task Legacy_No_Overlap_Behavior_Is_Unchanged() + { + var document = ParseDocument( + """ + { + product { + name + } + ... 
@defer(label: "foo") { + product { + reviews { + rating + } + } + } + } + """); + + var initial = CreateInitialResult( + document, + new Dictionary + { + ["product"] = new Dictionary + { + ["name"] = "Abc" + } + }, + hasNext: true, + new PendingResult(2, Path.Root, "foo")); + + var incremental = CreateIncrementalEnvelope( + document, + new IncrementalObjectResult( + 2, + data: CreateData( + new Dictionary + { + ["product"] = new Dictionary + { + ["reviews"] = new[] + { + new Dictionary + { + ["rating"] = 5 + } + } + } + })), + hasNext: false, + completedId: 2); + + var lines = await FormatLegacyJsonLinesAsync(initial, incremental); + + Assert.Equal( + new[] + { + """{"data":{"product":{"name":"Abc"}},"hasNext":true}""", + """{"incremental":[{"data":{"product":{"reviews":[{"rating":5}]}},"path":[],"label":"foo"}],"hasNext":false}""" + }, + lines); + } + private static async Task FormatAsync( IExecutionResultFormatter formatter, ExecutionResultFormatFlags flags) @@ -154,6 +554,29 @@ private static async Task FormatAsync( return Normalize(await new StreamReader(output).ReadToEndAsync()); } + private static async Task> FormatLegacyJsonLinesAsync(params OperationResult[] results) + { + await using var output = new MemoryStream(); + var writer = PipeWriter.Create(output, new StreamPipeWriterOptions(leaveOpen: true)); + var formatter = new JsonLinesResultFormatter(default); + var stream = new ResponseStream( + () => CreateResults(results), + ExecutionResultKind.DeferredResult); + + await formatter.FormatAsync( + stream, + writer, + ExecutionResultFormatFlags.IncrementalRfc1, + CancellationToken.None); + await writer.CompleteAsync(); + + output.Position = 0; + var content = await new StreamReader(output).ReadToEndAsync(); + return content + .Replace("\r\n", "\n", StringComparison.Ordinal) + .Split('\n', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + } + private static IResponseStream CreateDeferredResponseStream() => new ResponseStream( CreateResults, @@ -166,6 +589,15 @@ private static async IAsyncEnumerable CreateResults() yield return CreateIncrementalResult(); } + private static async IAsyncEnumerable CreateResults(IReadOnlyList results) + { + for (var i = 0; i < results.Count; i++) + { + yield return results[i]; + await Task.Yield(); + } + } + private static OperationResult CreateInitialResult() { var result = new OperationResult( @@ -204,6 +636,57 @@ private static OperationResult CreateIncrementalResult() return result; } + private static OperationResult CreateInitialResult( + DocumentNode document, + object data, + bool hasNext, + params PendingResult[] pending) + { + var result = new OperationResult(CreateData(data)) + { + Document = document, + HasNext = hasNext + }; + + if (pending.Length > 0) + { + result.Pending = ImmutableList.Create(pending); + } + + return result; + } + + private static OperationResult CreateIncrementalEnvelope( + DocumentNode document, + IIncrementalResult incremental, + bool hasNext, + int? 
completedId = null, + params PendingResult[] pending) + { + var result = new OperationResult(ImmutableOrderedDictionary.Empty.Add("__placeholder", true)) + { + Document = document, + Incremental = ImmutableList.Empty.Add(incremental), + HasNext = hasNext, + Extensions = [] + }; + + if (completedId.HasValue) + { + result.Completed = ImmutableList.Empty.Add(new CompletedResult(completedId.Value)); + } + + if (pending.Length > 0) + { + result.Pending = ImmutableList.Create(pending); + } + + return result; + } + + private static DocumentNode ParseDocument(string query) + => Utf8GraphQLParser.Parse(query); + private static OperationResultData CreateData(object value) => new(value, isValueNull: false, new DictionaryJsonFormatter(value), memoryHolder: null); diff --git a/src/HotChocolate/Core/src/Execution.Abstractions/Execution/OperationResult.cs b/src/HotChocolate/Core/src/Execution.Abstractions/Execution/OperationResult.cs index 62392d91d4c..03373d9b678 100644 --- a/src/HotChocolate/Core/src/Execution.Abstractions/Execution/OperationResult.cs +++ b/src/HotChocolate/Core/src/Execution.Abstractions/Execution/OperationResult.cs @@ -1,5 +1,6 @@ using System.Collections.Immutable; using HotChocolate.Collections.Immutable; +using HotChocolate.Language; namespace HotChocolate.Execution; @@ -125,6 +126,12 @@ public OperationResult(ImmutableOrderedDictionary extensions) /// public int? VariableIndex { get; init; } + /// + /// Gets or initializes the defragmentized operation document that produced this result. + /// This may be used by transport formatters that need document-level selection context. + /// + public DocumentNode? Document { get; init; } + /// /// Gets or sets the path to the insertion point for incremental delivery. /// Informs clients how to patch subsequent delta payloads into the original payload. diff --git a/src/HotChocolate/Core/src/Types/Execution/Extensions/OperationContextExtensions.cs b/src/HotChocolate/Core/src/Types/Execution/Extensions/OperationContextExtensions.cs index cc1695ed41b..8976f69dd6c 100644 --- a/src/HotChocolate/Core/src/Types/Execution/Extensions/OperationContextExtensions.cs +++ b/src/HotChocolate/Core/src/Types/Execution/Extensions/OperationContextExtensions.cs @@ -98,6 +98,7 @@ public OperationResult BuildResult() { RequestIndex = resultBuilder.RequestIndex > -1 ? resultBuilder.RequestIndex : null, VariableIndex = resultBuilder.VariableIndex > -1 ? resultBuilder.VariableIndex : null, + Document = context.Operation.Document, ContextData = resultBuilder.ContextData };