diff --git a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/ContentType.cs b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/ContentType.cs index f9801649547..986f677e394 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/ContentType.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/ContentType.cs @@ -1,4 +1,8 @@ +#if FUSION +namespace HotChocolate.Fusion.Transport; +#else namespace HotChocolate.Transport; +#endif /// /// This class provides the default content types for GraphQL requests and responses. diff --git a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/FileReference.cs b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/FileReference.cs index 36b044dea12..3de6c419d12 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/FileReference.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/FileReference.cs @@ -1,6 +1,10 @@ using static HotChocolate.Transport.Properties.TransportAbstractionResources; -namespace HotChocolate.Transport.Http; +#if FUSION +namespace HotChocolate.Fusion.Transport; +#else +namespace HotChocolate.Transport; +#endif /// /// A file reference can be used to upload a file with the diff --git a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/FileReferenceInfo.cs b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/FileReferenceInfo.cs index bc51a810d6e..339a207f957 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/FileReferenceInfo.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/FileReferenceInfo.cs @@ -1,4 +1,8 @@ -namespace HotChocolate.Transport.Http; +#if FUSION +namespace HotChocolate.Fusion.Transport; +#else +namespace HotChocolate.Transport; +#endif /// /// The file reference info contains the actual diff --git a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/FileReferenceNode.cs b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/FileReferenceNode.cs index d277d7e5ef9..89bc9c6326f 100644 --- 
a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/FileReferenceNode.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/FileReferenceNode.cs @@ -1,7 +1,11 @@ using HotChocolate.Language; using HotChocolate.Language.Utilities; -namespace HotChocolate.Transport.Http; +#if FUSION +namespace HotChocolate.Fusion.Transport; +#else +namespace HotChocolate.Transport; +#endif /// /// This file literal is used in order to allow for file references in . @@ -61,7 +65,11 @@ public FileReferenceNode(FileReference value) /// Gets a of this node in the parsed source text /// if available the parser provided this information. /// +#if FUSION + public HotChocolate.Language.Location? Location => null; +#else public Location? Location => null; +#endif /// /// Gets the actual file reference. diff --git a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/IOperationRequest.cs b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/IOperationRequest.cs index 21c589ba99f..a3686a197f5 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/IOperationRequest.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/IOperationRequest.cs @@ -1,6 +1,10 @@ using HotChocolate.Language; +#if FUSION +namespace HotChocolate.Fusion.Transport; +#else namespace HotChocolate.Transport; +#endif public interface IOperationRequest : IRequestBody { @@ -24,6 +28,13 @@ public interface IOperationRequest : IRequestBody /// ErrorHandlingMode? OnError { get; } +#if FUSION + /// + /// Gets an representing the extension values to include with the + /// operation. + /// + JsonSegment Extensions { get; } +#else /// /// Gets a dictionary containing extension values to include with the operation. /// @@ -34,4 +45,5 @@ public interface IOperationRequest : IRequestBody /// operation. /// ObjectValueNode? 
ExtensionsNode { get; } +#endif } diff --git a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/IRequestBody.cs b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/IRequestBody.cs index 0567288d955..a1cf9bf17c6 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/IRequestBody.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/IRequestBody.cs @@ -1,12 +1,27 @@ +#if FUSION +using HotChocolate.Text.Json; + +namespace HotChocolate.Fusion.Transport; +#else using System.Text.Json; namespace HotChocolate.Transport; +#endif /// /// Represents a GraphQL request body that can be sent over a WebSocket connection or HTTP connection. /// public interface IRequestBody { +#if FUSION + /// + /// Writes a serialized version of this request to a . + /// + /// + /// The JSON writer. + /// + void WriteTo(JsonWriter writer); +#else /// /// Writes a serialized version of this request to a . /// @@ -14,4 +29,5 @@ public interface IRequestBody /// The JSON writer. /// void WriteTo(Utf8JsonWriter writer); +#endif } diff --git a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/OperationBatchRequest.cs b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/OperationBatchRequest.cs index 3fe62b8ff02..d0c90cd9ca5 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/OperationBatchRequest.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/OperationBatchRequest.cs @@ -1,8 +1,17 @@ +#if FUSION +using System.Collections.Immutable; +using HotChocolate.Text.Json; +using HotChocolate.Fusion.Transport.Http; +using HotChocolate.Fusion.Transport.Serialization; + +namespace HotChocolate.Fusion.Transport; +#else using System.Collections.Immutable; using System.Text.Json; using HotChocolate.Transport.Serialization; namespace HotChocolate.Transport; +#endif /// /// Represents a GraphQL batch request that can be sent over a WebSocket or HTTP connection. 
@@ -11,11 +20,32 @@ public readonly struct OperationBatchRequest : IRequestBody , IEquatable { +#if FUSION /// - /// Gets the list of operation requests to execute. + /// Initializes a new instance of with the specified + /// immutable array of operation requests. /// - public ImmutableArray Requests { get; } + /// + /// The requests of this batch. + /// + /// + /// The file map entries for multipart file uploads. Default is empty. + /// + public OperationBatchRequest( + ImmutableArray requests, + ImmutableArray fileMap = default) + { + if (requests.IsDefaultOrEmpty) + { + throw new ArgumentException( + "The batch request must contain at least one operation.", + nameof(requests)); + } + Requests = requests; + FileMap = fileMap; + } +#else /// /// Initializes a new instance of with the specified /// immutable array of operation requests. @@ -37,6 +67,22 @@ public OperationBatchRequest(ImmutableArray requests) Requests = requests; } +#endif + + /// + /// Gets the list of operation requests to execute. + /// + public ImmutableArray Requests { get; } + +#if FUSION + /// + /// Gets the file map entries for multipart file uploads. + /// Each entry maps a file key in the variable JSON to the actual file stream, + /// enabling the transport layer to construct the multipart form per the + /// GraphQL multipart request specification. + /// + public ImmutableArray FileMap { get; } +#endif /// /// Writes the request to the specified . @@ -47,7 +93,11 @@ public OperationBatchRequest(ImmutableArray requests) /// /// Thrown if the is . 
/// +#if FUSION + public void WriteTo(JsonWriter writer) +#else public void WriteTo(Utf8JsonWriter writer) +#endif { ArgumentNullException.ThrowIfNull(writer); diff --git a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/OperationRequest.cs b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/OperationRequest.cs index da912174077..38da00dc000 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/OperationRequest.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/OperationRequest.cs @@ -1,14 +1,68 @@ +#if FUSION +using System.Collections.Immutable; +using HotChocolate.Language; +using HotChocolate.Fusion.Execution; +using HotChocolate.Fusion.Transport.Http; +using HotChocolate.Fusion.Transport.Serialization; +using HotChocolate.Text.Json; + +namespace HotChocolate.Fusion.Transport; +#else using System.Text.Json; using HotChocolate.Language; using HotChocolate.Transport.Serialization; namespace HotChocolate.Transport; +#endif /// /// Represents a GraphQL operation request that can be sent over a WebSocket or HTTP connection. /// public sealed class OperationRequest : IEquatable, IOperationRequest { +#if FUSION + /// + /// Initializes a new instance of the struct. + /// + /// + /// The query document containing the operation to execute. + /// + /// + /// The ID of a previously persisted operation that should be executed. + /// + /// + /// The name of the operation to execute. + /// + /// + /// The requested error handling mode. + /// + /// + /// The pre-serialized variable values to use when executing the operation. + /// + /// + /// The pre-serialized extension values to include with the operation. + /// + /// + /// The file map entries for multipart file uploads. Default is empty. + /// + public OperationRequest( + string? query, + string? id, + string? operationName, + ErrorHandlingMode? 
onError, + VariableValues variables, + JsonSegment extensions, + ImmutableArray fileMap = default) + { + Query = query; + Id = id; + OperationName = operationName; + OnError = onError; + Variables = variables; + Extensions = extensions; + FileMap = fileMap; + } +#else /// /// Initializes a new instance of the struct. /// @@ -88,6 +142,7 @@ public OperationRequest( Variables = variables; Extensions = extensions; } +#endif /// /// Gets the ID of a previously persisted operation that should be executed. @@ -109,6 +164,25 @@ public OperationRequest( /// public ErrorHandlingMode? OnError { get; } +#if FUSION + /// + /// Gets the pre-serialized variable values to use when executing the operation. + /// + public VariableValues Variables { get; } + + /// + /// Gets the pre-serialized extension values to include with the operation. + /// + public JsonSegment Extensions { get; } + + /// + /// Gets the file map entries for multipart file uploads. + /// Each entry maps a file key in the variable JSON to the actual file stream, + /// enabling the transport layer to construct the multipart form per the + /// GraphQL multipart request specification. + /// + public ImmutableArray FileMap { get; } +#else /// /// Gets a dictionary containing the variable values to use when executing the operation. /// @@ -130,14 +204,19 @@ public OperationRequest( /// operation. /// public ObjectValueNode? ExtensionsNode { get; } +#endif /// - /// Writes a serialized version of this request to a . + /// Writes a serialized version of this request to a JSON writer. /// /// /// The JSON writer. /// +#if FUSION + public void WriteTo(JsonWriter writer) +#else public void WriteTo(Utf8JsonWriter writer) +#endif { ArgumentNullException.ThrowIfNull(writer); @@ -153,6 +232,28 @@ public void WriteTo(Utf8JsonWriter writer) /// /// if the two objects are equal; otherwise, . /// +#if FUSION + public bool Equals(OperationRequest? 
other) + { + if (other is null) + { + return false; + } + + return Id == other.Id + && Query == other.Query + && Variables.Equals(other.Variables) + && Extensions.Equals(other.Extensions); + } + + /// + public override bool Equals(object? obj) + => obj is OperationRequest other && Equals(other); + + /// + public override int GetHashCode() + => HashCode.Combine(Id, Query, Variables, Extensions); +#else public bool Equals(OperationRequest? other) { if (other is null) @@ -175,6 +276,7 @@ public override bool Equals(object? obj) /// public override int GetHashCode() => HashCode.Combine(Id, Query, Variables, Extensions, VariablesNode, ExtensionsNode); +#endif /// /// Determines whether two objects are equal. diff --git a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/Serialization/JsonOptionDefaults.cs b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/Serialization/JsonOptionDefaults.cs index c32a0bf6303..65d92991b7d 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/Serialization/JsonOptionDefaults.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/Serialization/JsonOptionDefaults.cs @@ -1,7 +1,11 @@ using System.Text.Encodings.Web; using System.Text.Json; +#if FUSION +namespace HotChocolate.Fusion.Transport.Serialization; +#else namespace HotChocolate.Transport.Serialization; +#endif /// /// A helper class that contains the default settings for JSON serialization. 
diff --git a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/Serialization/Utf8GraphQLRequestProperties.cs b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/Serialization/Utf8GraphQLRequestProperties.cs index 87013051dee..fee4b515251 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/Serialization/Utf8GraphQLRequestProperties.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/Serialization/Utf8GraphQLRequestProperties.cs @@ -1,4 +1,8 @@ +#if FUSION +namespace HotChocolate.Fusion.Transport.Serialization; +#else namespace HotChocolate.Transport.Serialization; +#endif /// /// A helper class that contains the default names of the GraphQL request properties. diff --git a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/Serialization/Utf8GraphQLResultProperties.cs b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/Serialization/Utf8GraphQLResultProperties.cs index 8db42ee2379..d302c156add 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/Serialization/Utf8GraphQLResultProperties.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/Serialization/Utf8GraphQLResultProperties.cs @@ -1,4 +1,8 @@ +#if FUSION +namespace HotChocolate.Fusion.Transport.Serialization; +#else namespace HotChocolate.Transport.Serialization; +#endif /// /// This helper class contains the default property names for the GraphQL result object. 
diff --git a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/Serialization/Utf8JsonWriterHelper.cs b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/Serialization/Utf8JsonWriterHelper.cs index 0af1b5b048c..7f4481f21ec 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/Serialization/Utf8JsonWriterHelper.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/Serialization/Utf8JsonWriterHelper.cs @@ -1,16 +1,128 @@ +#if FUSION +using HotChocolate.Language; +using HotChocolate.Text.Json; +#else using System.Collections; using System.Text; using System.Text.Json; using HotChocolate.Language; -using HotChocolate.Transport.Http; +#endif +#if FUSION +namespace HotChocolate.Fusion.Transport.Serialization; +#else namespace HotChocolate.Transport.Serialization; +#endif /// -/// Helper methods for writing to a . +/// Helper methods for writing to a JSON writer. /// internal static class Utf8JsonWriterHelper { +#if FUSION + public static void WriteOperationRequest(JsonWriter writer, OperationBatchRequest batchRequest) + { + writer.WriteStartArray(); + + foreach (var request in batchRequest.Requests) + { + request.WriteTo(writer); + } + + writer.WriteEndArray(); + } + + public static void WriteOperationRequest(JsonWriter writer, OperationRequest request) + { + writer.WriteStartObject(); + + if (!string.IsNullOrWhiteSpace(request.Id)) + { + writer.WritePropertyName(Utf8GraphQLRequestProperties.IdProp); + writer.WriteStringValue(request.Id); + } + + if (!string.IsNullOrWhiteSpace(request.Query)) + { + writer.WritePropertyName(Utf8GraphQLRequestProperties.QueryProp); + writer.WriteStringValue(request.Query); + } + + if (!string.IsNullOrWhiteSpace(request.OperationName)) + { + writer.WritePropertyName(Utf8GraphQLRequestProperties.OperationNameProp); + writer.WriteStringValue(request.OperationName); + } + + if (request.OnError is { } errorHandlingMode) + { + writer.WritePropertyName(Utf8GraphQLRequestProperties.OnErrorProp); + 
writer.WriteStringValue(GetErrorHandlingModeAsString(errorHandlingMode)); + } + + if (!request.Variables.IsEmpty) + { + writer.WritePropertyName(Utf8GraphQLRequestProperties.VariablesProp); + request.Variables.Values.WriteTo(writer); + } + + if (!request.Extensions.IsEmpty) + { + writer.WritePropertyName(Utf8GraphQLRequestProperties.ExtensionsProp); + request.Extensions.WriteTo(writer); + } + + writer.WriteEndObject(); + } + + public static void WriteVariableBatchRequest(JsonWriter writer, VariableBatchRequest request) + { + writer.WriteStartObject(); + + if (!string.IsNullOrWhiteSpace(request.Id)) + { + writer.WritePropertyName(Utf8GraphQLRequestProperties.IdProp); + writer.WriteStringValue(request.Id); + } + + if (!string.IsNullOrWhiteSpace(request.Query)) + { + writer.WritePropertyName(Utf8GraphQLRequestProperties.QueryProp); + writer.WriteStringValue(request.Query); + } + + if (!string.IsNullOrWhiteSpace(request.OperationName)) + { + writer.WritePropertyName(Utf8GraphQLRequestProperties.OperationNameProp); + writer.WriteStringValue(request.OperationName); + } + + if (request.OnError is { } errorHandlingMode) + { + writer.WritePropertyName(Utf8GraphQLRequestProperties.OnErrorProp); + writer.WriteStringValue(GetErrorHandlingModeAsString(errorHandlingMode)); + } + + if (!request.Variables.IsDefaultOrEmpty) + { + writer.WritePropertyName(Utf8GraphQLRequestProperties.VariablesProp); + writer.WriteStartArray(); + foreach (var vars in request.Variables) + { + vars.Values.WriteTo(writer); + } + writer.WriteEndArray(); + } + + if (!request.Extensions.IsEmpty) + { + writer.WritePropertyName(Utf8GraphQLRequestProperties.ExtensionsProp); + request.Extensions.WriteTo(writer); + } + + writer.WriteEndObject(); + } +#else public static void WriteOperationRequest(Utf8JsonWriter writer, OperationBatchRequest batchRequest) { writer.WriteStartArray(); @@ -27,6 +139,10 @@ public static void WriteOperationRequest(Utf8JsonWriter writer, OperationBatchRe 
WriteVariableBatchRequest(writer, variableBatchRequest); break; + case IRequestBody requestBody: + requestBody.WriteTo(writer); + break; + default: throw new NotSupportedException( "The operation request type is not supported."); @@ -359,17 +475,6 @@ internal static IReadOnlyList WriteFilesMap( return fileInfos; } - private static string GetErrorHandlingModeAsString(ErrorHandlingMode mode) - { - return mode switch - { - ErrorHandlingMode.Propagate => "PROPAGATE", - ErrorHandlingMode.Null => "NULL", - ErrorHandlingMode.Halt => "HALT", - _ => throw new ArgumentOutOfRangeException(nameof(mode)) - }; - } - private static void CollectFiles(IRequestBody requestBody, ref Dictionary? files) { switch (requestBody) @@ -615,4 +720,16 @@ private sealed class IndexFilePath(FilePath? parent, int index) { public int Index { get; } = index; } +#endif + + private static string GetErrorHandlingModeAsString(ErrorHandlingMode mode) + { + return mode switch + { + ErrorHandlingMode.Propagate => "PROPAGATE", + ErrorHandlingMode.Null => "NULL", + ErrorHandlingMode.Halt => "HALT", + _ => throw new ArgumentOutOfRangeException(nameof(mode)) + }; + } } diff --git a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/VariableBatchRequest.cs b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/VariableBatchRequest.cs index c33ae6f9736..7d13f293e0e 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Abstractions/VariableBatchRequest.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Abstractions/VariableBatchRequest.cs @@ -1,14 +1,65 @@ +#if FUSION +using System.Collections.Immutable; +using HotChocolate.Language; +using HotChocolate.Text.Json; +using HotChocolate.Fusion.Execution; +using HotChocolate.Fusion.Transport.Serialization; + +namespace HotChocolate.Fusion.Transport; +#else using System.Text.Json; using HotChocolate.Language; using HotChocolate.Transport.Serialization; namespace HotChocolate.Transport; +#endif /// /// Represents a GraphQL operation request that can be sent 
over a WebSocket connection. /// public sealed class VariableBatchRequest : IOperationRequest, IEquatable { +#if FUSION + /// + /// Initializes a new instance of the struct. + /// + /// + /// The query document containing the operation to execute. + /// + /// + /// The ID of a previously persisted operation that should be executed. + /// + /// + /// The name of the operation to execute. + /// + /// + /// The requested error handling mode. + /// + /// + /// A list of dictionaries representing the sets of variable values to use when executing the operation. + /// + /// + /// A dictionary containing extension values to include with the operation. + /// + /// + /// Thrown if the query, ID, and extensions parameters are all null. + /// + public VariableBatchRequest( + string? query, + string? id, + string? operationName, + ErrorHandlingMode? onError, + ImmutableArray variables, + JsonSegment extensions) + { + Query = query; + Id = id; + OperationName = operationName; + OnError = onError; + Variables = variables; + Extensions = extensions; + } +#else /// /// Initializes a new instance of the struct. /// @@ -88,6 +139,7 @@ public VariableBatchRequest( Variables = variables; Extensions = extensions; } +#endif /// /// Gets the ID of a previously persisted operation that should be executed. @@ -109,6 +161,17 @@ public VariableBatchRequest( /// public ErrorHandlingMode? OnError { get; } +#if FUSION + /// + /// Gets a list of dictionaries representing the sets of variable values to use when executing the operation. + /// + public ImmutableArray Variables { get; } + + /// + /// Gets a dictionary containing extension values to include with the operation. + /// + public JsonSegment Extensions { get; } +#else /// /// Gets a list of dictionaries representing the sets of variable values to use when executing the operation. /// @@ -130,14 +193,19 @@ public VariableBatchRequest( /// operation. /// public ObjectValueNode? 
ExtensionsNode { get; } +#endif /// - /// Writes a serialized version of this request to a . + /// Writes a serialized version of this request to a JSON writer. /// /// /// The JSON writer. /// +#if FUSION + public void WriteTo(JsonWriter writer) +#else public void WriteTo(Utf8JsonWriter writer) +#endif { ArgumentNullException.ThrowIfNull(writer); @@ -153,6 +221,28 @@ public void WriteTo(Utf8JsonWriter writer) /// /// if the two objects are equal; otherwise, . /// +#if FUSION + public bool Equals(VariableBatchRequest? other) + { + if (other is null) + { + return false; + } + + return Id == other.Id + && Query == other.Query + && Variables.Equals(other.Variables) + && Extensions.Equals(other.Extensions); + } + + /// + public override bool Equals(object? obj) + => obj is VariableBatchRequest other && Equals(other); + + /// + public override int GetHashCode() + => HashCode.Combine(Id, Query, Variables, Extensions); +#else public bool Equals(VariableBatchRequest? other) { if (other is null) @@ -170,11 +260,12 @@ public bool Equals(VariableBatchRequest? other) /// public override bool Equals(object? obj) - => obj is OperationRequest other && Equals(other); + => obj is VariableBatchRequest other && Equals(other); /// public override int GetHashCode() => HashCode.Combine(Id, Query, Variables, Extensions, VariablesNode, ExtensionsNode); +#endif /// /// Determines whether two objects are equal. 
diff --git a/src/HotChocolate/AspNetCore/src/Transport.Http/DefaultGraphQLHttpClient.cs b/src/HotChocolate/AspNetCore/src/Transport.Http/DefaultGraphQLHttpClient.cs index 6f896022846..049d6020b0c 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Http/DefaultGraphQLHttpClient.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Http/DefaultGraphQLHttpClient.cs @@ -7,10 +7,15 @@ using HotChocolate.Buffers; using HotChocolate.Language; #if FUSION -using HotChocolate.Transport; +using HotChocolate.Fusion.Transport; +using HotChocolate.Fusion.Transport.Http; +using HotChocolate.Fusion.Transport.Serialization; +using HotChocolate.Text.Json; using HotChocolate.Transport.Http; -#endif +using HotChocolate.Types; +#else using HotChocolate.Transport.Serialization; +#endif using static System.Net.Http.HttpCompletionOption; #if FUSION @@ -173,14 +178,19 @@ private static HttpRequestMessage CreateRequestMessage( } else { -#endif message.Headers.Accept.Clear(); - foreach (var contentType in request.Accept) - { - message.Headers.Accept.Add(contentType); - } -#if FUSION + foreach (var contentType in request.Accept) + { + message.Headers.Accept.Add(contentType); + } + } +#else + message.Headers.Accept.Clear(); + foreach (var contentType in request.Accept) + { + message.Headers.Accept.Add(contentType); } + #endif if (method == GraphQLHttpMethod.Post) @@ -213,9 +223,14 @@ private static ByteArrayContent CreatePostContent( PooledArrayWriter arrayWriter, GraphQLHttpRequest request) { +#if FUSION + var jsonWriter = new JsonWriter(arrayWriter, JsonOptionDefaults.WriterOptions); + request.Body.WriteTo(jsonWriter); +#else using var jsonWriter = new Utf8JsonWriter(arrayWriter, JsonOptionDefaults.WriterOptions); request.Body.WriteTo(jsonWriter); jsonWriter.Flush(); +#endif Debug.WriteLine(Encoding.UTF8.GetString(arrayWriter.WrittenSpan)); @@ -230,40 +245,144 @@ private static ByteArrayContent CreatePostContent( return content; } +#if FUSION private static HttpContent 
CreateMultipartContent( PooledArrayWriter arrayWriter, GraphQLHttpRequest request) { - var fileInfos = WriteFileMapJson(arrayWriter, request); + var fileEntries = CollectFileEntries(request.Body); - if (fileInfos.Count == 0) + if (fileEntries.Count == 0) { arrayWriter.Reset(); return CreatePostContent(arrayWriter, request); } + // Group file entries by key so each physical file is written once. + var uniqueFiles = GroupFileEntriesByKey(fileEntries); + + // Write the file map JSON first. + WriteFileMapJson(arrayWriter, uniqueFiles); var start = arrayWriter.Length; + + // Write the operations JSON. WriteOperationJson(arrayWriter, request); var buffer = PooledArrayWriterMarshal.GetUnderlyingBuffer(arrayWriter); var form = new MultipartFormDataContent(); var operation = new ByteArrayContent(buffer, start, arrayWriter.Length - start); -#if FUSION operation.Headers.ContentType = null; operation.Headers.TryAddWithoutValidation("Content-Type", JsonUtf8ContentType); -#else - operation.Headers.ContentType = new MediaTypeHeaderValue(ContentType.Json, "utf-8"); -#endif form.Add(operation, "operations"); var fileMap = new ByteArrayContent(buffer, 0, start); -#if FUSION fileMap.Headers.ContentType = null; fileMap.Headers.TryAddWithoutValidation("Content-Type", JsonUtf8ContentType); + form.Add(fileMap, "map"); + + for (var i = 0; i < uniqueFiles.Count; i++) + { + var (_, file, _) = uniqueFiles[i]; + var fileContent = new StreamContent(file.OpenReadStream()); + if (!string.IsNullOrEmpty(file.ContentType)) + { + fileContent.Headers.ContentType = new MediaTypeHeaderValue(file.ContentType); + } + + form.Add(fileContent, i.ToString(), file.Name); + } + + return form; + } + + private static void WriteOperationJson(PooledArrayWriter arrayWriter, GraphQLHttpRequest request) + { + var jsonWriter = new JsonWriter(arrayWriter, JsonOptionDefaults.WriterOptions); + request.Body.WriteTo(jsonWriter); + } + + private static void WriteFileMapJson( + PooledArrayWriter arrayWriter, + List<(string 
Key, IFile File, List Paths)> uniqueFiles) + { + var jsonWriter = new JsonWriter(arrayWriter, JsonOptionDefaults.WriterOptions); + jsonWriter.WriteStartObject(); + + for (var i = 0; i < uniqueFiles.Count; i++) + { + jsonWriter.WritePropertyName(i.ToString()); + jsonWriter.WriteStartArray(); + + foreach (var path in uniqueFiles[i].Paths) + { + jsonWriter.WriteStringValue(path); + } + + jsonWriter.WriteEndArray(); + } + + jsonWriter.WriteEndObject(); + } + + private static List<(string Key, IFile File, List Paths)> GroupFileEntriesByKey( + IReadOnlyList fileEntries) + { + var result = new List<(string Key, IFile File, List Paths)>(); + var keyIndex = new Dictionary(StringComparer.Ordinal); + + for (var i = 0; i < fileEntries.Count; i++) + { + var entry = fileEntries[i]; + + if (keyIndex.TryGetValue(entry.Key, out var existingIndex)) + { + result[existingIndex].Paths.Add(entry.Path); + } + else + { + keyIndex[entry.Key] = result.Count; + result.Add((entry.Key, entry.file, new List { entry.Path })); + } + } + + return result; + } + + private static IReadOnlyList CollectFileEntries(IRequestBody body) + { + return body switch + { + OperationRequest { FileMap: { IsDefaultOrEmpty: false } fileMap } => [..fileMap], + OperationBatchRequest { FileMap: { IsDefaultOrEmpty: false } fileMap } => [..fileMap], + _ => [] + }; + } #else + private static HttpContent CreateMultipartContent( + PooledArrayWriter arrayWriter, + GraphQLHttpRequest request) + { + var fileInfos = WriteFileMapJson(arrayWriter, request); + + if (fileInfos.Count == 0) + { + arrayWriter.Reset(); + return CreatePostContent(arrayWriter, request); + } + + var start = arrayWriter.Length; + WriteOperationJson(arrayWriter, request); + var buffer = PooledArrayWriterMarshal.GetUnderlyingBuffer(arrayWriter); + + var form = new MultipartFormDataContent(); + + var operation = new ByteArrayContent(buffer, start, arrayWriter.Length - start); + operation.Headers.ContentType = new MediaTypeHeaderValue(ContentType.Json, 
"utf-8"); + form.Add(operation, "operations"); + + var fileMap = new ByteArrayContent(buffer, 0, start); fileMap.Headers.ContentType = new MediaTypeHeaderValue(ContentType.Json, "utf-8"); -#endif form.Add(fileMap, "map"); foreach (var fileInfo in fileInfos) @@ -293,7 +412,91 @@ private static IReadOnlyList WriteFileMapJson( using var jsonWriter = new Utf8JsonWriter(arrayWriter, JsonOptionDefaults.WriterOptions); return Utf8JsonWriterHelper.WriteFilesMap(jsonWriter, request.Body); } +#endif + +#if FUSION + private static Uri CreateGetRequestUri( + PooledArrayWriter arrayWriter, + Uri baseAddress, + IRequestBody body) + { + if (body is not OperationRequest or) + { + throw new InvalidOperationException( + HttpResources.DefaultGraphQLHttpClient_BatchNotAllowed); + } + + var sb = new StringBuilder(); + var appendAmpersand = false; + + sb.Append(baseAddress); + sb.Append('?'); + + if (!string.IsNullOrWhiteSpace(or.Id)) + { + AppendAmpersand(sb, ref appendAmpersand); + sb.Append("id="); + sb.Append(Uri.EscapeDataString(or.Id!)); + } + + if (!string.IsNullOrWhiteSpace(or.Query)) + { + AppendAmpersand(sb, ref appendAmpersand); + sb.Append("query="); + sb.Append(Uri.EscapeDataString(or.Query!)); + } + + if (!string.IsNullOrWhiteSpace(or.OperationName)) + { + AppendAmpersand(sb, ref appendAmpersand); + sb.Append("operationName="); + sb.Append(Uri.EscapeDataString(or.OperationName!)); + } + + if (or.OnError is { } errorHandlingMode) + { + AppendAmpersand(sb, ref appendAmpersand); + sb.Append("onError="); + sb.Append(GetErrorHandlingModeAsString(errorHandlingMode)); + } + + if (!or.Variables.IsEmpty) + { + AppendAmpersand(sb, ref appendAmpersand); + sb.Append("variables="); + sb.Append(Uri.EscapeDataString(FormatJsonSegmentAsString(arrayWriter, or.Variables.Values))); + } + + if (!or.Extensions.IsEmpty) + { + AppendAmpersand(sb, ref appendAmpersand); + sb.Append("extensions="); + sb.Append(Uri.EscapeDataString(FormatJsonSegmentAsString(arrayWriter, or.Extensions))); + } + + 
return new Uri(sb.ToString()); + + static void AppendAmpersand(StringBuilder sb, ref bool appendAmpersand) + { + if (appendAmpersand) + { + sb.Append('&'); + } + + appendAmpersand = true; + } + } + private static string FormatJsonSegmentAsString(PooledArrayWriter arrayWriter, JsonSegment segment) + { + arrayWriter.Reset(); + + var jsonWriter = new JsonWriter(arrayWriter, JsonOptionDefaults.WriterOptions); + segment.WriteTo(jsonWriter); + + return Encoding.UTF8.GetString(arrayWriter.WrittenSpan); + } +#else private static Uri CreateGetRequestUri( PooledArrayWriter arrayWriter, Uri baseAddress, @@ -378,17 +581,6 @@ static void AppendAmpersand(StringBuilder sb, ref bool appendAmpersand) } } - private static string GetErrorHandlingModeAsString(ErrorHandlingMode mode) - { - return mode switch - { - ErrorHandlingMode.Propagate => "PROPAGATE", - ErrorHandlingMode.Null => "NULL", - ErrorHandlingMode.Halt => "HALT", - _ => throw new ArgumentOutOfRangeException(nameof(mode)) - }; - } - private static string FormatDocumentAsJson(PooledArrayWriter arrayWriter, object? 
obj) { arrayWriter.Reset(); @@ -399,6 +591,18 @@ private static string FormatDocumentAsJson(PooledArrayWriter arrayWriter, object return Encoding.UTF8.GetString(arrayWriter.WrittenSpan); } +#endif + + private static string GetErrorHandlingModeAsString(ErrorHandlingMode mode) + { + return mode switch + { + ErrorHandlingMode.Propagate => "PROPAGATE", + ErrorHandlingMode.Null => "NULL", + ErrorHandlingMode.Halt => "HALT", + _ => throw new ArgumentOutOfRangeException(nameof(mode)) + }; + } protected override void Dispose(bool disposing) { diff --git a/src/HotChocolate/AspNetCore/src/Transport.Http/GraphQLHttpClientExtensions.cs b/src/HotChocolate/AspNetCore/src/Transport.Http/GraphQLHttpClientExtensions.cs index 9a2a6e576fc..d2beeeb5c3b 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Http/GraphQLHttpClientExtensions.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Http/GraphQLHttpClientExtensions.cs @@ -1,4 +1,5 @@ #if FUSION +using HotChocolate.Fusion.Execution; using HotChocolate.Transport; #endif @@ -33,10 +34,21 @@ public static Task GetAsync( string query, CancellationToken cancellationToken = default) { +#if FUSION + var operation = new OperationRequest( + query, + id: null, + operationName: null, + onError: null, + variables: VariableValues.Empty, + extensions: JsonSegment.Empty); +#else var operation = new OperationRequest(query); +#endif return GetAsync(client, operation, cancellationToken); } +#if !FUSION /// /// Sends a GraphQL GET request to the specified GraphQL endpoint. /// @@ -132,6 +144,7 @@ public static Task GetAsync( ? GetAsync(client, operation, cancellationToken) : GetAsync(client, operation, uri, cancellationToken); } +#endif /// /// Sends a GraphQL GET request to the specified GraphQL endpoint. @@ -157,7 +170,17 @@ public static Task GetAsync( Uri? 
uri = null, CancellationToken cancellationToken = default) { +#if FUSION + var operation = new OperationRequest( + query, + id: null, + operationName: null, + onError: null, + variables: VariableValues.Empty, + extensions: JsonSegment.Empty); +#else var operation = new OperationRequest(query); +#endif return uri is null ? GetAsync(client, operation, cancellationToken) : GetAsync(client, operation, uri, cancellationToken); @@ -187,7 +210,17 @@ public static Task GetAsync( string? uri = null, CancellationToken cancellationToken = default) { +#if FUSION + var operation = new OperationRequest( + query, + id: null, + operationName: null, + onError: null, + variables: VariableValues.Empty, + extensions: JsonSegment.Empty); +#else var operation = new OperationRequest(query); +#endif return uri is null ? GetAsync(client, operation, cancellationToken) : GetAsync(client, operation, uri, cancellationToken); @@ -313,10 +346,21 @@ public static Task PostAsync( string query, CancellationToken cancellationToken = default) { +#if FUSION + var operation = new OperationRequest( + query, + id: null, + operationName: null, + onError: null, + variables: VariableValues.Empty, + extensions: JsonSegment.Empty); +#else var operation = new OperationRequest(query); +#endif return PostAsync(client, operation, cancellationToken); } +#if !FUSION /// /// Sends a GraphQL POST request to the specified GraphQL endpoint. /// @@ -344,6 +388,7 @@ public static Task PostAsync( var operation = new OperationRequest(query, variables: variables); return PostAsync(client, operation, cancellationToken); } +#endif /// /// Sends a GraphQL POST request to the specified GraphQL endpoint. @@ -369,7 +414,17 @@ public static Task PostAsync( Uri? 
uri = null, CancellationToken cancellationToken = default) { +#if FUSION + var operation = new OperationRequest( + query, + id: null, + operationName: null, + onError: null, + variables: VariableValues.Empty, + extensions: JsonSegment.Empty); +#else var operation = new OperationRequest(query); +#endif return uri is null ? PostAsync(client, operation, cancellationToken) : PostAsync(client, operation, uri, cancellationToken); @@ -399,12 +454,23 @@ public static Task PostAsync( string? uri = null, CancellationToken cancellationToken = default) { +#if FUSION + var operation = new OperationRequest( + query, + id: null, + operationName: null, + onError: null, + variables: VariableValues.Empty, + extensions: JsonSegment.Empty); +#else var operation = new OperationRequest(query); +#endif return uri is null ? PostAsync(client, operation, cancellationToken) : PostAsync(client, operation, uri, cancellationToken); } +#if !FUSION /// /// Sends a GraphQL POST request to the specified GraphQL endpoint. /// @@ -472,6 +538,7 @@ public static Task PostAsync( ? PostAsync(client, operation, cancellationToken) : PostAsync(client, operation, uri, cancellationToken); } +#endif /// /// Sends a GraphQL POST request to the specified GraphQL endpoint. diff --git a/src/HotChocolate/AspNetCore/src/Transport.Http/GraphQLHttpRequest.cs b/src/HotChocolate/AspNetCore/src/Transport.Http/GraphQLHttpRequest.cs index 0a4243ea8e0..cc0122433ff 100644 --- a/src/HotChocolate/AspNetCore/src/Transport.Http/GraphQLHttpRequest.cs +++ b/src/HotChocolate/AspNetCore/src/Transport.Http/GraphQLHttpRequest.cs @@ -1,6 +1,7 @@ using System.Collections.Immutable; using System.Net.Http.Headers; #if FUSION +using HotChocolate.Fusion.Execution; using HotChocolate.Fusion.Execution.Clients; using HotChocolate.Transport; using HotChocolate.Transport.Http; @@ -38,7 +39,17 @@ public GraphQLHttpRequest(string query, Uri? 
requestUri = null) nameof(query)); } +#if FUSION + Body = new OperationRequest( + query, + id: null, + operationName: null, + onError: null, + variables: VariableValues.Empty, + extensions: JsonSegment.Empty); +#else Body = new OperationRequest(query); +#endif Uri = requestUri; } @@ -53,10 +64,20 @@ public GraphQLHttpRequest(string query, Uri? requestUri = null) /// /// /// has no , , - /// or . + /// or . /// public GraphQLHttpRequest(OperationRequest body, Uri? requestUri = null) { +#if FUSION + if (string.IsNullOrEmpty(body.Id) + && string.IsNullOrEmpty(body.Query) + && body.Extensions.IsEmpty) + { + throw new ArgumentException( + HttpResources.GraphQLHttpRequest_QueryIdAndExtensionsNullOrEmpty, + nameof(body)); + } +#else if (string.IsNullOrEmpty(body.Id) && string.IsNullOrEmpty(body.Query) && body.Extensions is null @@ -66,6 +87,7 @@ public GraphQLHttpRequest(OperationRequest body, Uri? requestUri = null) HttpResources.GraphQLHttpRequest_QueryIdAndExtensionsNullOrEmpty, nameof(body)); } +#endif Body = body; Uri = requestUri; @@ -82,10 +104,20 @@ public GraphQLHttpRequest(OperationRequest body, Uri? requestUri = null) /// /// /// has no , , - /// or . + /// or . /// public GraphQLHttpRequest(VariableBatchRequest body, Uri? requestUri = null) { +#if FUSION + if (string.IsNullOrEmpty(body.Id) + && string.IsNullOrEmpty(body.Query) + && body.Extensions.IsEmpty) + { + throw new ArgumentException( + HttpResources.GraphQLHttpRequest_QueryIdAndExtensionsNullOrEmpty, + nameof(body)); + } +#else if (string.IsNullOrEmpty(body.Id) && string.IsNullOrEmpty(body.Query) && body.Extensions is null @@ -95,6 +127,7 @@ public GraphQLHttpRequest(VariableBatchRequest body, Uri? requestUri = null) HttpResources.GraphQLHttpRequest_QueryIdAndExtensionsNullOrEmpty, nameof(body)); } +#endif Body = body; Uri = requestUri; @@ -123,10 +156,16 @@ public GraphQLHttpRequest(OperationBatchRequest body, Uri? 
requestUri = null) foreach (var request in body.Requests) { +#if FUSION + if (string.IsNullOrEmpty(request.Id) + && string.IsNullOrEmpty(request.Query) + && request.Extensions.IsEmpty) +#else if (string.IsNullOrEmpty(request.Id) && string.IsNullOrEmpty(request.Query) && request.Extensions is null && request.ExtensionsNode is null) +#endif { throw new ArgumentException( HttpResources.GraphQLHttpRequest_QueryIdAndExtensionsNullOrEmpty, @@ -138,6 +177,19 @@ public GraphQLHttpRequest(OperationBatchRequest body, Uri? requestUri = null) Uri = requestUri; } +#if FUSION + /// + /// Initializes a new instance of with a raw request body. + /// + /// The request body to send. + /// The GraphQL request URI. + internal GraphQLHttpRequest(IRequestBody body, Uri? requestUri = null) + { + Body = body; + Uri = requestUri; + } +#endif + /// /// Gets the request body. /// diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaHttpClient.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaHttpClient.cs index 68899536eb9..656e531c963 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaHttpClient.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Clients/SourceSchemaHttpClient.cs @@ -5,9 +5,12 @@ using HotChocolate.Fusion.Execution.Nodes; using HotChocolate.Fusion.Properties; using HotChocolate.Fusion.Text.Json; +using HotChocolate.Fusion.Transport; using HotChocolate.Fusion.Transport.Http; +using HotChocolate.Features; using HotChocolate.Language; -using HotChocolate.Transport; +using HotChocolate.Types; +using HotChocolate.Buffers; namespace HotChocolate.Fusion.Execution.Clients; @@ -18,7 +21,7 @@ namespace HotChocolate.Fusion.Execution.Clients; /// public sealed class SourceSchemaHttpClient : ISourceSchemaClient { - private static readonly Uri UnknownUri = new("http://unknown"); + private static readonly Uri s_unknownUri = new("http://unknown"); private static ReadOnlySpan 
VariableIndex => "variableIndex"u8; private static ReadOnlySpan RequestIndex => "requestIndex"u8; @@ -74,19 +77,30 @@ public async ValueTask ExecuteAsync( Debug.WriteLine(request.SchemaName); - var httpRequest = CreateHttpRequest(request); - ConfigureCallbacks(httpRequest, context, request.Node); + ChunkedArrayWriter? buffer = null; - var httpResponse = await _client.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); - - return new Response( - request.OperationType, - httpRequest.Uri ?? UnknownUri, - httpResponse, - request.Variables, - context, - request.Node, - _configuration); + try + { + var httpRequest = CreateHttpRequest(context, request, ref buffer); + ConfigureCallbacks(httpRequest, context, request.Node); + + var httpResponse = await _client.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); + + return new Response( + context, + _configuration, + request.Node, + request.OperationType, + httpRequest.Uri ?? s_unknownUri, + request.Variables, + httpResponse, + buffer); + } + catch + { + buffer?.Dispose(); + throw; + } } /// @@ -104,13 +118,31 @@ public IAsyncEnumerable ExecuteBatchStreamAsync( FusionExecutionResources.SourceSchemaHttpClient_SubscriptionBatchNotSupported); } - var httpRequest = CreateHttpBatchRequest(requests); - ConfigureCallbacks(httpRequest, context, requests[0].Node); + var requiresFileUpload = requests[0].RequiresFileUpload; + ChunkedArrayWriter? buffer = null; - return _configuration.OnSourceSchemaResult is null - ? ExecuteBatchStreamCoreAsync(requests, httpRequest, cancellationToken) - : ExecuteBatchStreamWithCallbackAsync( - context, requests, httpRequest, _configuration.OnSourceSchemaResult, cancellationToken); + try + { + var httpRequest = CreateHttpBatchRequest(context, requests, requiresFileUpload, ref buffer); + ConfigureCallbacks(httpRequest, context, requests[0].Node); + + return _configuration.OnSourceSchemaResult is null + ? 
ExecuteBatchStreamCoreAsync( + requests, + httpRequest, + cancellationToken) + : ExecuteBatchStreamWithCallbackAsync( + context, + requests, + httpRequest, + _configuration.OnSourceSchemaResult, + cancellationToken); + } + catch + { + buffer?.Dispose(); + throw; + } } private async IAsyncEnumerable ExecuteBatchStreamCoreAsync( @@ -291,7 +323,9 @@ private async IAsyncEnumerable ExecuteBatchStreamWithCallback /// based on the number of variable sets and the configured batching mode. /// private GraphQLHttpRequest CreateHttpRequest( - SourceSchemaClientRequest originalRequest) + OperationPlanContext context, + SourceSchemaClientRequest originalRequest, + ref ChunkedArrayWriter? buffer) { var defaultAcceptHeader = originalRequest.OperationType is OperationType.Subscription ? _configuration.SubscriptionAcceptHeaderValue @@ -301,15 +335,14 @@ private GraphQLHttpRequest CreateHttpRequest( switch (originalRequest.Variables.Length) { case 0: - return new GraphQLHttpRequest(CreateSingleRequest(operationSourceText)) + return new GraphQLHttpRequest(CreateSingleRequest(context, originalRequest, ref buffer)) { Uri = _configuration.BaseAddress, AcceptHeaderValue = defaultAcceptHeader }; case 1: - var variableValues = originalRequest.Variables[0].Values; - return new GraphQLHttpRequest(CreateSingleRequest(operationSourceText, variableValues)) + return new GraphQLHttpRequest(CreateSingleRequest(context, originalRequest, ref buffer)) { Uri = _configuration.BaseAddress, AcceptHeaderValue = defaultAcceptHeader, @@ -317,9 +350,10 @@ private GraphQLHttpRequest CreateHttpRequest( }; default: - if (_configuration.BatchingMode == SourceSchemaHttpClientBatchingMode.ApolloRequestBatching) + if (originalRequest.RequiresFileUpload + || _configuration.BatchingMode == SourceSchemaHttpClientBatchingMode.ApolloRequestBatching) { - return new GraphQLHttpRequest(CreateOperationBatchRequest(operationSourceText, originalRequest)) + return new GraphQLHttpRequest(CreateOperationBatchRequest(context, 
originalRequest, ref buffer)) { Uri = _configuration.BaseAddress, AcceptHeaderValue = _configuration.BatchingAcceptHeaderValue, @@ -330,109 +364,225 @@ private GraphQLHttpRequest CreateHttpRequest( return new GraphQLHttpRequest(CreateVariableBatchRequest(operationSourceText, originalRequest)) { Uri = _configuration.BaseAddress, - AcceptHeaderValue = _configuration.BatchingAcceptHeaderValue, - EnableFileUploads = originalRequest.RequiresFileUpload + AcceptHeaderValue = _configuration.BatchingAcceptHeaderValue }; } } private GraphQLHttpRequest CreateHttpBatchRequest( - IReadOnlyList originalRequests) + OperationPlanContext context, + ImmutableArray originalRequests, + bool requiresFileUpload, + ref ChunkedArrayWriter? buffer) { - var batchRequests = ImmutableArray.CreateBuilder(originalRequests.Count); - var enableFileUploads = false; - - for (var i = 0; i < originalRequests.Count; i++) + if (requiresFileUpload) { - var sourceRequest = originalRequests[i]; - enableFileUploads |= sourceRequest.RequiresFileUpload; + var batchRequests = ImmutableArray.CreateBuilder(); + var fileEntries = ImmutableArray.CreateBuilder(); + var fileLookup = context.RequestContext.Features.GetRequired(); + buffer ??= new ChunkedArrayWriter(); + var i = 0; - var body = CreateRequestBody(sourceRequest); - if (body is IOperationRequest operationRequest) + foreach (var sourceRequest in originalRequests) { - batchRequests.Add(operationRequest); + switch (sourceRequest.Variables.Length) + { + case 0: + batchRequests.Add( + CreateBatchUploadRequest( + sourceRequest, + VariableValues.Empty, + buffer, + fileLookup, + fileEntries)); + i++; + break; + + case 1: + batchRequests.Add( + CreateBatchUploadRequest( + sourceRequest, + sourceRequest.Variables[0], + buffer, + fileLookup, + fileEntries, + $"{i}.variables")); + i++; + break; + + default: + for (var j = 0; j < sourceRequest.Variables.Length; j++) + { + batchRequests.Add( + CreateBatchUploadRequest( + sourceRequest, + sourceRequest.Variables[j], + 
buffer, + fileLookup, + fileEntries, + $"{i}.variables")); + i++; + } + break; + } } - else + + return new GraphQLHttpRequest( + new OperationBatchRequest(batchRequests.MoveToImmutable(), fileEntries.ToImmutable())) + { + Uri = _configuration.BaseAddress, + AcceptHeaderValue = _configuration.BatchingAcceptHeaderValue, + EnableFileUploads = true + }; + } + else + { + var batchRequests = ImmutableArray.CreateBuilder(originalRequests.Length); + + foreach (var sourceRequest in originalRequests) { - throw new InvalidOperationException( - $"The request body type '{body.GetType().Name}' cannot be included in an operation batch."); + var body = CreateRequestBody(context, sourceRequest, ref buffer); + if (body is IOperationRequest operationRequest) + { + batchRequests.Add(operationRequest); + } + else + { + throw new InvalidOperationException( + $"The request body type '{body.GetType().Name}' cannot be included in an operation batch."); + } } + + return new GraphQLHttpRequest(new OperationBatchRequest(batchRequests.MoveToImmutable())) + { + Uri = _configuration.BaseAddress, + AcceptHeaderValue = _configuration.BatchingAcceptHeaderValue + }; } + } - return new GraphQLHttpRequest(new OperationBatchRequest(batchRequests.MoveToImmutable())) + private static IRequestBody CreateRequestBody( + OperationPlanContext context, + SourceSchemaClientRequest originalRequest, + ref ChunkedArrayWriter? 
writer) + { + return originalRequest.Variables.Length switch { - Uri = _configuration.BaseAddress, - AcceptHeaderValue = _configuration.BatchingAcceptHeaderValue, - EnableFileUploads = enableFileUploads + 0 or 1 => CreateSingleRequest(context, originalRequest, ref writer), + _ => CreateVariableBatchRequest(originalRequest.OperationSourceText, originalRequest) }; } - private static IRequestBody CreateRequestBody( - SourceSchemaClientRequest originalRequest) + private static OperationRequest CreateSingleRequest( + OperationPlanContext context, + SourceSchemaClientRequest originalRequest, + ref ChunkedArrayWriter? writer) { - var operationSourceText = originalRequest.OperationSourceText; + var variables = originalRequest.Variables.IsDefaultOrEmpty + ? VariableValues.Empty + : originalRequest.Variables[0]; - switch (originalRequest.Variables.Length) + if (originalRequest.RequiresFileUpload) { - case 0: - return CreateSingleRequest(operationSourceText); - - case 1: - var variableValues = originalRequest.Variables[0].Values; - return CreateSingleRequest(operationSourceText, variableValues); - - default: - return CreateVariableBatchRequest(operationSourceText, originalRequest); + writer ??= new ChunkedArrayWriter(); + var fileLookup = context.RequestContext.Features.GetRequired(); + var (cleanedJson, fileMap) = FileEntryBuilder.Build(writer, variables.Values, fileLookup); + + return new OperationRequest( + originalRequest.OperationSourceText, + id: null, + operationName: null, + onError: null, + variables: variables with { Values = cleanedJson }, + extensions: JsonSegment.Empty, + fileMap: fileMap); } + + return new OperationRequest( + originalRequest.OperationSourceText, + id: null, + operationName: null, + onError: null, + variables: variables, + extensions: JsonSegment.Empty); } - private static OperationRequest CreateSingleRequest( - string operationSourceText, - ObjectValueNode? 
variables = null) + private static OperationRequest CreateBatchUploadRequest( + SourceSchemaClientRequest originalRequest, + VariableValues variables, + ChunkedArrayWriter writer, + IFileLookup fileLookup, + ImmutableArray.Builder fileEntries, + string pathPrefix = "variables") { + var cleanedJson = FileEntryBuilder.Build(writer, variables.Values, fileLookup, fileEntries, pathPrefix); + return new OperationRequest( - operationSourceText, + originalRequest.OperationSourceText, id: null, operationName: null, onError: null, - variables: variables, - extensions: null); + variables: variables with { Values = cleanedJson }, + extensions: JsonSegment.Empty); } private static OperationBatchRequest CreateOperationBatchRequest( - string operationSourceText, - SourceSchemaClientRequest originalRequest) + OperationPlanContext context, + SourceSchemaClientRequest originalRequest, + ref ChunkedArrayWriter? writer) { - var requests = new OperationRequest[originalRequest.Variables.Length]; - - for (var i = 0; i < requests.Length; i++) + if (originalRequest.RequiresFileUpload) { - requests[i] = CreateSingleRequest( - operationSourceText, - originalRequest.Variables[i].Values); + writer ??= new ChunkedArrayWriter(); + var fileLookup = context.RequestContext.Features.GetRequired(); + var fileEntries = ImmutableArray.CreateBuilder(); + var requests = new OperationRequest[originalRequest.Variables.Length]; + + for (var i = 0; i < requests.Length; i++) + { + requests[i] = CreateBatchUploadRequest( + originalRequest, + originalRequest.Variables[i], + writer, + fileLookup, + fileEntries, + $"{i}.variables"); + } + + return new OperationBatchRequest( + ImmutableArray.Create(requests), + fileEntries.ToImmutable()); } + else + { + var requests = new OperationRequest[originalRequest.Variables.Length]; + + for (var i = 0; i < requests.Length; i++) + { + requests[i] = new OperationRequest( + originalRequest.OperationSourceText, + id: null, + operationName: null, + onError: null, + variables: 
originalRequest.Variables[i], + extensions: JsonSegment.Empty); + } - return new OperationBatchRequest(ImmutableArray.Create(requests)); + return new OperationBatchRequest(ImmutableArray.Create(requests)); + } } private static VariableBatchRequest CreateVariableBatchRequest( string operationSourceText, SourceSchemaClientRequest originalRequest) { - var variables = new ObjectValueNode[originalRequest.Variables.Length]; - - for (var i = 0; i < originalRequest.Variables.Length; i++) - { - variables[i] = originalRequest.Variables[i].Values; - } - return new VariableBatchRequest( operationSourceText, id: null, operationName: null, onError: null, - variables: variables, - extensions: null); + variables: originalRequest.Variables, + extensions: JsonSegment.Empty); } private static int ResolveRequestIndex( @@ -574,13 +724,14 @@ private static bool ContainsSubscriptionRequest( /// requests where the response stream is read lazily on enumeration. /// private sealed class Response( + OperationPlanContext context, + SourceSchemaHttpClientConfiguration configuration, + ExecutionNode node, OperationType operation, Uri uri, - GraphQLHttpResponse response, ImmutableArray variables, - OperationPlanContext context, - ExecutionNode node, - SourceSchemaHttpClientConfiguration configuration) + GraphQLHttpResponse response, + ChunkedArrayWriter? 
buffer) : SourceSchemaClientResponse { public override Uri Uri => uri; @@ -805,6 +956,10 @@ private async IAsyncEnumerable ReadAsResultStreamWithCallbac } } - public override void Dispose() => response.Dispose(); + public override void Dispose() + { + response.Dispose(); + buffer?.Dispose(); + } } } diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/JsonVariableCoercion.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/JsonVariableCoercion.cs index ffcc9145394..396d8b3680f 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/JsonVariableCoercion.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/JsonVariableCoercion.cs @@ -6,7 +6,7 @@ using HotChocolate.Features; using HotChocolate.Fusion.Types; using HotChocolate.Language; -using HotChocolate.Transport.Http; +using HotChocolate.Fusion.Transport; using HotChocolate.Types; namespace HotChocolate.Fusion.Execution; @@ -346,9 +346,9 @@ private readonly bool TryParseScalar( { if (element.ValueKind is JsonValueKind.String && element.GetString() is { Length: > 0 } fileKey - && _context.Features.GetRequired().TryGetFile(fileKey, out var file)) + && _context.Features.GetRequired().TryGetFile(fileKey, out _)) { - value = new FileReferenceNode(file.OpenReadStream, file.Name, file.ContentType); + value = new StringValueNode($"$.file({fileKey})"); error = null; return true; } diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationBatchExecutionNode.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationBatchExecutionNode.cs index c68f13d2c73..78b30327c62 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationBatchExecutionNode.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/OperationBatchExecutionNode.cs @@ -8,6 +8,7 @@ namespace HotChocolate.Fusion.Execution.Nodes; public sealed class OperationBatchExecutionNode : ExecutionNode { private readonly OperationDefinition[] 
_operations; + private readonly bool _requiresFileUpload; internal OperationBatchExecutionNode( int id, @@ -16,6 +17,7 @@ internal OperationBatchExecutionNode( Id = id; _operations = operations; SchemaName = operations[0].SchemaName!; + _requiresFileUpload = operations.Any(t => t.RequiresFileUpload); } public override int Id { get; } @@ -293,7 +295,7 @@ private int BuildRequests( OperationType = operation.Operation.Type, OperationSourceText = operation.Operation.SourceText, Variables = variables, - RequiresFileUpload = operation.RequiresFileUpload + RequiresFileUpload = _requiresFileUpload }); operationByIndex[operationCount] = operation; diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanFormatter.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanFormatter.cs index 3124a277e93..03094025086 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanFormatter.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Nodes/Serialization/JsonOperationPlanFormatter.cs @@ -4,8 +4,7 @@ using System.Text.Encodings.Web; using System.Text.Json; using HotChocolate.Buffers; -using HotChocolate.Language; -using HotChocolate.Transport.Http; +using JsonWriter = HotChocolate.Text.Json.JsonWriter; namespace HotChocolate.Fusion.Execution.Nodes.Serialization; @@ -42,35 +41,43 @@ public override string Format(OperationPlan plan, OperationPlanTrace? trace = nu /// Optional trace information to include in the output. public void Format(IBufferWriter writer, OperationPlan plan, OperationPlanTrace? 
trace = null) { - using var jsonWriter = new Utf8JsonWriter(writer, _writerOptions); + var jsonWriter = new JsonWriter(writer, _writerOptions); jsonWriter.WriteStartObject(); - jsonWriter.WriteString("id", plan.Id); + jsonWriter.WritePropertyName("id"); + jsonWriter.WriteStringValue(plan.Id); jsonWriter.WritePropertyName("operation"); WriteOperation(jsonWriter, plan.Operation); - jsonWriter.WriteNumber("searchSpace", plan.SearchSpace); - jsonWriter.WriteNumber("expandedNodes", plan.ExpandedNodes); + jsonWriter.WritePropertyName("searchSpace"); + jsonWriter.WriteNumberValue(plan.SearchSpace); + + jsonWriter.WritePropertyName("expandedNodes"); + jsonWriter.WriteNumberValue(plan.ExpandedNodes); if (trace is not null) { if (!string.IsNullOrEmpty(trace.AppId)) { - jsonWriter.WriteString("appId", trace.AppId); + jsonWriter.WritePropertyName("appId"); + jsonWriter.WriteStringValue(trace.AppId); } if (!string.IsNullOrEmpty(trace.EnvironmentName)) { - jsonWriter.WriteString("environment", trace.EnvironmentName); + jsonWriter.WritePropertyName("environment"); + jsonWriter.WriteStringValue(trace.EnvironmentName); } if (!string.IsNullOrEmpty(trace.TraceId)) { - jsonWriter.WriteString("traceId", trace.TraceId); + jsonWriter.WritePropertyName("traceId"); + jsonWriter.WriteStringValue(trace.TraceId); } - jsonWriter.WriteNumber("duration", trace.Duration.TotalMilliseconds); + jsonWriter.WritePropertyName("duration"); + jsonWriter.WriteNumberValue(trace.Duration.TotalMilliseconds); } jsonWriter.WritePropertyName("nodes"); @@ -81,7 +88,7 @@ public void Format(IBufferWriter writer, OperationPlan plan, OperationPlan internal void Format(IBufferWriter writer, Operation operation, ImmutableArray allNodes) { - using var jsonWriter = new Utf8JsonWriter(writer, _writerOptions); + var jsonWriter = new JsonWriter(writer, _writerOptions); jsonWriter.WriteStartObject(); jsonWriter.WritePropertyName("operation"); @@ -94,28 +101,37 @@ internal void Format(IBufferWriter writer, Operation 
operation, ImmutableA } private static void WriteOperation( - Utf8JsonWriter jsonWriter, + JsonWriter jsonWriter, Operation operation) { jsonWriter.WriteStartObject(); if (!string.IsNullOrEmpty(operation.Name)) { - jsonWriter.WriteString("name", operation.Name); + jsonWriter.WritePropertyName("name"); + jsonWriter.WriteStringValue(operation.Name); } - jsonWriter.WriteString("kind", operation.Definition.Operation.ToString()); - jsonWriter.WriteString("document", operation.Definition.ToString(indented: true)); + jsonWriter.WritePropertyName("kind"); + jsonWriter.WriteStringValue(operation.Definition.Operation.ToString()); + + jsonWriter.WritePropertyName("document"); + jsonWriter.WriteStringValue(operation.Definition.ToString(indented: true)); + + jsonWriter.WritePropertyName("id"); + jsonWriter.WriteStringValue(operation.Id); + + jsonWriter.WritePropertyName("hash"); + jsonWriter.WriteStringValue(operation.Hash); - jsonWriter.WriteString("id", operation.Id); - jsonWriter.WriteString("hash", operation.Hash); - jsonWriter.WriteString("shortHash", operation.Hash[..8]); + jsonWriter.WritePropertyName("shortHash"); + jsonWriter.WriteStringValue(operation.Hash[..8]); jsonWriter.WriteEndObject(); } private static void WriteNodes( - Utf8JsonWriter jsonWriter, + JsonWriter jsonWriter, Operation operation, ImmutableArray allNodes, OperationPlanTrace? trace) @@ -151,38 +167,58 @@ private static void WriteNodes( } private static void WriteOperationNode( - Utf8JsonWriter jsonWriter, + JsonWriter jsonWriter, Operation operation, OperationExecutionNode node, ExecutionNodeTrace? 
trace) { jsonWriter.WriteStartObject(); - jsonWriter.WriteNumber("id", node.Id); - jsonWriter.WriteString("type", node.Type.ToString()); + + jsonWriter.WritePropertyName("id"); + jsonWriter.WriteNumberValue(node.Id); + + jsonWriter.WritePropertyName("type"); + jsonWriter.WriteStringValue(node.Type.ToString()); if (!string.IsNullOrEmpty(node.SchemaName)) { - jsonWriter.WriteString("schema", node.SchemaName); + jsonWriter.WritePropertyName("schema"); + jsonWriter.WriteStringValue(node.SchemaName); } - jsonWriter.WriteStartObject("operation"); - jsonWriter.WriteString("name", node.Operation.Name); - jsonWriter.WriteString("kind", node.Operation.Type.ToString()); - jsonWriter.WriteString("document", node.Operation.SourceText); - jsonWriter.WriteString("hash", node.Operation.Hash); - jsonWriter.WriteString("shortHash", node.Operation.Hash[..8]); + jsonWriter.WritePropertyName("operation"); + jsonWriter.WriteStartObject(); + + jsonWriter.WritePropertyName("name"); + jsonWriter.WriteStringValue(node.Operation.Name); + + jsonWriter.WritePropertyName("kind"); + jsonWriter.WriteStringValue(node.Operation.Type.ToString()); + + jsonWriter.WritePropertyName("document"); + jsonWriter.WriteStringValue(node.Operation.SourceText); + + jsonWriter.WritePropertyName("hash"); + jsonWriter.WriteStringValue(node.Operation.Hash); + + jsonWriter.WritePropertyName("shortHash"); + jsonWriter.WriteStringValue(node.Operation.Hash[..8]); + jsonWriter.WriteEndObject(); - jsonWriter.WriteString("resultSelectionSet", node.ResultSelectionSet.ToString(indented: false)); + jsonWriter.WritePropertyName("resultSelectionSet"); + jsonWriter.WriteStringValue(node.ResultSelectionSet.ToString(indented: false)); if (!node.Source.IsRoot) { - jsonWriter.WriteString("source", node.Source.ToString()); + jsonWriter.WritePropertyName("source"); + jsonWriter.WriteStringValue(node.Source.ToString()); } if (!node.Target.IsRoot) { - jsonWriter.WriteString("target", node.Target.ToString()); + 
jsonWriter.WritePropertyName("target"); + jsonWriter.WriteStringValue(node.Target.ToString()); } if (node.Requirements.Length > 0) @@ -193,10 +229,19 @@ private static void WriteOperationNode( foreach (var requirement in node.Requirements) { jsonWriter.WriteStartObject(); - jsonWriter.WriteString("name", requirement.Key); - jsonWriter.WriteString("type", requirement.Type.ToString()); - jsonWriter.WriteString("path", requirement.Path.ToString()); - jsonWriter.WriteString("selectionMap", requirement.Map.ToString()); + + jsonWriter.WritePropertyName("name"); + jsonWriter.WriteStringValue(requirement.Key); + + jsonWriter.WritePropertyName("type"); + jsonWriter.WriteStringValue(requirement.Type.ToString()); + + jsonWriter.WritePropertyName("path"); + jsonWriter.WriteStringValue(requirement.Path.ToString()); + + jsonWriter.WritePropertyName("selectionMap"); + jsonWriter.WriteStringValue(requirement.Map.ToString()); + jsonWriter.WriteEndObject(); } @@ -207,7 +252,8 @@ private static void WriteOperationNode( if (node.ForwardedVariables.Length > 0) { - jsonWriter.WriteStartArray("forwardedVariables"); + jsonWriter.WritePropertyName("forwardedVariables"); + jsonWriter.WriteStartArray(); foreach (var variableName in node.ForwardedVariables) { @@ -219,7 +265,8 @@ private static void WriteOperationNode( if (node.RequiresFileUpload) { - jsonWriter.WriteBoolean("requiresFileUpload", true); + jsonWriter.WritePropertyName("requiresFileUpload"); + jsonWriter.WriteBooleanValue(true); } if (node.Dependencies.Length > 0) @@ -241,7 +288,7 @@ private static void WriteOperationNode( } private static void WriteBatchExecutionNode( - Utf8JsonWriter jsonWriter, + JsonWriter jsonWriter, Operation operation, OperationBatchExecutionNode batchNode, ExecutionNodeTrace? 
trace) @@ -264,42 +311,63 @@ private static void WriteBatchExecutionNode( } private static void WriteOperationDefinitionAsNode( - Utf8JsonWriter jsonWriter, + JsonWriter jsonWriter, Operation operation, OperationBatchExecutionNode batchNode, SingleOperationDefinition operationDef, ExecutionNodeTrace? trace) { jsonWriter.WriteStartObject(); - jsonWriter.WriteNumber("id", operationDef.Id); - jsonWriter.WriteString("type", nameof(ExecutionNodeType.Operation)); + + jsonWriter.WritePropertyName("id"); + jsonWriter.WriteNumberValue(operationDef.Id); + + jsonWriter.WritePropertyName("type"); + jsonWriter.WriteStringValue(nameof(ExecutionNodeType.Operation)); if (!string.IsNullOrEmpty(operationDef.SchemaName)) { - jsonWriter.WriteString("schema", operationDef.SchemaName); + jsonWriter.WritePropertyName("schema"); + jsonWriter.WriteStringValue(operationDef.SchemaName); } - jsonWriter.WriteStartObject("operation"); - jsonWriter.WriteString("name", operationDef.Operation.Name); - jsonWriter.WriteString("kind", operationDef.Operation.Type.ToString()); - jsonWriter.WriteString("document", operationDef.Operation.SourceText); - jsonWriter.WriteString("hash", operationDef.Operation.Hash); - jsonWriter.WriteString("shortHash", operationDef.Operation.Hash[..8]); + jsonWriter.WritePropertyName("operation"); + jsonWriter.WriteStartObject(); + + jsonWriter.WritePropertyName("name"); + jsonWriter.WriteStringValue(operationDef.Operation.Name); + + jsonWriter.WritePropertyName("kind"); + jsonWriter.WriteStringValue(operationDef.Operation.Type.ToString()); + + jsonWriter.WritePropertyName("document"); + jsonWriter.WriteStringValue(operationDef.Operation.SourceText); + + jsonWriter.WritePropertyName("hash"); + jsonWriter.WriteStringValue(operationDef.Operation.Hash); + + jsonWriter.WritePropertyName("shortHash"); + jsonWriter.WriteStringValue(operationDef.Operation.Hash[..8]); + jsonWriter.WriteEndObject(); - jsonWriter.WriteString("resultSelectionSet", 
operationDef.ResultSelectionSet.ToString(indented: false)); + jsonWriter.WritePropertyName("resultSelectionSet"); + jsonWriter.WriteStringValue(operationDef.ResultSelectionSet.ToString(indented: false)); if (!operationDef.Source.IsRoot) { - jsonWriter.WriteString("source", operationDef.Source.ToString()); + jsonWriter.WritePropertyName("source"); + jsonWriter.WriteStringValue(operationDef.Source.ToString()); } if (!operationDef.Target.IsRoot) { - jsonWriter.WriteString("target", operationDef.Target.ToString()); + jsonWriter.WritePropertyName("target"); + jsonWriter.WriteStringValue(operationDef.Target.ToString()); } - jsonWriter.WriteNumber("batchingGroupId", batchNode.Id); + jsonWriter.WritePropertyName("batchingGroupId"); + jsonWriter.WriteNumberValue(batchNode.Id); if (operationDef.Requirements.Length > 0) { @@ -309,10 +377,19 @@ private static void WriteOperationDefinitionAsNode( foreach (var requirement in operationDef.Requirements) { jsonWriter.WriteStartObject(); - jsonWriter.WriteString("name", requirement.Key); - jsonWriter.WriteString("type", requirement.Type.ToString()); - jsonWriter.WriteString("path", requirement.Path.ToString()); - jsonWriter.WriteString("selectionMap", requirement.Map.ToString()); + + jsonWriter.WritePropertyName("name"); + jsonWriter.WriteStringValue(requirement.Key); + + jsonWriter.WritePropertyName("type"); + jsonWriter.WriteStringValue(requirement.Type.ToString()); + + jsonWriter.WritePropertyName("path"); + jsonWriter.WriteStringValue(requirement.Path.ToString()); + + jsonWriter.WritePropertyName("selectionMap"); + jsonWriter.WriteStringValue(requirement.Map.ToString()); + jsonWriter.WriteEndObject(); } @@ -323,7 +400,8 @@ private static void WriteOperationDefinitionAsNode( if (operationDef.ForwardedVariables.Length > 0) { - jsonWriter.WriteStartArray("forwardedVariables"); + jsonWriter.WritePropertyName("forwardedVariables"); + jsonWriter.WriteStartArray(); foreach (var variableName in operationDef.ForwardedVariables) { @@ 
-335,7 +413,8 @@ private static void WriteOperationDefinitionAsNode( if (operationDef.RequiresFileUpload) { - jsonWriter.WriteBoolean("requiresFileUpload", true); + jsonWriter.WritePropertyName("requiresFileUpload"); + jsonWriter.WriteBooleanValue(true); } if (operationDef.Dependencies.Length > 0) @@ -357,37 +436,57 @@ private static void WriteOperationDefinitionAsNode( } private static void WriteBatchOperationDefinitionAsNode( - Utf8JsonWriter jsonWriter, + JsonWriter jsonWriter, Operation operation, OperationBatchExecutionNode batchNode, BatchOperationDefinition operationDef, ExecutionNodeTrace? trace) { jsonWriter.WriteStartObject(); - jsonWriter.WriteNumber("id", operationDef.Id); - jsonWriter.WriteString("type", ExecutionNodeType.OperationBatch.ToString()); + + jsonWriter.WritePropertyName("id"); + jsonWriter.WriteNumberValue(operationDef.Id); + + jsonWriter.WritePropertyName("type"); + jsonWriter.WriteStringValue(ExecutionNodeType.OperationBatch.ToString()); if (!string.IsNullOrEmpty(operationDef.SchemaName)) { - jsonWriter.WriteString("schema", operationDef.SchemaName); + jsonWriter.WritePropertyName("schema"); + jsonWriter.WriteStringValue(operationDef.SchemaName); } - jsonWriter.WriteStartObject("operation"); - jsonWriter.WriteString("name", operationDef.Operation.Name); - jsonWriter.WriteString("kind", operationDef.Operation.Type.ToString()); - jsonWriter.WriteString("document", operationDef.Operation.SourceText); - jsonWriter.WriteString("hash", operationDef.Operation.Hash); - jsonWriter.WriteString("shortHash", operationDef.Operation.Hash[..8]); + jsonWriter.WritePropertyName("operation"); + jsonWriter.WriteStartObject(); + + jsonWriter.WritePropertyName("name"); + jsonWriter.WriteStringValue(operationDef.Operation.Name); + + jsonWriter.WritePropertyName("kind"); + jsonWriter.WriteStringValue(operationDef.Operation.Type.ToString()); + + jsonWriter.WritePropertyName("document"); + jsonWriter.WriteStringValue(operationDef.Operation.SourceText); + + 
jsonWriter.WritePropertyName("hash"); + jsonWriter.WriteStringValue(operationDef.Operation.Hash); + + jsonWriter.WritePropertyName("shortHash"); + jsonWriter.WriteStringValue(operationDef.Operation.Hash[..8]); + jsonWriter.WriteEndObject(); - jsonWriter.WriteString("resultSelectionSet", operationDef.ResultSelectionSet.ToString(indented: false)); + jsonWriter.WritePropertyName("resultSelectionSet"); + jsonWriter.WriteStringValue(operationDef.ResultSelectionSet.ToString(indented: false)); if (!operationDef.Source.IsRoot) { - jsonWriter.WriteString("source", operationDef.Source.ToString()); + jsonWriter.WritePropertyName("source"); + jsonWriter.WriteStringValue(operationDef.Source.ToString()); } - jsonWriter.WriteStartArray("targets"); + jsonWriter.WritePropertyName("targets"); + jsonWriter.WriteStartArray(); foreach (var target in operationDef.Targets) { @@ -396,7 +495,8 @@ private static void WriteBatchOperationDefinitionAsNode( jsonWriter.WriteEndArray(); - jsonWriter.WriteNumber("batchingGroupId", batchNode.Id); + jsonWriter.WritePropertyName("batchingGroupId"); + jsonWriter.WriteNumberValue(batchNode.Id); if (operationDef.Requirements.Length > 0) { @@ -406,10 +506,19 @@ private static void WriteBatchOperationDefinitionAsNode( foreach (var requirement in operationDef.Requirements) { jsonWriter.WriteStartObject(); - jsonWriter.WriteString("name", requirement.Key); - jsonWriter.WriteString("type", requirement.Type.ToString()); - jsonWriter.WriteString("path", requirement.Path.ToString()); - jsonWriter.WriteString("selectionMap", requirement.Map.ToString()); + + jsonWriter.WritePropertyName("name"); + jsonWriter.WriteStringValue(requirement.Key); + + jsonWriter.WritePropertyName("type"); + jsonWriter.WriteStringValue(requirement.Type.ToString()); + + jsonWriter.WritePropertyName("path"); + jsonWriter.WriteStringValue(requirement.Path.ToString()); + + jsonWriter.WritePropertyName("selectionMap"); + jsonWriter.WriteStringValue(requirement.Map.ToString()); + 
jsonWriter.WriteEndObject(); } @@ -420,7 +529,8 @@ private static void WriteBatchOperationDefinitionAsNode( if (operationDef.ForwardedVariables.Length > 0) { - jsonWriter.WriteStartArray("forwardedVariables"); + jsonWriter.WritePropertyName("forwardedVariables"); + jsonWriter.WriteStartArray(); foreach (var variableName in operationDef.ForwardedVariables) { @@ -432,7 +542,8 @@ private static void WriteBatchOperationDefinitionAsNode( if (operationDef.RequiresFileUpload) { - jsonWriter.WriteBoolean("requiresFileUpload", true); + jsonWriter.WritePropertyName("requiresFileUpload"); + jsonWriter.WriteBooleanValue(true); } if (operationDef.Dependencies.Length > 0) @@ -453,7 +564,7 @@ private static void WriteBatchOperationDefinitionAsNode( jsonWriter.WriteEndObject(); } - private static void WriteConditions(Utf8JsonWriter jsonWriter, ReadOnlySpan conditions) + private static void WriteConditions(JsonWriter jsonWriter, ReadOnlySpan conditions) { if (conditions.Length > 0) { @@ -463,8 +574,13 @@ private static void WriteConditions(Utf8JsonWriter jsonWriter, ReadOnlySpan kvp.Key)) { - jsonWriter.WriteNumber(branch.Key, branch.Value.Id); + jsonWriter.WritePropertyName(branch.Key); + jsonWriter.WriteNumberValue(branch.Value.Id); } jsonWriter.WriteEndObject(); - jsonWriter.WriteNumber("fallback", node.FallbackQuery.Id); + jsonWriter.WritePropertyName("fallback"); + jsonWriter.WriteNumberValue(node.FallbackQuery.Id); TryWriteConditions(jsonWriter, node); @@ -533,26 +671,31 @@ private static void WriteNodeFieldNode( jsonWriter.WriteEndObject(); } - private static void TryWriteNodeTrace(Utf8JsonWriter jsonWriter, Operation operation, ExecutionNodeTrace? trace) + private static void TryWriteNodeTrace(JsonWriter jsonWriter, Operation operation, ExecutionNodeTrace? 
trace) { if (trace is not null) { if (!string.IsNullOrEmpty(trace.SpanId)) { - jsonWriter.WriteString("spanId", trace.SpanId); + jsonWriter.WritePropertyName("spanId"); + jsonWriter.WriteStringValue(trace.SpanId); } - jsonWriter.WriteNumber("duration", trace.Duration.TotalMilliseconds); - jsonWriter.WriteString("status", trace.Status.ToString()); + jsonWriter.WritePropertyName("duration"); + jsonWriter.WriteNumberValue(trace.Duration.TotalMilliseconds); + + jsonWriter.WritePropertyName("status"); + jsonWriter.WriteStringValue(trace.Status.ToString()); if (trace.VariableSets.Length > 0) { - jsonWriter.WriteStartObject("variableSets"); + jsonWriter.WritePropertyName("variableSets"); + jsonWriter.WriteStartObject(); foreach (var variableSet in trace.VariableSets) { jsonWriter.WritePropertyName(variableSet.Path.ToPath(operation).Print()); - WriteObjectValueNode(jsonWriter, variableSet.Values); + variableSet.Values.WriteTo(jsonWriter); } jsonWriter.WriteEndObject(); @@ -560,15 +703,21 @@ private static void TryWriteNodeTrace(Utf8JsonWriter jsonWriter, Operation opera if (trace.Transport is not null) { - jsonWriter.WriteStartObject("transport"); - jsonWriter.WriteString("uri", trace.Transport.Uri.ToString()); - jsonWriter.WriteString("contentType", trace.Transport.ContentType); + jsonWriter.WritePropertyName("transport"); + jsonWriter.WriteStartObject(); + + jsonWriter.WritePropertyName("uri"); + jsonWriter.WriteStringValue(trace.Transport.Uri.ToString()); + + jsonWriter.WritePropertyName("contentType"); + jsonWriter.WriteStringValue(trace.Transport.ContentType); + jsonWriter.WriteEndObject(); } } } - private static void TryWriteConditions(Utf8JsonWriter jsonWriter, ExecutionNode node) + private static void TryWriteConditions(JsonWriter jsonWriter, ExecutionNode node) { if (node.Conditions.Length > 0) { @@ -578,78 +727,17 @@ private static void TryWriteConditions(Utf8JsonWriter jsonWriter, ExecutionNode foreach (var condition in node.Conditions) { 
jsonWriter.WriteStartObject(); - jsonWriter.WriteString("variable", "$" + condition.VariableName); - jsonWriter.WriteBoolean("passingValue", condition.PassingValue); - jsonWriter.WriteEndObject(); - } - jsonWriter.WriteEndArray(); - } - } + jsonWriter.WritePropertyName("variable"); + jsonWriter.WriteStringValue("$" + condition.VariableName); - private static void WriteObjectValueNode(Utf8JsonWriter jsonWriter, ObjectValueNode node) - { - jsonWriter.WriteStartObject(); + jsonWriter.WritePropertyName("passingValue"); + jsonWriter.WriteBooleanValue(condition.PassingValue); - foreach (var field in node.Fields) - { - if (field.Value is FileReferenceNode) - { - continue; + jsonWriter.WriteEndObject(); } - jsonWriter.WritePropertyName(field.Name.Value); - WriteValueNode(jsonWriter, field.Value); - } - - jsonWriter.WriteEndObject(); - } - - private static void WriteValueNode(Utf8JsonWriter jsonWriter, IValueNode value) - { - switch (value) - { - case EnumValueNode enumValue: - jsonWriter.WriteStringValue(enumValue.Value); - break; - - case FloatValueNode floatValue: - jsonWriter.WriteRawValue(floatValue.AsSpan()); - break; - - case IntValueNode intValue: - jsonWriter.WriteRawValue(intValue.AsSpan()); - break; - - case BooleanValueNode booleanValue: - jsonWriter.WriteBooleanValue(booleanValue.Value); - break; - - case ListValueNode listValue: - jsonWriter.WriteStartArray(); - - foreach (var item in listValue.Items) - { - WriteValueNode(jsonWriter, item); - } - - jsonWriter.WriteEndArray(); - break; - - case NullValueNode: - jsonWriter.WriteNullValue(); - break; - - case ObjectValueNode objectValue: - WriteObjectValueNode(jsonWriter, objectValue); - break; - - case StringValueNode stringValue: - jsonWriter.WriteStringValue(stringValue.AsSpan()); - break; - - default: - throw new ArgumentOutOfRangeException(nameof(value)); + jsonWriter.WriteEndArray(); } } } diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs 
b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs index ff8a9446ac2..2bc2e25c16a 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/OperationPlanContext.cs @@ -220,11 +220,11 @@ internal void CompleteNode(ExecutionNodeResult result) internal ImmutableArray CreateVariableValueSets( SelectionPath selectionSet, ReadOnlySpan forwardedVariables, - ReadOnlySpan requiredData) + ReadOnlySpan requirements) { ArgumentNullException.ThrowIfNull(selectionSet); - if (requiredData.Length == 0) + if (requirements.Length == 0) { if (forwardedVariables.Length == 0) { @@ -233,16 +233,16 @@ internal ImmutableArray CreateVariableValueSets( return []; } - return [new VariableValues(ToResultPath(selectionSet), new ObjectValueNode([]))]; + return [_resultStore.CreateVariableValueSets(ToResultPath(selectionSet), [])]; } var variableValues = GetPathThroughVariables(forwardedVariables); - return [new VariableValues(CompactPath.Root, new ObjectValueNode(variableValues))]; + return [_resultStore.CreateVariableValueSets(CompactPath.Root, variableValues)]; } else { var variableValues = GetPathThroughVariables(forwardedVariables); - return _resultStore.CreateVariableValueSets(selectionSet, variableValues, requiredData); + return _resultStore.CreateVariableValueSets(selectionSet, variableValues, requirements); } } @@ -259,7 +259,7 @@ internal ImmutableArray CreateVariableValueSets( } var variableValues = GetPathThroughVariables(forwardedVariables); - return [new VariableValues(CompactPath.Root, new ObjectValueNode(variableValues))]; + return [_resultStore.CreateVariableValueSets(CompactPath.Root, variableValues)]; } else { diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.Pooling.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.Pooling.cs index ae12c809d1a..8d7a62d516d 100644 --- 
a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.Pooling.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.Pooling.cs @@ -80,6 +80,9 @@ internal void Clean(int maxCollectTargetRetainLength, int maxDictionaryRetainCap _errors?.Clear(); _pocketedErrors?.Clear(); + // reset variable writer (returns excess chunks, keeps the first) + _variableWriter.Clean(); + // clear collect target arrays to unroot CompositeResultDocument references; // if they grew too large during a burst, swap them for smaller ones. TrimOrClearBuffer(ref _collectTargetA, maxCollectTargetRetainLength); @@ -88,10 +91,7 @@ internal void Clean(int maxCollectTargetRetainLength, int maxDictionaryRetainCap // clear dictionaries/hashsets; drop oversized ones. TrimOrClear(ref _seenPaths, maxDictionaryRetainCapacity, ReferenceEqualityComparer.Instance); - TrimOrClear(ref _seenStrings, maxDictionaryRetainCapacity, StringComparer.Ordinal); - TrimOrClear(ref _seenValueNodes, maxDictionaryRetainCapacity, SingleValueNodeComparer.Instance); - TrimOrClear(ref _seenTwoValueTuples, maxDictionaryRetainCapacity, TwoValueNodeTupleComparer.Instance); - TrimOrClear(ref _seenThreeValueTuples, maxDictionaryRetainCapacity, ThreeValueNodeTupleComparer.Instance); + _variableDedupTable.Clear(); // null out per-request references _result = default!; @@ -129,15 +129,17 @@ private static void TrimOrClear( } } - private static void TrimOrClear( - ref Dictionary dict, + private static void TrimOrClear( + ref Dictionary dict, int maxRetainCapacity, - IEqualityComparer comparer) + IEqualityComparer? comparer = null) where TKey : notnull { if (dict.Count > maxRetainCapacity) { - dict = new Dictionary(comparer); + dict = comparer is null + ? 
new Dictionary() + : new Dictionary(comparer); } else { diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs index 2cfdcf4e1e6..6f7a6dc4404 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/FetchResultStore.cs @@ -13,6 +13,7 @@ using HotChocolate.Fusion.Text.Json; using HotChocolate.Language; using HotChocolate.Types; +using HotChocolate.Text.Json; namespace HotChocolate.Fusion.Execution.Results; @@ -24,6 +25,9 @@ internal sealed partial class FetchResultStore : IDisposable private readonly object _lock = new(); #endif private readonly List _memory = []; + private readonly ChunkedArrayWriter _variableWriter = new(); + private readonly JsonWriter _jsonWriter; + private readonly VariableDedupTable _variableDedupTable; private ISchemaDefinition _schema = default!; private IErrorHandler _errorHandler = default!; private Operation _operation = default!; @@ -34,16 +38,18 @@ internal sealed partial class FetchResultStore : IDisposable private CompositeResultElement[] _collectTargetCombined = ArrayPool.Shared.Rent(64); private PathSegmentLocalPool _pathPool = default!; private HashSet _seenPaths = new(ReferenceEqualityComparer.Instance); - private Dictionary _seenStrings = new(StringComparer.Ordinal); - private Dictionary _seenValueNodes = new(SingleValueNodeComparer.Instance); - private Dictionary _seenTwoValueTuples = new(TwoValueNodeTupleComparer.Instance); - private Dictionary _seenThreeValueTuples = new(ThreeValueNodeTupleComparer.Instance); private CompositeResultDocument _result = default!; private ValueCompletion _valueCompletion = default!; private List? _errors; private Dictionary? 
_pocketedErrors; private bool _disposed; + internal FetchResultStore() + { + _jsonWriter = new JsonWriter(_variableWriter, new JsonWriterOptions { Indented = false }); + _variableDedupTable = new VariableDedupTable(_variableWriter); + } + public CompositeResultDocument Result => _result; public IReadOnlyList? Errors => _errors; @@ -750,6 +756,7 @@ private ImmutableArray BuildVariableValueSets( IReadOnlyList requestVariables, ReadOnlySpan requiredData) { + _variableDedupTable.Initialize(elements.Length); PooledArrayWriter? buffer = null; if (requestVariables.Count == 0) @@ -758,21 +765,18 @@ private ImmutableArray BuildVariableValueSets( { 1 => BuildVariableValueSetsSingleRequirement( elements, - requiredData[0], - ref buffer), + requiredData[0]), 2 => BuildVariableValueSetsTwoRequirements( elements, requiredData[0], - requiredData[1], - ref buffer), + requiredData[1]), 3 => BuildVariableValueSetsThreeRequirements( elements, requiredData[0], requiredData[1], - requiredData[2], - ref buffer), + requiredData[2]), _ => default }; @@ -791,38 +795,56 @@ private ImmutableArray BuildVariableValueSets( } VariableValues[]? variableValueSets = null; - Dictionary? seen = null; var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; foreach (var result in elements) { - var variables = MapRequirements(result, requestVariables, requiredData, ref buffer); + variableValueSets ??= new VariableValues[elements.Length]; + + _jsonWriter.Reset(_variableWriter); + var startPosition = _variableWriter.Position; + _jsonWriter.WriteStartObject(); - if (variables is null) + // Write forwarded variables. + for (var i = 0; i < requestVariables.Count; i++) { - continue; + var field = requestVariables[i]; + _jsonWriter.WritePropertyName(field.Name.Value); + WriteValueNode(field.Value); } - variableValueSets ??= new VariableValues[elements.Length]; + // Write requirement fields. 
+ var failed = false; - if (nextIndex > 0) + foreach (var requirement in requiredData) { - seen ??= new Dictionary(elements.Length, VariableValueComparer.Instance) - { - [variableValueSets[0].Values] = 0 - }; + _jsonWriter.WritePropertyName(requirement.Key); - if (seen.TryGetValue(variables, out var existingIndex)) + if (!ResultDataMapper.TryMap(result, requirement.Map, _schema, _jsonWriter)) { - additionalPaths.Add(existingIndex, result.CompactPath); - continue; + failed = true; + break; } + } + + if (failed) + { + _variableWriter.ResetTo(startPosition); + continue; + } + + _jsonWriter.WriteEndObject(); + + var entry = TryCreateVariableValues( + result.CompactPath, startPosition, ref additionalPaths, nextIndex); - seen[variables] = nextIndex; + if (entry is null) + { + continue; } - variableValueSets[nextIndex++] = new VariableValues(result.CompactPath, variables); + variableValueSets[nextIndex++] = entry.Value; } if (buffer is not null) @@ -833,31 +855,26 @@ private ImmutableArray BuildVariableValueSets( } } + _variableDedupTable.Clear(); return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } private ImmutableArray BuildVariableValueSetsSingleRequirement( ReadOnlySpan elements, - OperationRequirement requirement, - ref PooledArrayWriter? buffer) + OperationRequirement requirement) { if (TryGetSimpleRequirementFieldName(requirement.Map, out var fieldName)) { - return BuildVariableValueSetsSingleRequirementFastPath( - elements, - requirement, - fieldName, - ref buffer); + return BuildVariableValueSetsSingleRequirementFastPath(elements, requirement, fieldName); } - return BuildVariableValueSetsSingleRequirementSlowPath(elements, requirement, ref buffer); + return BuildVariableValueSetsSingleRequirementSlowPath(elements, requirement); } private ImmutableArray BuildVariableValueSetsSingleRequirementFastPath( ReadOnlySpan elements, OperationRequirement requirement, - string fieldName, - ref PooledArrayWriter? 
buffer) + string fieldName) { VariableValues[]? variableValueSets = null; var additionalPaths = new AdditionalPathAccumulator(); @@ -886,105 +903,76 @@ private ImmutableArray BuildVariableValueSetsSingleRequirementFa } variableValueSets ??= new VariableValues[elements.Length]; - IValueNode mappedValue; - if (valueKind is JsonValueKind.String) - { - var stringValue = value.AssertString(); + _jsonWriter.Reset(_variableWriter); + var startPosition = _variableWriter.Position; - if (_seenStrings.TryGetValue(stringValue, out var existingIndex)) - { - additionalPaths.Add(existingIndex, result.CompactPath); - continue; - } - - mappedValue = ResultDataMapper.GetStringValueNode(stringValue); - _seenStrings[stringValue] = nextIndex; - } - else - { - mappedValue = ResultDataMapper.MapLeafValue(value, ref buffer); + // Write variable JSON: {"key":rawValue} + _jsonWriter.WriteStartObject(); + _jsonWriter.WritePropertyName(requirement.Key); + WriteCompositeResultValue(value); + _jsonWriter.WriteEndObject(); - if (_seenValueNodes.TryGetValue(mappedValue, out var existingIndex)) - { - additionalPaths.Add(existingIndex, result.CompactPath); - continue; - } + // we try to create a VariableValues object, + // if that fails the variables already were created and we move on. 
+ var entry = TryCreateVariableValues(result.CompactPath, startPosition, ref additionalPaths, nextIndex); - _seenValueNodes[mappedValue] = nextIndex; + if (entry is null) + { + continue; } - variableValueSets[nextIndex++] = new VariableValues( - result.CompactPath, - new ObjectValueNode([ - new ObjectFieldNode( - requirement.Key, - mappedValue) - ])); + variableValueSets[nextIndex++] = entry.Value; } - _seenStrings.Clear(); - _seenValueNodes.Clear(); + _variableDedupTable.Clear(); return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } private ImmutableArray BuildVariableValueSetsSingleRequirementSlowPath( ReadOnlySpan elements, - OperationRequirement requirement, - ref PooledArrayWriter? buffer) + OperationRequirement requirement) { VariableValues[]? variableValueSets = null; var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; - var seeded = false; foreach (var result in elements) { - var value = ResultDataMapper.Map(result, requirement.Map, _schema, ref buffer); + variableValueSets ??= new VariableValues[elements.Length]; - if (value is null) - { - continue; - } + _jsonWriter.Reset(_variableWriter); + var startPosition = _variableWriter.Position; + _jsonWriter.WriteStartObject(); + _jsonWriter.WritePropertyName(requirement.Key); - if (value.Kind == SyntaxKind.NullValue && requirement.Type.Kind == SyntaxKind.NonNullType) + if (!ResultDataMapper.TryMap(result, requirement.Map, _schema, _jsonWriter)) { + _variableWriter.ResetTo(startPosition); continue; } - variableValueSets ??= new VariableValues[elements.Length]; - - if (nextIndex > 0) - { - if (!seeded) - { - _seenValueNodes[variableValueSets[0].Values.Fields[0].Value] = 0; - seeded = true; - } + _jsonWriter.WriteEndObject(); - if (_seenValueNodes.TryGetValue(value, out var existingIndex)) - { - additionalPaths.Add(existingIndex, result.CompactPath); - continue; - } + var entry = TryCreateVariableValues( + result.CompactPath, startPosition, ref additionalPaths, 
nextIndex); - _seenValueNodes[value] = nextIndex; + if (entry is null) + { + continue; } - variableValueSets[nextIndex++] = new VariableValues( - result.CompactPath, - new ObjectValueNode([new ObjectFieldNode(requirement.Key, value)])); + variableValueSets[nextIndex++] = entry.Value; } - _seenValueNodes.Clear(); + _variableDedupTable.Clear(); return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } private ImmutableArray BuildVariableValueSetsTwoRequirements( ReadOnlySpan elements, OperationRequirement requirement1, - OperationRequirement requirement2, - ref PooledArrayWriter? buffer) + OperationRequirement requirement2) { if (TryGetSimpleRequirementFieldName(requirement1.Map, out var fieldName1) && TryGetSimpleRequirementFieldName(requirement2.Map, out var fieldName2)) @@ -994,15 +982,13 @@ private ImmutableArray BuildVariableValueSetsTwoRequirements( requirement1, fieldName1, requirement2, - fieldName2, - ref buffer); + fieldName2); } return BuildVariableValueSetsTwoRequirementsSlowPath( elements, requirement1, - requirement2, - ref buffer); + requirement2); } private ImmutableArray BuildVariableValueSetsTwoRequirementsFastPath( @@ -1010,130 +996,102 @@ private ImmutableArray BuildVariableValueSetsTwoRequirementsFast OperationRequirement requirement1, string fieldName1, OperationRequirement requirement2, - string fieldName2, - ref PooledArrayWriter? buffer) + string fieldName2) { VariableValues[]? 
variableValueSets = null; var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; - var seeded = false; foreach (var result in elements) { if (!result.TryGetProperty(fieldName1, out var value1) || value1.ValueKind is JsonValueKind.Undefined - || value1.ValueKind is JsonValueKind.Null - && requirement1.Type.Kind == SyntaxKind.NonNullType) + || (value1.ValueKind is JsonValueKind.Null + && requirement1.Type.Kind == SyntaxKind.NonNullType)) { continue; } if (!result.TryGetProperty(fieldName2, out var value2) || value2.ValueKind is JsonValueKind.Undefined - || value2.ValueKind is JsonValueKind.Null - && requirement2.Type.Kind == SyntaxKind.NonNullType) + || (value2.ValueKind is JsonValueKind.Null + && requirement2.Type.Kind == SyntaxKind.NonNullType)) { continue; } - var mappedValue1 = MapRequirementLeafValue(value1, ref buffer); - var mappedValue2 = MapRequirementLeafValue(value2, ref buffer); variableValueSets ??= new VariableValues[elements.Length]; - var key = new TwoValueNodeTuple(mappedValue1, mappedValue2); - if (nextIndex > 0) - { - if (!seeded) - { - _seenTwoValueTuples[new TwoValueNodeTuple( - variableValueSets[0].Values.Fields[0].Value, - variableValueSets[0].Values.Fields[1].Value)] = 0; - seeded = true; - } + _jsonWriter.Reset(_variableWriter); + var startPosition = _variableWriter.Position; + _jsonWriter.WriteStartObject(); + _jsonWriter.WritePropertyName(requirement1.Key); + WriteCompositeResultValue(value1); + _jsonWriter.WritePropertyName(requirement2.Key); + WriteCompositeResultValue(value2); + _jsonWriter.WriteEndObject(); - if (_seenTwoValueTuples.TryGetValue(key, out var existingIndex)) - { - additionalPaths.Add(existingIndex, result.CompactPath); - continue; - } + var entry = TryCreateVariableValues( + result.CompactPath, startPosition, ref additionalPaths, nextIndex); - _seenTwoValueTuples[key] = nextIndex; + if (entry is null) + { + continue; } - variableValueSets[nextIndex++] = new VariableValues( - result.CompactPath, - new 
ObjectValueNode([ - new ObjectFieldNode(requirement1.Key, mappedValue1), - new ObjectFieldNode(requirement2.Key, mappedValue2) - ])); + variableValueSets[nextIndex++] = entry.Value; } - _seenTwoValueTuples.Clear(); + _variableDedupTable.Clear(); return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } private ImmutableArray BuildVariableValueSetsTwoRequirementsSlowPath( ReadOnlySpan elements, OperationRequirement requirement1, - OperationRequirement requirement2, - ref PooledArrayWriter? buffer) + OperationRequirement requirement2) { VariableValues[]? variableValueSets = null; var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; - var seeded = false; foreach (var result in elements) { - var value1 = ResultDataMapper.Map(result, requirement1.Map, _schema, ref buffer); + variableValueSets ??= new VariableValues[elements.Length]; + + _jsonWriter.Reset(_variableWriter); + var startPosition = _variableWriter.Position; + _jsonWriter.WriteStartObject(); - if (value1 is null - || value1.Kind == SyntaxKind.NullValue - && requirement1.Type.Kind == SyntaxKind.NonNullType) + _jsonWriter.WritePropertyName(requirement1.Key); + + if (!ResultDataMapper.TryMap(result, requirement1.Map, _schema, _jsonWriter)) { + _variableWriter.ResetTo(startPosition); continue; } - var value2 = ResultDataMapper.Map(result, requirement2.Map, _schema, ref buffer); + _jsonWriter.WritePropertyName(requirement2.Key); - if (value2 is null - || value2.Kind == SyntaxKind.NullValue - && requirement2.Type.Kind == SyntaxKind.NonNullType) + if (!ResultDataMapper.TryMap(result, requirement2.Map, _schema, _jsonWriter)) { + _variableWriter.ResetTo(startPosition); continue; } - variableValueSets ??= new VariableValues[elements.Length]; - var key = new TwoValueNodeTuple(value1, value2); + _jsonWriter.WriteEndObject(); - if (nextIndex > 0) - { - if (!seeded) - { - _seenTwoValueTuples[new TwoValueNodeTuple( - variableValueSets[0].Values.Fields[0].Value, - 
variableValueSets[0].Values.Fields[1].Value)] = 0; - seeded = true; - } - - if (_seenTwoValueTuples.TryGetValue(key, out var existingIndex)) - { - additionalPaths.Add(existingIndex, result.CompactPath); - continue; - } + var entry = TryCreateVariableValues(result.CompactPath, startPosition, ref additionalPaths, nextIndex); - _seenTwoValueTuples[key] = nextIndex; + if (entry is null) + { + continue; } - variableValueSets[nextIndex++] = new VariableValues( - result.CompactPath, - new ObjectValueNode([ - new ObjectFieldNode(requirement1.Key, value1), - new ObjectFieldNode(requirement2.Key, value2) - ])); + variableValueSets[nextIndex++] = entry.Value; } - _seenTwoValueTuples.Clear(); + _variableDedupTable.Clear(); return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } @@ -1141,8 +1099,7 @@ private ImmutableArray BuildVariableValueSetsThreeRequirements( ReadOnlySpan elements, OperationRequirement requirement1, OperationRequirement requirement2, - OperationRequirement requirement3, - ref PooledArrayWriter? buffer) + OperationRequirement requirement3) { if (TryGetSimpleRequirementFieldName(requirement1.Map, out var fieldName1) && TryGetSimpleRequirementFieldName(requirement2.Map, out var fieldName2) @@ -1155,16 +1112,14 @@ private ImmutableArray BuildVariableValueSetsThreeRequirements( requirement2, fieldName2, requirement3, - fieldName3, - ref buffer); + fieldName3); } return BuildVariableValueSetsThreeRequirementsSlowPath( elements, requirement1, requirement2, - requirement3, - ref buffer); + requirement3); } private ImmutableArray BuildVariableValueSetsThreeRequirementsFastPath( @@ -1174,76 +1129,62 @@ private ImmutableArray BuildVariableValueSetsThreeRequirementsFa OperationRequirement requirement2, string fieldName2, OperationRequirement requirement3, - string fieldName3, - ref PooledArrayWriter? buffer) + string fieldName3) { VariableValues[]? 
variableValueSets = null; var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; - var seeded = false; foreach (var result in elements) { if (!result.TryGetProperty(fieldName1, out var value1) || value1.ValueKind is JsonValueKind.Undefined - || value1.ValueKind is JsonValueKind.Null - && requirement1.Type.Kind == SyntaxKind.NonNullType) + || (value1.ValueKind is JsonValueKind.Null + && requirement1.Type.Kind == SyntaxKind.NonNullType)) { continue; } if (!result.TryGetProperty(fieldName2, out var value2) || value2.ValueKind is JsonValueKind.Undefined - || value2.ValueKind is JsonValueKind.Null - && requirement2.Type.Kind == SyntaxKind.NonNullType) + || (value2.ValueKind is JsonValueKind.Null + && requirement2.Type.Kind == SyntaxKind.NonNullType)) { continue; } if (!result.TryGetProperty(fieldName3, out var value3) || value3.ValueKind is JsonValueKind.Undefined - || value3.ValueKind is JsonValueKind.Null - && requirement3.Type.Kind == SyntaxKind.NonNullType) + || (value3.ValueKind is JsonValueKind.Null + && requirement3.Type.Kind == SyntaxKind.NonNullType)) { continue; } - var mappedValue1 = MapRequirementLeafValue(value1, ref buffer); - var mappedValue2 = MapRequirementLeafValue(value2, ref buffer); - var mappedValue3 = MapRequirementLeafValue(value3, ref buffer); variableValueSets ??= new VariableValues[elements.Length]; - var key = new ThreeValueNodeTuple(mappedValue1, mappedValue2, mappedValue3); - if (nextIndex > 0) - { - if (!seeded) - { - _seenThreeValueTuples[new ThreeValueNodeTuple( - variableValueSets[0].Values.Fields[0].Value, - variableValueSets[0].Values.Fields[1].Value, - variableValueSets[0].Values.Fields[2].Value)] = 0; - seeded = true; - } + _jsonWriter.Reset(_variableWriter); + var startPosition = _variableWriter.Position; + _jsonWriter.WriteStartObject(); + _jsonWriter.WritePropertyName(requirement1.Key); + WriteCompositeResultValue(value1); + _jsonWriter.WritePropertyName(requirement2.Key); + WriteCompositeResultValue(value2); + 
_jsonWriter.WritePropertyName(requirement3.Key); + WriteCompositeResultValue(value3); + _jsonWriter.WriteEndObject(); - if (_seenThreeValueTuples.TryGetValue(key, out var existingIndex)) - { - additionalPaths.Add(existingIndex, result.CompactPath); - continue; - } + var entry = TryCreateVariableValues(result.CompactPath, startPosition, ref additionalPaths, nextIndex); - _seenThreeValueTuples[key] = nextIndex; + if (entry is null) + { + continue; } - variableValueSets[nextIndex++] = new VariableValues( - result.CompactPath, - new ObjectValueNode([ - new ObjectFieldNode(requirement1.Key, mappedValue1), - new ObjectFieldNode(requirement2.Key, mappedValue2), - new ObjectFieldNode(requirement3.Key, mappedValue3) - ])); + variableValueSets[nextIndex++] = entry.Value; } - _seenThreeValueTuples.Clear(); + _variableDedupTable.Clear(); return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } @@ -1251,128 +1192,58 @@ private ImmutableArray BuildVariableValueSetsThreeRequirementsSl ReadOnlySpan elements, OperationRequirement requirement1, OperationRequirement requirement2, - OperationRequirement requirement3, - ref PooledArrayWriter? buffer) + OperationRequirement requirement3) { VariableValues[]? 
variableValueSets = null; var additionalPaths = new AdditionalPathAccumulator(); var nextIndex = 0; - var seeded = false; foreach (var result in elements) { - var value1 = ResultDataMapper.Map(result, requirement1.Map, _schema, ref buffer); + variableValueSets ??= new VariableValues[elements.Length]; - if (value1 is null - || (value1.Kind == SyntaxKind.NullValue - && requirement1.Type.Kind == SyntaxKind.NonNullType)) - { - continue; - } + _jsonWriter.Reset(_variableWriter); + var startPosition = _variableWriter.Position; + _jsonWriter.WriteStartObject(); - var value2 = ResultDataMapper.Map(result, requirement2.Map, _schema, ref buffer); + _jsonWriter.WritePropertyName(requirement1.Key); - if (value2 is null - || (value2.Kind == SyntaxKind.NullValue - && requirement2.Type.Kind == SyntaxKind.NonNullType)) + if (!ResultDataMapper.TryMap(result, requirement1.Map, _schema, _jsonWriter)) { + _variableWriter.ResetTo(startPosition); continue; } - var value3 = ResultDataMapper.Map(result, requirement3.Map, _schema, ref buffer); + _jsonWriter.WritePropertyName(requirement2.Key); - if (value3 is null - || (value3.Kind == SyntaxKind.NullValue - && requirement3.Type.Kind == SyntaxKind.NonNullType)) + if (!ResultDataMapper.TryMap(result, requirement2.Map, _schema, _jsonWriter)) { + _variableWriter.ResetTo(startPosition); continue; } - variableValueSets ??= new VariableValues[elements.Length]; - var key = new ThreeValueNodeTuple(value1, value2, value3); + _jsonWriter.WritePropertyName(requirement3.Key); - if (nextIndex > 0) + if (!ResultDataMapper.TryMap(result, requirement3.Map, _schema, _jsonWriter)) { - if (!seeded) - { - _seenThreeValueTuples[new ThreeValueNodeTuple( - variableValueSets[0].Values.Fields[0].Value, - variableValueSets[0].Values.Fields[1].Value, - variableValueSets[0].Values.Fields[2].Value)] = 0; - seeded = true; - } - - if (_seenThreeValueTuples.TryGetValue(key, out var existingIndex)) - { - additionalPaths.Add(existingIndex, result.CompactPath); - continue; - 
} - - _seenThreeValueTuples[key] = nextIndex; + _variableWriter.ResetTo(startPosition); + continue; } - variableValueSets[nextIndex++] = new VariableValues( - result.CompactPath, - new ObjectValueNode([ - new ObjectFieldNode(requirement1.Key, value1), - new ObjectFieldNode(requirement2.Key, value2), - new ObjectFieldNode(requirement3.Key, value3) - ])); - } - - _seenThreeValueTuples.Clear(); - return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); - } - - private ObjectValueNode? MapRequirements( - CompositeResultElement result, - IReadOnlyList forwardedVariables, - ReadOnlySpan requirements, - ref PooledArrayWriter? buffer) - { - var fieldCount = forwardedVariables.Count + requirements.Length; - - if (fieldCount == 0) - { - return new ObjectValueNode([]); - } + _jsonWriter.WriteEndObject(); - var fields = new ObjectFieldNode[fieldCount]; - var index = 0; - - for (var i = 0; i < forwardedVariables.Count; i++) - { - fields[index++] = forwardedVariables[i]; - } + var entry = TryCreateVariableValues(result.CompactPath, startPosition, ref additionalPaths, nextIndex); - foreach (var requirement in requirements) - { - var field = MapRequirement(result, requirement.Key, requirement.Map, ref buffer); - - if (field is null) + if (entry is null) { - return null; - } - - if (field.Value.Kind == SyntaxKind.NullValue && requirement.Type.Kind == SyntaxKind.NonNullType) - { - return null; + continue; } - fields[index++] = field; + variableValueSets[nextIndex++] = entry.Value; } - return new ObjectValueNode(fields); - } - - private ObjectFieldNode? MapRequirement( - CompositeResultElement result, - string key, - IValueSelectionNode path, - ref PooledArrayWriter? buffer) - { - var value = ResultDataMapper.Map(result, path, _schema, ref buffer); - return value is null ? 
null : new ObjectFieldNode(key, value); + _variableDedupTable.Clear(); + return FinalizeVariableValueSets(variableValueSets, ref additionalPaths, nextIndex); } private static bool TryGetSimpleRequirementFieldName( @@ -1397,13 +1268,112 @@ private static bool TryGetSimpleRequirementFieldName( return false; } - [MethodImpl(MethodImplOptions.AggressiveInlining)] - private static IValueNode MapRequirementLeafValue( - CompositeResultElement value, - ref PooledArrayWriter? buffer) - => value.ValueKind is JsonValueKind.String - ? ResultDataMapper.GetStringValueNode(value.AssertString()) - : ResultDataMapper.MapLeafValue(value, ref buffer); + private VariableValues? TryCreateVariableValues( + CompactPath path, + int startPosition, + ref AdditionalPathAccumulator additionalPaths, + int nextIndex) + { + var length = _variableWriter.Position - startPosition; + var hash = _variableWriter.GetHashCode(startPosition, length); + + // if we already track the same variables we will track them as additional paths + // on the existing index. + + // + // this allows us to fetch once and then insert the data at different locations. 
+ if (_variableDedupTable.TryGet(hash, startPosition, length, out var existingIndex)) + { + additionalPaths.Add(existingIndex, path); + _variableWriter.ResetTo(startPosition); + return null; + } + + _variableDedupTable.Add(hash, nextIndex, startPosition, length); + return new VariableValues(path, JsonSegment.Create(_variableWriter, startPosition, length)); + } + + private void WriteValueNode(IValueNode value) + { + switch (value) + { + case NullValueNode: + _jsonWriter.WriteNullValue(); + break; + + case StringValueNode sv: + _jsonWriter.WriteStringValue(sv.Value); + break; + + case IntValueNode iv: + WriteRawAscii(iv.Value); + break; + + case FloatValueNode fv: + WriteRawAscii(fv.Value); + break; + + case BooleanValueNode bv: + _jsonWriter.WriteBooleanValue(bv.Value); + break; + + case EnumValueNode ev: + _jsonWriter.WriteStringValue(ev.Value); + break; + + case ObjectValueNode ov: + _jsonWriter.WriteStartObject(); + foreach (var field in ov.Fields) + { + _jsonWriter.WritePropertyName(field.Name.Value); + WriteValueNode(field.Value); + } + _jsonWriter.WriteEndObject(); + break; + + case ListValueNode lv: + _jsonWriter.WriteStartArray(); + foreach (var item in lv.Items) + { + WriteValueNode(item); + } + _jsonWriter.WriteEndArray(); + break; + + default: + _jsonWriter.WriteNullValue(); + break; + } + } + + private void WriteRawAscii(string value) + { + Span buffer = stackalloc byte[value.Length]; + System.Text.Encoding.UTF8.GetBytes(value.AsSpan(), buffer); + _jsonWriter.WriteRawValue(buffer); + } + + private void WriteCompositeResultValue(CompositeResultElement value) + => value.WriteTo(_jsonWriter); + + internal VariableValues CreateVariableValueSets( + CompactPath path, + IReadOnlyList fields) + { + _jsonWriter.Reset(_variableWriter); + var startPosition = _variableWriter.Position; + _jsonWriter.WriteStartObject(); + + for (var i = 0; i < fields.Count; i++) + { + var field = fields[i]; + _jsonWriter.WritePropertyName(field.Name.Value); + 
WriteValueNode(field.Value); + } + + _jsonWriter.WriteEndObject(); + var length = _variableWriter.Position - startPosition; + return new VariableValues(path, JsonSegment.Create(_variableWriter, startPosition, length)); + } private static void AppendUnrolledLists( CompositeResultElement list, @@ -1656,6 +1626,7 @@ public void Dispose() _memory.Clear(); + _variableWriter.Dispose(); _pathPool?.Dispose(); } @@ -1695,17 +1666,6 @@ private void ReturnPathSegments(CompactPath path, HashSet seen) } } - private sealed class SingleValueNodeComparer : IEqualityComparer - { - public static SingleValueNodeComparer Instance { get; } = new(); - - public bool Equals(IValueNode? x, IValueNode? y) - => SyntaxComparer.BySyntax.Equals(x, y); - - public int GetHashCode(IValueNode obj) - => SyntaxComparer.BySyntax.GetHashCode(obj); - } - private static ImmutableArray FinalizeVariableValueSets( VariableValues[]? variableValueSets, ref AdditionalPathAccumulator additionalPaths, @@ -1728,40 +1688,136 @@ private static ImmutableArray FinalizeVariableValueSets( return ImmutableCollectionsMarshal.AsImmutableArray(variableValueSets); } - private readonly record struct TwoValueNodeTuple(IValueNode Value1, IValueNode Value2); + private sealed class VariableDedupTable(ChunkedArrayWriter writer) : IDisposable + { + private const int DefaultBucketSize = 4; + private const int DefaultBucketCount = 16; - private readonly record struct ThreeValueNodeTuple( - IValueNode Value1, - IValueNode Value2, - IValueNode Value3); + private readonly ChunkedArrayWriter _writer = writer; + private Entry[] _table = ArrayPool.Shared.Rent(DefaultBucketCount * DefaultBucketSize); + private int _bucketCount = DefaultBucketCount; + private readonly int _bucketSize = DefaultBucketSize; - private sealed class TwoValueNodeTupleComparer : IEqualityComparer - { - public static TwoValueNodeTupleComparer Instance { get; } = new(); + public void Initialize(int capacity) + { + _bucketCount = NextPowerOfTwo(Math.Max(capacity, 
DefaultBucketCount)); + var totalSize = _bucketCount * _bucketSize; - public bool Equals(TwoValueNodeTuple x, TwoValueNodeTuple y) - => SyntaxComparer.BySyntax.Equals(x.Value1, y.Value1) - && SyntaxComparer.BySyntax.Equals(x.Value2, y.Value2); + if (_table.Length < totalSize) + { + ArrayPool.Shared.Return(_table); + _table = ArrayPool.Shared.Rent(totalSize); + } - public int GetHashCode(TwoValueNodeTuple obj) - => HashCode.Combine( - SyntaxComparer.BySyntax.GetHashCode(obj.Value1), - SyntaxComparer.BySyntax.GetHashCode(obj.Value2)); - } + _table.AsSpan(0, totalSize).Clear(); + } - private sealed class ThreeValueNodeTupleComparer : IEqualityComparer - { - public static ThreeValueNodeTupleComparer Instance { get; } = new(); - - public bool Equals(ThreeValueNodeTuple x, ThreeValueNodeTuple y) - => SyntaxComparer.BySyntax.Equals(x.Value1, y.Value1) - && SyntaxComparer.BySyntax.Equals(x.Value2, y.Value2) - && SyntaxComparer.BySyntax.Equals(x.Value3, y.Value3); - - public int GetHashCode(ThreeValueNodeTuple obj) - => HashCode.Combine( - SyntaxComparer.BySyntax.GetHashCode(obj.Value1), - SyntaxComparer.BySyntax.GetHashCode(obj.Value2), - SyntaxComparer.BySyntax.GetHashCode(obj.Value3)); + public bool TryGet( + int hash, + int location, + int length, + out int existingIndex) + { + var bucket = hash & 0x7FFFFFFF & (_bucketCount - 1); + var start = bucket * _bucketSize; + var end = start + _bucketSize; + + for (var s = start; s < end; s++) + { + ref var entry = ref _table[s]; + + if (entry.Index == 0) + { + existingIndex = -1; + return false; + } + + if (entry.Hash == hash + && entry.Length == length + && _writer.SequenceEqual(entry.Location, location, length)) + { + existingIndex = entry.Index - 1; + return true; + } + } + + existingIndex = -1; + return false; + } + + public void Add(int hash, int index, int location, int length) + { + var bucket = hash & 0x7FFFFFFF & (_bucketCount - 1); + var start = bucket * _bucketSize; + var end = start + _bucketSize; + + for (var s = 
start; s < end; s++) + { + ref var entry = ref _table[s]; + + if (entry.Index == 0) + { + entry.Hash = hash; + entry.Index = index + 1; + entry.Location = location; + entry.Length = length; + return; + } + } + + Grow(); + Add(hash, index, location, length); + } + + public void Clear() + => _table.AsSpan(0, _bucketCount * _bucketSize).Clear(); + + public void Dispose() + { + ArrayPool.Shared.Return(_table); + _table = []; + } + + private void Grow() + { + var oldTable = _table; + var oldTotal = _bucketCount * _bucketSize; + + _bucketCount *= 2; + var newTotal = _bucketCount * _bucketSize; + _table = ArrayPool.Shared.Rent(newTotal); + _table.AsSpan(0, newTotal).Clear(); + + for (var i = 0; i < oldTotal; i++) + { + var entry = oldTable[i]; + + if (entry.Index != 0) + { + Add(entry.Hash, entry.Index - 1, entry.Location, entry.Length); + } + } + + ArrayPool.Shared.Return(oldTable); + } + + private static int NextPowerOfTwo(int n) + { + n--; + n |= n >> 1; + n |= n >> 2; + n |= n >> 4; + n |= n >> 8; + n |= n >> 16; + return n + 1; + } + + private struct Entry + { + public int Hash; + public int Index; // 1-based (0 = empty) + public int Location; + public int Length; + } } } diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/ResultDataMapper.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/ResultDataMapper.cs index 54f2e3a555d..479b1ae542c 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/ResultDataMapper.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/Results/ResultDataMapper.cs @@ -1,346 +1,243 @@ -using System.Diagnostics.CodeAnalysis; -using System.Runtime.CompilerServices; using System.Text.Json; -using HotChocolate.Buffers; using HotChocolate.Fusion.Language; using HotChocolate.Fusion.Text.Json; -using HotChocolate.Language; using HotChocolate.Types; +using JsonWriter = HotChocolate.Text.Json.JsonWriter; namespace HotChocolate.Fusion.Execution.Results; internal static class 
ResultDataMapper { - private const int CachedNumericStringMax = 4096; - private static readonly StringValueNode[] s_cachedNumericStrings = CreateCachedNumericStrings(); - private static readonly IntValueNode[] s_cachedNumericIntValues = CreateCachedNumericIntValues(); - - public static IValueNode? Map( + /// + /// Maps a value selection from the composite result and writes it directly as JSON. + /// Returns true if the value was written successfully, false if the + /// value could not be resolved (undefined/null for required paths). + /// + public static bool TryMap( CompositeResultElement result, IValueSelectionNode valueSelection, ISchemaDefinition schema, - ref PooledArrayWriter? writer) - { - var context = new Context(schema, result, ref writer); - return Visit(valueSelection, context); - } + JsonWriter writer) + => Visit(valueSelection, result, schema, writer); - private static IValueNode? Visit(IValueSelectionNode node, Context context) + private static bool Visit( + IValueSelectionNode node, + CompositeResultElement result, + ISchemaDefinition schema, + JsonWriter writer) { switch (node) { case ChoiceValueSelectionNode choice: - return Visit(choice, context); + return VisitChoice(choice, result, schema, writer); case PathNode path: - return Visit(path, context); + return VisitPath(path, result, schema, writer); case ObjectValueSelectionNode objectValue: - return Visit(objectValue, context); + return VisitObject(objectValue, result, schema, writer); - case PathObjectValueSelectionNode objectValue: - return Visit(objectValue, context); + case PathObjectValueSelectionNode pathObject: + return VisitPathObject(pathObject, result, schema, writer); - case PathListValueSelectionNode listValue: - return Visit(listValue, context); + case PathListValueSelectionNode pathList: + return VisitPathList(pathList, result, schema, writer); default: throw new NotSupportedException("Unknown value selection node type."); } } - private static IValueNode? 
Visit(ChoiceValueSelectionNode node, Context context) + private static bool VisitChoice( + ChoiceValueSelectionNode node, + CompositeResultElement result, + ISchemaDefinition schema, + JsonWriter writer) { foreach (var branch in node.Branches) { - var value = Visit(branch, context); - - if (value is null) + if (Visit(branch, result, schema, writer)) { - continue; + return true; } - - return value; } - return null; + return false; } - private static IValueNode? Visit(PathNode node, Context context) + private static bool VisitPath( + PathNode node, + CompositeResultElement result, + ISchemaDefinition schema, + JsonWriter writer) { - var result = ResolvePath(context.Schema, context.Result, node); - var resultValueKind = result.ValueKind; + var resolved = ResolvePath(schema, result, node); + var valueKind = resolved.ValueKind; - if (resultValueKind is JsonValueKind.Undefined) + if (valueKind is JsonValueKind.Undefined) { - return null; + return false; } - if (resultValueKind is JsonValueKind.Null) + if (valueKind is JsonValueKind.Null) { - return NullValueNode.Default; + writer.WriteNullValue(); + return true; } - // Note: to capture data from the introspection - // system we would need to also cover raw field results. - if (result.Selection is { IsLeaf: true }) + if (valueKind is JsonValueKind.Array) { - return MapLeafValue(result, ref context.Writer); + WriteLeafArray(resolved, writer); + return true; } - throw new InvalidSelectionMapPathException(node); + writer.WriteRawValue(resolved.GetRawValue(includeQuotes: true)); + return true; } - internal static IValueNode MapLeafValue( - CompositeResultElement value, - ref PooledArrayWriter? 
writer) + private static void WriteLeafArray( + CompositeResultElement array, + JsonWriter writer) { - if (value.ValueKind is JsonValueKind.Array) + writer.WriteStartArray(); + + foreach (var item in array.EnumerateArray()) { - var items = new List(value.GetArrayLength()); - var parser = default(JsonValueParser); - var parserInitialized = false; + var itemKind = item.ValueKind; - foreach (var item in value.EnumerateArray()) + if (itemKind is JsonValueKind.Null) { - items.Add(ParseLeafValue(item, ref writer, ref parser, ref parserInitialized)); + writer.WriteNullValue(); + } + else if (itemKind is JsonValueKind.Array) + { + WriteLeafArray(item, writer); + } + else + { + writer.WriteRawValue(item.GetRawValue(includeQuotes: true)); } - - return new ListValueNode(items); - } - - var scalarParser = default(JsonValueParser); - var scalarParserInitialized = false; - return ParseLeafValue(value, ref writer, ref scalarParser, ref scalarParserInitialized); - } - - private static IValueNode ParseLeafValue( - CompositeResultElement value, - ref PooledArrayWriter? 
writer, - ref JsonValueParser parser, - ref bool parserInitialized) - { - switch (value.ValueKind) - { - case JsonValueKind.Null: - return NullValueNode.Default; - - case JsonValueKind.True: - return BooleanValueNode.True; - - case JsonValueKind.False: - return BooleanValueNode.False; - - case JsonValueKind.String: - return GetStringValueNode(value.AssertString()); - - case JsonValueKind.Number: - if (value.TryGetInt64(out var intValue)) - { - if ((ulong)intValue <= CachedNumericStringMax) - { - return s_cachedNumericIntValues[(int)intValue]; - } - - return new IntValueNode(intValue); - } - - goto default; - - default: - writer ??= new PooledArrayWriter(); - if (!parserInitialized) - { - parser = new JsonValueParser(buffer: writer); - parserInitialized = true; - } - - return parser.Parse(value.GetRawValue(includeQuotes: true)); - } - } - - internal static StringValueNode GetStringValueNode(string value) - { - if (TryGetCachedNumericString(value, out var cached)) - { - return cached; } - return new StringValueNode(value); + writer.WriteEndArray(); } - [MethodImpl(MethodImplOptions.AggressiveInlining)] - private static bool TryGetCachedNumericString( - string value, - [NotNullWhen(true)] out StringValueNode? 
cached) + private static bool VisitObject( + ObjectValueSelectionNode node, + CompositeResultElement result, + ISchemaDefinition schema, + JsonWriter writer) { - cached = null; - - var length = value.Length; - - if ((uint)(length - 1) > 3) + if (result.ValueKind is not JsonValueKind.Object) { - return false; + throw new InvalidOperationException("Only object results are supported."); } - var c0 = value[0]; - - if ((uint)(c0 - '0') > 9) - { - return false; - } + writer.WriteStartObject(); - if (length > 1 && c0 == '0') + foreach (var field in node.Fields) { - return false; - } + writer.WritePropertyName(field.Name.Value); - var parsed = c0 - '0'; + bool written; - for (var i = 1; i < length; i++) - { - var c = value[i]; + if (field.ValueSelection is null) + { + var pathNode = new PathNode(new PathSegmentNode(field.Name)); + written = VisitPath(pathNode, result, schema, writer); + } + else + { + written = Visit(field.ValueSelection, result, schema, writer); + } - if ((uint)(c - '0') > 9) + if (!written) { return false; } - - parsed = (parsed * 10) + (c - '0'); - } - - if ((uint)parsed > CachedNumericStringMax) - { - return false; } - cached = s_cachedNumericStrings[parsed]; + writer.WriteEndObject(); return true; } - private static StringValueNode[] CreateCachedNumericStrings() + private static bool VisitPathObject( + PathObjectValueSelectionNode node, + CompositeResultElement result, + ISchemaDefinition schema, + JsonWriter writer) { - var values = new StringValueNode[CachedNumericStringMax + 1]; + var resolved = ResolvePath(schema, result, node.Path); + var valueKind = resolved.ValueKind; - for (var i = 0; i < values.Length; i++) + if (valueKind is JsonValueKind.Null or JsonValueKind.Undefined) { - values[i] = new StringValueNode(i.ToString()); + return false; } - return values; - } - - private static IntValueNode[] CreateCachedNumericIntValues() - { - var values = new IntValueNode[CachedNumericStringMax + 1]; - - for (var i = 0; i < values.Length; i++) + if 
(valueKind is not JsonValueKind.Object) { - values[i] = new IntValueNode(i); + throw new InvalidOperationException("Only object results are supported."); } - return values; + return VisitObject(node.ObjectValueSelection, resolved, schema, writer); } - private static IValueNode? Visit(ObjectValueSelectionNode node, Context context) + private static bool VisitPathList( + PathListValueSelectionNode node, + CompositeResultElement result, + ISchemaDefinition schema, + JsonWriter writer) { - var result = context.Result; - var resultValueKind = result.ValueKind; - - if (resultValueKind is not JsonValueKind.Object) - { - throw new InvalidOperationException("Only object results are supported."); - } + var resolved = ResolvePath(schema, result, node.Path); + var valueKind = resolved.ValueKind; - var fields = new List(node.Fields.Length); - - foreach (var field in node.Fields) + switch (valueKind) { - var value = field.ValueSelection is null - ? Visit(new PathNode(new PathSegmentNode(field.Name)), context) - : Visit(field.ValueSelection, context); - - if (value is null) - { - return null; - } - - fields.Add(new ObjectFieldNode(field.Name.Value, value)); - } - - return new ObjectValueNode(fields); - } + case JsonValueKind.Undefined: + return false; - private static IValueNode? Visit(PathObjectValueSelectionNode node, Context context) - { - var result = ResolvePath(context.Schema, context.Result, node.Path); - var resultValueKind = result.ValueKind; + case JsonValueKind.Null: + writer.WriteNullValue(); + return true; - if (resultValueKind is JsonValueKind.Null or JsonValueKind.Undefined) - { - return null; - } + case JsonValueKind.Array: + return VisitList(node.ListValueSelection, resolved, schema, writer); - if (resultValueKind is not JsonValueKind.Object) - { - throw new InvalidOperationException("Only object results are supported."); + default: + return false; } - - return Visit(node.ObjectValueSelection, context.WithResult(result)); } - private static IValueNode? 
Visit(ListValueSelectionNode node, Context context) + private static bool VisitList( + ListValueSelectionNode node, + CompositeResultElement result, + ISchemaDefinition schema, + JsonWriter writer) { - var result = context.Result; - var resultValueKind = result.ValueKind; - - if (resultValueKind is not JsonValueKind.Array) + if (result.ValueKind is not JsonValueKind.Array) { - return null; + return false; } - var items = new List(result.GetArrayLength()); + writer.WriteStartArray(); foreach (var item in result.EnumerateArray()) { if (item.ValueKind is JsonValueKind.Null) { - items.Add(NullValueNode.Default); + writer.WriteNullValue(); continue; } - var value = Visit(node.ElementSelection, context.WithResult(item)); - - if (value is null) + if (!Visit(node.ElementSelection, item, schema, writer)) { - return null; + return false; } - - items.Add(value); } - return new ListValueNode(items); - } - - private static IValueNode? Visit(PathListValueSelectionNode node, Context context) - { - var result = ResolvePath(context.Schema, context.Result, node.Path); - var resultValueKind = result.ValueKind; - - switch (resultValueKind) - { - case JsonValueKind.Undefined: - return null; - - case JsonValueKind.Null: - return NullValueNode.Default; - - case JsonValueKind.Array: - return Visit(node.ListValueSelection, context.WithResult(result)); - - default: - return null; - } + writer.WriteEndArray(); + return true; } private static CompositeResultElement ResolvePath( @@ -375,6 +272,7 @@ private static CompositeResultElement ResolvePath( } var fieldResultValueKind = fieldResult.ValueKind; + if (fieldResultValueKind is JsonValueKind.Null) { return fieldResult; @@ -418,25 +316,4 @@ private static CompositeResultElement ResolvePath( return currentResult; } - - private readonly ref struct Context - { - private readonly ref PooledArrayWriter? _writer; - - public Context(ISchemaDefinition schema, CompositeResultElement result, ref PooledArrayWriter? 
writer) - { - Schema = schema; - Result = result; - _writer = ref writer; - } - - public ISchemaDefinition Schema { get; } - - public CompositeResultElement Result { get; } - - public ref PooledArrayWriter? Writer => ref _writer; - - public Context WithResult(CompositeResultElement result) - => new(Schema, result, ref _writer); - } } diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/VariableValues.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/VariableValues.cs index d7dcc86a1c6..8b6954c88c8 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/VariableValues.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Execution/VariableValues.cs @@ -1,13 +1,16 @@ using System.Collections.Immutable; using HotChocolate.Fusion.Text.Json; -using HotChocolate.Language; namespace HotChocolate.Fusion.Execution; -public sealed record VariableValues(CompactPath Path, ObjectValueNode Values) +public readonly record struct VariableValues(CompactPath Path, JsonSegment Values) { + public bool IsEmpty => Values.IsEmpty; + /// /// Gets the additional paths that share the same variable values as the primary . 
/// public ImmutableArray AdditionalPaths { get; init; } = []; + + public static VariableValues Empty => default; } diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/HotChocolate.Fusion.Execution.csproj b/src/HotChocolate/Fusion/src/Fusion.Execution/HotChocolate.Fusion.Execution.csproj index 1a27c2c7dfe..a2e5eb6dace 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/HotChocolate.Fusion.Execution.csproj +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/HotChocolate.Fusion.Execution.csproj @@ -23,7 +23,6 @@ - @@ -40,6 +39,49 @@ + + Transport\ContentType.cs + + + Transport\FileReference.cs + + + Transport\FileReferenceInfo.cs + + + Transport\FileReferenceNode.cs + + + Transport\IOperationRequest.cs + + + Transport\IRequestBody.cs + + + Transport\OperationBatchRequest.cs + + + Transport\OperationRequest.cs + + + Transport\TransportAbstractionResources.Designer.cs + TransportAbstractionResources.resx + + + Transport\Serialization\JsonOptionDefaults.cs + + + Transport\Serialization\Utf8GraphQLRequestProperties.cs + + + Transport\Serialization\Utf8GraphQLResultProperties.cs + + + Transport\Serialization\Utf8JsonWriterHelper.cs + + + Transport\VariableBatchRequest.cs + Transport\Http\DefaultGraphQLHttpClient.cs @@ -178,6 +220,11 @@ Transport\Http\HttpResources.Designer.cs HttpResources.resx + + Transport\TransportAbstractionResources.resx + ResXFileCodeGenerator + TransportAbstractionResources.Designer.cs + Transport\Http\HttpResources.resx ResXFileCodeGenerator @@ -197,4 +244,8 @@ + + + + diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/JsonSegment.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/JsonSegment.cs new file mode 100644 index 00000000000..71a10beb5bd --- /dev/null +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/JsonSegment.cs @@ -0,0 +1,129 @@ +using System.Buffers; +using HotChocolate.Buffers; +using HotChocolate.Text.Json; + +namespace HotChocolate.Fusion; + +public readonly struct JsonSegment : IEquatable +{ + private readonly 
ChunkedArrayWriter _memory; + private readonly int _location; + private readonly int _length; + + private JsonSegment(ChunkedArrayWriter memory, int location, int length) + { + _memory = memory; + _location = location; + _length = length; + } + + public bool IsEmpty => _memory is null; + + internal int Location => _location; + + internal int Length => _length; + + public ReadOnlySequence AsSequence() + { + if (IsEmpty) + { + return ReadOnlySequence.Empty; + } + + var start = _location; + var length = _length; + var first = _memory.Read(ref start, ref length); + + if (length == 0) + { + // Single chunk — common case, no allocation for segment chain. + return new ReadOnlySequence(first.ToArray()); + } + + // Multi-chunk — build a ReadOnlySequence from linked segments. + var firstSegment = new MemorySegment(first.ToArray()); + var lastSegment = firstSegment; + + do + { + lastSegment = lastSegment.Append(_memory.Read(ref start, ref length)); + } + while (length > 0); + + return new ReadOnlySequence(firstSegment, 0, lastSegment, lastSegment.Memory.Length); + } + + public void WriteTo(JsonWriter writer) + { + if (IsEmpty) + { + return; + } + + var start = _location; + var length = _length; + var first = _memory.Read(ref start, ref length); + + if (length == 0) + { + // Single chunk — common case. + writer.WriteRawValue(first); + return; + } + + // Multi-chunk — write start, continuations, then set separator flag. + writer.WriteRawValueStart(first); + + do + { + writer.WriteRawValueContinuation(_memory.Read(ref start, ref length)); + } + while (length > 0); + + writer.WriteRawValueEnd(); + } + + public bool Equals(JsonSegment other) + { + if (IsEmpty) + { + return other.IsEmpty; + } + + if (other.IsEmpty || !ReferenceEquals(_memory, other._memory)) + { + return false; + } + + return _memory.SequenceEqual(_location, other._location, _length); + } + + public override bool Equals(object? 
obj) + => obj is JsonSegment other && Equals(other); + + public override int GetHashCode() + => IsEmpty ? 0 : _memory.GetHashCode(_location, _length); + + public static JsonSegment Empty => default; + + internal static JsonSegment Create(ChunkedArrayWriter memory, int location, int length) + => new(memory, location, length); + + private sealed class MemorySegment : ReadOnlySequenceSegment + { + public MemorySegment(ReadOnlyMemory memory) + { + Memory = memory; + } + + public MemorySegment Append(ReadOnlySpan data) + { + var next = new MemorySegment(data.ToArray()) + { + RunningIndex = RunningIndex + Memory.Length + }; + Next = next; + return next; + } + } +} diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultElement.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultElement.cs index b895a06ac08..fe5b84fbe48 100644 --- a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultElement.cs +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultElement.cs @@ -35,9 +35,14 @@ public void WriteTo(IBufferWriter writer, bool indented = false) { var options = new JsonWriterOptions { Indented = indented }; var jsonWriter = new JsonWriter(writer, options); - var formatter = new CompositeResultDocument.RawJsonFormatter( - _parent, - jsonWriter); + WriteTo(jsonWriter); + } + + internal void WriteTo(JsonWriter jsonWriter) + { + CheckValidInstance(); + + var formatter = new CompositeResultDocument.RawJsonFormatter(_parent, jsonWriter); var row = _parent._metaDb.Get(_cursor); formatter.WriteValue(_cursor, row); } diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Transport/Http/FileEntry.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Transport/Http/FileEntry.cs new file mode 100644 index 00000000000..71b2d1ed902 --- /dev/null +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Transport/Http/FileEntry.cs @@ -0,0 +1,5 @@ +using HotChocolate.Types; + +namespace 
HotChocolate.Fusion.Transport.Http; + +public readonly record struct FileEntry(string Key, string Path, IFile file); diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Transport/Http/FileEntryBuilder.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Transport/Http/FileEntryBuilder.cs new file mode 100644 index 00000000000..fde7dc1b79c --- /dev/null +++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Transport/Http/FileEntryBuilder.cs @@ -0,0 +1,207 @@ +using System.Collections.Immutable; +using System.Text; +using System.Text.Json; +using HotChocolate.Text.Json; +using HotChocolate.Types; +using HotChocolate.Buffers; + +namespace HotChocolate.Fusion.Transport.Http; + +/// +/// Prepares variable JSON for the GraphQL multipart request specification by +/// extracting file references and producing the cleaned operations JSON with +/// null placeholders and the file map needed for multipart form construction. +/// +internal static class FileEntryBuilder +{ + private static ReadOnlySpan FileMarkerPrefix => "$.file("u8; + + public static (JsonSegment CleanedJson, ImmutableArray FileMap) Build( + ChunkedArrayWriter writer, + JsonSegment variables, + IFileLookup fileLookup, + string pathPrefix = "variables") + { + var fileEntries = ImmutableArray.CreateBuilder(); + var cleanedJson = Build(writer, variables, fileLookup, fileEntries, pathPrefix); + return (cleanedJson, fileEntries.ToImmutable()); + } + + public static JsonSegment Build( + ChunkedArrayWriter writer, + JsonSegment variables, + IFileLookup fileLookup, + ImmutableArray.Builder fileEntries, + string pathPrefix = "variables") + { + var sequence = variables.AsSequence(); + var reader = new Utf8JsonReader(sequence, isFinalBlock: true, default); + var startPosition = writer.Position; + var jsonWriter = new JsonWriter(writer, new JsonWriterOptions { Indented = false }); + var hasFiles = false; + var path = ParsePrefix(pathPrefix); + + while (reader.Read()) + { + switch (reader.TokenType) + { + case 
JsonTokenType.StartObject: + jsonWriter.WriteStartObject(); + break; + + case JsonTokenType.EndObject: + jsonWriter.WriteEndObject(); + path = PopAfterValue(path); + break; + + case JsonTokenType.StartArray: + jsonWriter.WriteStartArray(); + path = path.Append(0); + break; + + case JsonTokenType.EndArray: + jsonWriter.WriteEndArray(); + // Pop the array index, then pop after value (property name or parent array index). + path = path.Parent; + path = PopAfterValue(path); + break; + + case JsonTokenType.PropertyName: + jsonWriter.WritePropertyName(reader.ValueSpan); + path = path.Append(reader.GetString()!); + break; + + case JsonTokenType.String: + if (TryExtractFileKey(reader.ValueSpan, out var fileKey) + && fileLookup.TryGetFile(fileKey, out var file)) + { + fileEntries.Add(new FileEntry(fileKey, PrintPath(path), file)); + jsonWriter.WriteNullValue(); + hasFiles = true; + } + else + { + jsonWriter.WriteStringValue(reader.ValueSpan); + } + path = PopAfterValue(path); + break; + + case JsonTokenType.Number: + jsonWriter.WriteRawValue(reader.ValueSpan); + path = PopAfterValue(path); + break; + + case JsonTokenType.True: + jsonWriter.WriteBooleanValue(true); + path = PopAfterValue(path); + break; + + case JsonTokenType.False: + jsonWriter.WriteBooleanValue(false); + path = PopAfterValue(path); + break; + + case JsonTokenType.Null: + jsonWriter.WriteNullValue(); + path = PopAfterValue(path); + break; + } + } + + if (!hasFiles) + { + writer.ResetTo(startPosition); + return variables; + } + + return JsonSegment.Create(writer, startPosition, writer.Position - startPosition); + } + + private static Path ParsePrefix(string pathPrefix) + { + var path = Path.Root; + + foreach (var segment in pathPrefix.Split('.')) + { + path = int.TryParse(segment, out var index) + ? 
path.Append(index) + : path.Append(segment); + } + + return path; + } + + /// + /// After a value is consumed, pops the current path segment: + /// for object properties, removes the property name; + /// for array elements, advances to the next index. + /// + private static Path PopAfterValue(Path path) + { + if (path.Parent is not null && path is IndexerPathSegment indexer) + { + // Inside an array — advance to next index. + return path.Parent.Append(indexer.Index + 1); + } + + if (path.Parent is not null && path is NamePathSegment) + { + // Inside an object — pop the property name. + return path.Parent; + } + + return path; + } + + /// + /// Prints the path using dot notation (e.g., "variables.input.files.0") + /// as required by the GraphQL multipart request specification. + /// + private static string PrintPath(Path path) + { + var segments = path.ToList(); + + if (segments.Count == 0) + { + return string.Empty; + } + + var sb = new StringBuilder(); + + for (var i = 0; i < segments.Count; i++) + { + if (i > 0) + { + sb.Append('.'); + } + + switch (segments[i]) + { + case string name: + sb.Append(name); + break; + + case int index: + sb.Append(index); + break; + } + } + + return sb.ToString(); + } + + private static bool TryExtractFileKey(ReadOnlySpan value, out string key) + { + if (value.Length > FileMarkerPrefix.Length + 1 + && value.StartsWith(FileMarkerPrefix) + && value[^1] == (byte)')') + { + key = Encoding.UTF8.GetString( + value.Slice(FileMarkerPrefix.Length, value.Length - FileMarkerPrefix.Length - 1)); + return key.Length > 0; + } + + key = default!; + return false; + } +} diff --git a/src/HotChocolate/Fusion/test/Fusion.AspNetCore.Tests/PersistedOperationTests.cs b/src/HotChocolate/Fusion/test/Fusion.AspNetCore.Tests/PersistedOperationTests.cs index 6f456fc722f..c1fef501bc0 100644 --- a/src/HotChocolate/Fusion/test/Fusion.AspNetCore.Tests/PersistedOperationTests.cs +++ b/src/HotChocolate/Fusion/test/Fusion.AspNetCore.Tests/PersistedOperationTests.cs 
@@ -2,11 +2,13 @@ using HotChocolate.AspNetCore; using HotChocolate.Execution; using HotChocolate.Fusion.Configuration; -using HotChocolate.Fusion.Transport.Http; using HotChocolate.Language; using HotChocolate.PersistedOperations; using Microsoft.AspNetCore.Http; using Microsoft.Extensions.DependencyInjection; +using GraphQLHttpClient = HotChocolate.Transport.Http.GraphQLHttpClient; +using GraphQLHttpMethod = HotChocolate.Transport.Http.GraphQLHttpMethod; +using GraphQLHttpRequest = HotChocolate.Transport.Http.GraphQLHttpRequest; using OperationRequest = HotChocolate.Transport.OperationRequest; namespace HotChocolate.Fusion; @@ -33,9 +35,11 @@ public async Task HotChocolateStyle_MD5Hash_Success() var request = new OperationRequest(id: key.Value); // act - using var result = await client.PostAsync( - request, - new Uri("http://localhost:5000/graphql")); + using var result = await client.SendAsync( + new GraphQLHttpRequest(request, new Uri("http://localhost:5000/graphql")) + { + Method = GraphQLHttpMethod.Post + }); // arrange result.HttpResponseMessage.MatchSnapshot(); @@ -62,9 +66,11 @@ public async Task HotChocolateStyle_MD5Hash_NotFound() var request = new OperationRequest(id: key.Value); // act - using var result = await client.PostAsync( - request, - new Uri("http://localhost:5000/graphql")); + using var result = await client.SendAsync( + new GraphQLHttpRequest(request, new Uri("http://localhost:5000/graphql")) + { + Method = GraphQLHttpMethod.Post + }); // assert result.HttpResponseMessage.MatchSnapshot(); @@ -91,9 +97,11 @@ public async Task HotChocolateStyle_Sha1Hash_Success() var request = new OperationRequest(id: key.Value); // act - using var result = await client.PostAsync( - request, - new Uri("http://localhost:5000/graphql")); + using var result = await client.SendAsync( + new GraphQLHttpRequest(request, new Uri("http://localhost:5000/graphql")) + { + Method = GraphQLHttpMethod.Post + }); // assert result.HttpResponseMessage.MatchSnapshot(); @@ -120,9 
+128,11 @@ public async Task HotChocolateStyle_Sha256Hash_Success() var request = new OperationRequest(id: key.Value); // act - using var result = await client.PostAsync( - request, - new Uri("http://localhost:5000/graphql")); + using var result = await client.SendAsync( + new GraphQLHttpRequest(request, new Uri("http://localhost:5000/graphql")) + { + Method = GraphQLHttpMethod.Post + }); // assert result.HttpResponseMessage.MatchSnapshot(); @@ -149,9 +159,11 @@ public async Task HotChocolateStyle_Sha256Hash_Query_Empty_String_Success() var request = new OperationRequest(query: string.Empty, id: key.Value); // act - using var result = await client.PostAsync( - request, - new Uri("http://localhost:5000/graphql")); + using var result = await client.SendAsync( + new GraphQLHttpRequest(request, new Uri("http://localhost:5000/graphql")) + { + Method = GraphQLHttpMethod.Post + }); // assert result.HttpResponseMessage.MatchSnapshot(); @@ -177,9 +189,11 @@ public async Task ApolloStyle_MD5Hash_Success() var request = CreateApolloStyleRequest(hashProvider.Name, key.Value); // act - using var result = await client.PostAsync( - request, - new Uri("http://localhost:5000/graphql")); + using var result = await client.SendAsync( + new GraphQLHttpRequest(request, new Uri("http://localhost:5000/graphql")) + { + Method = GraphQLHttpMethod.Post + }); // assert result.HttpResponseMessage.MatchSnapshot(); @@ -206,9 +220,11 @@ public async Task ApolloStyle_MD5Hash_NotFound() var request = CreateApolloStyleRequest(hashProvider.Name, key.Value); // act - using var result = await client.PostAsync( - request, - new Uri("http://localhost:5000/graphql")); + using var result = await client.SendAsync( + new GraphQLHttpRequest(request, new Uri("http://localhost:5000/graphql")) + { + Method = GraphQLHttpMethod.Post + }); // assert result.HttpResponseMessage.MatchSnapshot(); @@ -235,9 +251,11 @@ public async Task ApolloStyle_Sha1Hash_Success() var request = 
CreateApolloStyleRequest(hashProvider.Name, key.Value); // act - using var result = await client.PostAsync( - request, - new Uri("http://localhost:5000/graphql")); + using var result = await client.SendAsync( + new GraphQLHttpRequest(request, new Uri("http://localhost:5000/graphql")) + { + Method = GraphQLHttpMethod.Post + }); // assert result.HttpResponseMessage.MatchSnapshot(); @@ -264,9 +282,11 @@ public async Task ApolloStyle_Sha256Hash_Success() var request = CreateApolloStyleRequest(hashProvider.Name, key.Value); // act - using var result = await client.PostAsync( - request, - new Uri("http://localhost:5000/graphql")); + using var result = await client.SendAsync( + new GraphQLHttpRequest(request, new Uri("http://localhost:5000/graphql")) + { + Method = GraphQLHttpMethod.Post + }); // assert result.HttpResponseMessage.MatchSnapshot(); @@ -289,9 +309,11 @@ public async Task Standard_Query_By_Default_Works() var request = new OperationRequest(query: query); // act - using var result = await client.PostAsync( - request, - new Uri("http://localhost:5000/graphql")); + using var result = await client.SendAsync( + new GraphQLHttpRequest(request, new Uri("http://localhost:5000/graphql")) + { + Method = GraphQLHttpMethod.Post + }); // assert result.HttpResponseMessage.MatchSnapshot(); @@ -315,9 +337,11 @@ public async Task Standard_Query_Not_Allowed() var request = new OperationRequest(query: query); // act - using var result = await client.PostAsync( - request, - new Uri("http://localhost:5000/graphql")); + using var result = await client.SendAsync( + new GraphQLHttpRequest(request, new Uri("http://localhost:5000/graphql")) + { + Method = GraphQLHttpMethod.Post + }); // assert result.HttpResponseMessage.MatchSnapshot(); @@ -344,9 +368,11 @@ public async Task Standard_Query_Not_Allowed_Even_When_Persisted() var request = new OperationRequest(query: query); // act - using var result = await client.PostAsync( - request, - new Uri("http://localhost:5000/graphql")); + 
using var result = await client.SendAsync( + new GraphQLHttpRequest(request, new Uri("http://localhost:5000/graphql")) + { + Method = GraphQLHttpMethod.Post + }); // assert result.HttpResponseMessage.MatchSnapshot(); @@ -377,9 +403,11 @@ public async Task Standard_Query_Allowed_When_Persisted() var request = new OperationRequest(query: query); // act - using var result = await client.PostAsync( - request, - new Uri("http://localhost:5000/graphql")); + using var result = await client.SendAsync( + new GraphQLHttpRequest(request, new Uri("http://localhost:5000/graphql")) + { + Method = GraphQLHttpMethod.Post + }); // assert result.HttpResponseMessage.MatchSnapshot(); @@ -410,9 +438,11 @@ public async Task Standard_Query_Not_Allowed_Custom_Error() var request = new OperationRequest(query: query); // act - using var result = await client.PostAsync( - request, - new Uri("http://localhost:5000/graphql")); + using var result = await client.SendAsync( + new GraphQLHttpRequest(request, new Uri("http://localhost:5000/graphql")) + { + Method = GraphQLHttpMethod.Post + }); // assert result.HttpResponseMessage.MatchSnapshot(); @@ -437,9 +467,11 @@ public async Task Standard_Query_Not_Allowed_Override_Per_Request() var request = new OperationRequest(query: query); // act - using var result = await client.PostAsync( - request, - new Uri("http://localhost:5000/graphql")); + using var result = await client.SendAsync( + new GraphQLHttpRequest(request, new Uri("http://localhost:5000/graphql")) + { + Method = GraphQLHttpMethod.Post + }); // assert result.HttpResponseMessage.MatchSnapshot(); diff --git a/src/HotChocolate/Fusion/test/Fusion.Execution.Tests/Transport/Http/DefaultGraphQLHttpClientTests.cs b/src/HotChocolate/Fusion/test/Fusion.Execution.Tests/Transport/Http/DefaultGraphQLHttpClientTests.cs index 019979058f9..47c1133907d 100644 --- a/src/HotChocolate/Fusion/test/Fusion.Execution.Tests/Transport/Http/DefaultGraphQLHttpClientTests.cs +++ 
b/src/HotChocolate/Fusion/test/Fusion.Execution.Tests/Transport/Http/DefaultGraphQLHttpClientTests.cs @@ -1,6 +1,8 @@ +using System.Collections.Immutable; using System.Text; using System.Text.Json; -using HotChocolate.Transport; +using HotChocolate.Buffers; +using HotChocolate.Fusion.Execution; namespace HotChocolate.Fusion.Transport.Http; @@ -15,7 +17,7 @@ public async Task Fetch_Large_Json() await using var app = context.Item2; using var client = new DefaultGraphQLHttpClient(server.CreateClient(), disposeInnerClient: true); - var operationRequest = new OperationRequest("{ items }"); + var operationRequest = new OperationRequest("{ items }", null, null, null, VariableValues.Empty, JsonSegment.Empty); var request = new GraphQLHttpRequest(operationRequest, new Uri("http://localhost:5000/graphql")); // act @@ -44,7 +46,7 @@ public async Task ReadAsResult_Application_GraphQL_Response_Json_Response() "application/graphql-response+json"); using var client = new DefaultGraphQLHttpClient(new HttpClient(handler)); - var operationRequest = new OperationRequest("{ number }"); + var operationRequest = new OperationRequest("{ number }", null, null, null, VariableValues.Empty, JsonSegment.Empty); var request = new GraphQLHttpRequest(operationRequest, new Uri("http://localhost:5000/graphql")); // act @@ -72,7 +74,7 @@ public async Task ReadAsResult_Application_Json_Response() "application/json"); using var client = new DefaultGraphQLHttpClient(new HttpClient(handler)); - var operationRequest = new OperationRequest("{ number }"); + var operationRequest = new OperationRequest("{ number }", null, null, null, VariableValues.Empty, JsonSegment.Empty); var request = new GraphQLHttpRequest(operationRequest, new Uri("http://localhost:5000/graphql")); // act @@ -100,7 +102,7 @@ public async Task ReadAsResultStream_Single_Application_GraphQL_Response_Json_Re "application/graphql-response+json"); using var client = new DefaultGraphQLHttpClient(new HttpClient(handler)); - var 
operationRequest = new OperationRequest("{ number }"); + var operationRequest = new OperationRequest("{ number }", null, null, null, VariableValues.Empty, JsonSegment.Empty); var request = new GraphQLHttpRequest(operationRequest, new Uri("http://localhost:5000/graphql")); // act @@ -137,7 +139,7 @@ public async Task ReadAsResultStream_Single_Application_Json_Response() "application/json"); using var client = new DefaultGraphQLHttpClient(new HttpClient(handler)); - var operationRequest = new OperationRequest("{ number }"); + var operationRequest = new OperationRequest("{ number }", null, null, null, VariableValues.Empty, JsonSegment.Empty); var request = new GraphQLHttpRequest(operationRequest, new Uri("http://localhost:5000/graphql")); // act @@ -178,7 +180,7 @@ public async Task ReadAsResultStream_Single_Application_Json_Apollo_Request_Batc var operationRequest = new OperationBatchRequest( [ - new OperationRequest("{ number }") + new OperationRequest("{ number }", null, null, null, VariableValues.Empty, JsonSegment.Empty) ]); var request = new GraphQLHttpRequest(operationRequest, new Uri("http://localhost:5000/graphql")); @@ -225,8 +227,8 @@ public async Task ReadAsResultStream_Multi_Application_Json_Apollo_Request_Batch var operationRequest = new OperationBatchRequest( [ - new OperationRequest("{ number }"), - new OperationRequest("{ number }") + new OperationRequest("{ number }", null, null, null, VariableValues.Empty, JsonSegment.Empty), + new OperationRequest("{ number }", null, null, null, VariableValues.Empty, JsonSegment.Empty) ]); var request = new GraphQLHttpRequest(operationRequest, new Uri("http://localhost:5000/graphql")); @@ -267,9 +269,11 @@ public async Task ReadAsResultStream_Single_Application_Json_Lines_Response() var operationRequest = new VariableBatchRequest( "{ number }", - variables: [ - new Dictionary() - ]); + null, + null, + null, + ImmutableArray.Create(VariableValues.Empty), + JsonSegment.Empty); var request = new 
GraphQLHttpRequest(operationRequest, new Uri("http://localhost:5000/graphql")); // act @@ -311,10 +315,11 @@ public async Task ReadAsResultStream_Multi_Application_Json_Lines_Response() var operationRequest = new VariableBatchRequest( "{ number }", - variables: [ - new Dictionary(), - new Dictionary() - ]); + null, + null, + null, + ImmutableArray.Create(VariableValues.Empty, VariableValues.Empty), + JsonSegment.Empty); var request = new GraphQLHttpRequest(operationRequest, new Uri("http://localhost:5000/graphql")); // act @@ -358,7 +363,7 @@ public async Task ReadAsResultStream_Single_Text_Event_Stream_Response() "text/event-stream"); using var client = new DefaultGraphQLHttpClient(new HttpClient(handler)); - var operationRequest = new OperationRequest("{ number }"); + var operationRequest = new OperationRequest("{ number }", null, null, null, VariableValues.Empty, JsonSegment.Empty); var request = new GraphQLHttpRequest(operationRequest, new Uri("http://localhost:5000/graphql")); // act @@ -407,7 +412,7 @@ public async Task ReadAsResultStream_Multi_Text_Event_Stream_Response() "text/event-stream"); using var client = new DefaultGraphQLHttpClient(new HttpClient(handler)); - var operationRequest = new OperationRequest("{ number }"); + var operationRequest = new OperationRequest("{ number }", null, null, null, VariableValues.Empty, JsonSegment.Empty); var request = new GraphQLHttpRequest(operationRequest, new Uri("http://localhost:5000/graphql")); // act @@ -452,12 +457,15 @@ public async Task Post_Variables_Do_Not_Escape_Apostrophe_To_Unicode() + "a glossy finish that accentuates its sculptural silhouette. Balancing softness with warmth, this " + "palette extends Curry\u2019s legacy with a fresh, contemporary expression. It's still iconic."; + var variableValues = CreateVariableValues( + new Dictionary { ["description"] = description }); var operationRequest = new OperationRequest( "mutation($description: String!) 
{ updateDescription(description: $description) }", - variables: new Dictionary - { - ["description"] = description - }); + null, + null, + null, + variableValues, + JsonSegment.Empty); var request = new GraphQLHttpRequest(operationRequest, new Uri("http://localhost:5000/graphql")); @@ -478,6 +486,17 @@ public async Task Post_Variables_Do_Not_Escape_Apostrophe_To_Unicode() Assert.Equal(description, serializedDescription); } + private static VariableValues CreateVariableValues(Dictionary variables) + { + var writer = new ChunkedArrayWriter(); + var startPosition = writer.Position; + using var jsonWriter = new Utf8JsonWriter(writer, new JsonWriterOptions { Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping }); + JsonSerializer.Serialize(jsonWriter, variables); + jsonWriter.Flush(); + var length = writer.Position - startPosition; + return new VariableValues(default, JsonSegment.Create(writer, startPosition, length)); + } + private class MockHttpMessageHandler(Stream responseStream, string contentType) : HttpMessageHandler { public MockHttpMessageHandler(string responseContent, string contentType) diff --git a/src/HotChocolate/Json/src/Json/JsonWriter.cs b/src/HotChocolate/Json/src/Json/JsonWriter.cs index 60af6db0381..028ec2f9b14 100644 --- a/src/HotChocolate/Json/src/Json/JsonWriter.cs +++ b/src/HotChocolate/Json/src/Json/JsonWriter.cs @@ -478,6 +478,50 @@ public void WriteRawValue(ReadOnlySpan utf8Json) SetFlagToAddListSeparatorBeforeNextItem(); } + /// + /// Begins writing a raw value that may be provided in multiple chunks. + /// Writes the list separator if needed and prepares for subsequent + /// calls. + /// + /// The first chunk of raw UTF-8 encoded JSON. 
+ internal void WriteRawValueStart(ReadOnlySpan utf8Json) + { + FlushDeferredPropertyName(); + + var maxRequired = utf8Json.Length + 1; + var bytesWritten = 0; + + var output = _writer.GetSpan(maxRequired); + + if (_currentDepth < 0) + { + output[bytesWritten++] = JsonConstants.Comma; + } + + utf8Json.CopyTo(output[bytesWritten..]); + bytesWritten += utf8Json.Length; + + _writer.Advance(bytesWritten); + } + + /// + /// Continues writing raw bytes for a value started with . + /// Does not write separators or update token state. + /// + /// The next chunk of raw UTF-8 encoded JSON. + internal void WriteRawValueContinuation(ReadOnlySpan utf8Json) + { + var output = _writer.GetSpan(utf8Json.Length); + utf8Json.CopyTo(output); + _writer.Advance(utf8Json.Length); + } + + /// + /// Completes a multi-chunk raw value write started with . + /// Sets the list separator flag for the next item. + /// + internal void WriteRawValueEnd() => SetFlagToAddListSeparatorBeforeNextItem(); + /// /// Internal buffer used for deferred property name writes. /// diff --git a/src/HotChocolate/Utilities/src/Utilities.Buffers/ChunkedArrayWriter.cs b/src/HotChocolate/Utilities/src/Utilities.Buffers/ChunkedArrayWriter.cs new file mode 100644 index 00000000000..18a2f80e68a --- /dev/null +++ b/src/HotChocolate/Utilities/src/Utilities.Buffers/ChunkedArrayWriter.cs @@ -0,0 +1,638 @@ +using System.Buffers; +using System.Runtime.CompilerServices; +#if NET8_0_OR_GREATER +using System.Runtime.InteropServices; +using System.Runtime.Intrinsics; +#endif + +namespace HotChocolate.Buffers; + +/// +/// An that grows by renting additional fixed-size chunks +/// from instead of resizing and copying like . +/// Uses cursor-based addressing where a single location maps to a chunk index +/// and offset via shift and mask operations on . 
/// <summary>
/// A buffer writer that stores written bytes in fixed-size pooled chunks and
/// addresses written segments through flat integer cursors (chunk index in the
/// high bits, offset within the chunk in the low bits). Provides equality,
/// hashing, and copy operations over segments that may span chunk boundaries.
/// </summary>
internal sealed class ChunkedArrayWriter : IBufferWriter<byte>, IDisposable
{
    private const int BufferSize = JsonMemory.BufferSize;
    private const int BufferMask = BufferSize - 1;

    // log2(131072) = 17. BufferSize must remain a power of two for the
    // shift/mask cursor arithmetic in this type to stay valid.
    private const int BufferShift = 17;
    private const int DefaultScratchSize = 128;
    private const int SimdThreshold = 32;

    private byte[][] _chunks;
    private int _chunkCount;
    private int _currentChunk;
    private int _currentChunkOffset;
    private byte[] _scratch;
    private bool _advanceFromScratch;
    private bool _disposed;

    public ChunkedArrayWriter()
    {
        _chunks = ArrayPool<byte[]>.Shared.Rent(4);
        _chunks[0] = JsonMemory.Rent(JsonMemoryKind.Variables);
        _chunkCount = 1;
        _scratch = new byte[DefaultScratchSize];
    }

    /// <summary>
    /// Gets the current write position as a flat cursor.
    /// </summary>
    public int Position
    {
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        // Addition, not OR: after Advance fills a chunk exactly,
        // _currentChunkOffset can equal BufferSize (bit 17 set) until the next
        // GetSpan/GetMemory call moves to the following chunk. OR-ing would
        // silently drop the carry whenever the current chunk index is odd.
        get => (_currentChunk << BufferShift) + _currentChunkOffset;
    }

    /// <summary>
    /// Gets the total number of bytes written.
    /// </summary>
    public int Length => Position;

    /// <inheritdoc />
    public Span<byte> GetSpan(int sizeHint = 0)
    {
        var size = sizeHint < 1 ? DefaultScratchSize : sizeHint;

        return UseScratch(size)
            ? _scratch
            : _chunks[_currentChunk].AsSpan(_currentChunkOffset);
    }

    /// <inheritdoc />
    public Memory<byte> GetMemory(int sizeHint = 0)
    {
        var size = sizeHint < 1 ? DefaultScratchSize : sizeHint;

        return UseScratch(size)
            ? _scratch
            : _chunks[_currentChunk].AsMemory(_currentChunkOffset);
    }

    /// <summary>
    /// Prepares for a write of <paramref name="size"/> bytes. Returns
    /// <c>true</c> when the request does not fit into the remaining space of
    /// the current chunk and the scratch buffer must be handed out instead;
    /// <see cref="Advance"/> then copies the scratch content into the chunks.
    /// </summary>
    private bool UseScratch(int size)
    {
        if (_currentChunkOffset == BufferSize)
        {
            MoveToNextChunk();
        }

        if (size <= BufferSize - _currentChunkOffset)
        {
            _advanceFromScratch = false;
            return false;
        }

        if (size > _scratch.Length)
        {
            _scratch = new byte[size];
        }

        _advanceFromScratch = true;
        return true;
    }

    /// <inheritdoc />
    public void Advance(int count)
    {
        if (!_advanceFromScratch)
        {
            _currentChunkOffset += count;
            return;
        }

        _advanceFromScratch = false;
        var source = _scratch.AsSpan(0, count);

        // Distribute the scratch content over as many chunks as needed.
        while (source.Length > 0)
        {
            if (_currentChunkOffset == BufferSize)
            {
                MoveToNextChunk();
            }

            var take = Math.Min(source.Length, BufferSize - _currentChunkOffset);
            source.Slice(0, take).CopyTo(_chunks[_currentChunk].AsSpan(_currentChunkOffset, take));
            _currentChunkOffset += take;
            source = source.Slice(take);
        }
    }

    /// <summary>
    /// Reads as much of the requested segment as the current chunk holds and
    /// updates <paramref name="start"/>/<paramref name="length"/> so the
    /// caller can continue in the next chunk. <paramref name="length"/>
    /// becomes zero when the segment was fully consumed.
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public ReadOnlySpan<byte> Read(ref int start, ref int length)
    {
        var chunkIndex = start >> BufferShift;
        var offsetInChunk = start & BufferMask;
        var available = BufferSize - offsetInChunk;

        if (available >= length)
        {
            var span = _chunks[chunkIndex].AsSpan(offsetInChunk, length);
            length = 0;
            return span;
        }

        start = (chunkIndex + 1) << BufferShift;
        length -= available;
        return _chunks[chunkIndex].AsSpan(offsetInChunk, available);
    }

    /// <summary>
    /// Compares an external span against a written segment at the specified
    /// location. Handles segments that span chunk boundaries.
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public bool SequenceEqual(ReadOnlySpan<byte> span, int location, int length)
    {
        if (span.Length != length)
        {
            return false;
        }

        if (length == 0)
        {
            return true;
        }

        var chunkIndex = location >> BufferShift;
        var offsetInChunk = location & BufferMask;
        var availableInChunk = BufferSize - offsetInChunk;

        // Fast path: segment is entirely within one chunk.
        if (availableInChunk >= length)
        {
            return span.SequenceEqual(
                _chunks[chunkIndex].AsSpan(offsetInChunk, length));
        }

        return SequenceEqualMultiChunk(span, chunkIndex, offsetInChunk, length);
    }

    private bool SequenceEqualMultiChunk(
        ReadOnlySpan<byte> span,
        int chunkIndex,
        int offsetInChunk,
        int remaining)
    {
        var spanOffset = 0;

        while (remaining > 0)
        {
            var available = BufferSize - offsetInChunk;
            var toCompare = Math.Min(remaining, available);

            if (!span.Slice(spanOffset, toCompare).SequenceEqual(
                _chunks[chunkIndex].AsSpan(offsetInChunk, toCompare)))
            {
                return false;
            }

            spanOffset += toCompare;
            remaining -= toCompare;
            chunkIndex++;
            offsetInChunk = 0;
        }

        return true;
    }

    /// <summary>
    /// Compares two written segments for equality.
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public bool SequenceEqual(int locationA, int locationB, int length)
    {
        // Identical cursors reference identical bytes.
        if (locationA == locationB)
        {
            return true;
        }

        if (length == 0)
        {
            return true;
        }

        var chunkA = locationA >> BufferShift;
        var offsetA = locationA & BufferMask;
        var availA = BufferSize - offsetA;
        var chunkB = locationB >> BufferShift;
        var offsetB = locationB & BufferMask;
        var availB = BufferSize - offsetB;

        // Fast path: both segments within their respective single chunks.
        if (availA >= length && availB >= length)
        {
            return _chunks[chunkA].AsSpan(offsetA, length).SequenceEqual(
                _chunks[chunkB].AsSpan(offsetB, length));
        }

        return SequenceEqualMultiChunkTwoSegments(chunkA, offsetA, chunkB, offsetB, length);
    }

    private bool SequenceEqualMultiChunkTwoSegments(
        int chunkA, int offsetA,
        int chunkB, int offsetB,
        int remaining)
    {
        while (remaining > 0)
        {
            var availA = BufferSize - offsetA;
            var availB = BufferSize - offsetB;

            // Compare only as far as both sides stay within their chunks.
            var toCompare = Math.Min(remaining, Math.Min(availA, availB));

            if (!_chunks[chunkA].AsSpan(offsetA, toCompare).SequenceEqual(
                _chunks[chunkB].AsSpan(offsetB, toCompare)))
            {
                return false;
            }

            remaining -= toCompare;
            offsetA += toCompare;
            offsetB += toCompare;

            if (offsetA >= BufferSize)
            {
                chunkA++;
                offsetA = 0;
            }

            if (offsetB >= BufferSize)
            {
                chunkB++;
                offsetB = 0;
            }
        }

        return true;
    }

    /// <summary>
    /// Computes a hash code for a written segment using hash * 31 + b.
    /// Handles segments that span chunk boundaries.
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public int GetHashCode(int location, int length)
    {
        if (length == 0)
        {
            return 0;
        }

        var chunkIndex = location >> BufferShift;
        var offsetInChunk = location & BufferMask;
        var availableInChunk = BufferSize - offsetInChunk;

        // Fast path: segment is entirely within one chunk.
        if (availableInChunk >= length)
        {
            return (int)(ComputeHashCore(
                0u, _chunks[chunkIndex].AsSpan(offsetInChunk, length)) & 0x7FFFFFFF);
        }

        return GetHashCodeMultiChunk(chunkIndex, offsetInChunk, length);
    }

    private int GetHashCodeMultiChunk(int chunkIndex, int offsetInChunk, int remaining)
    {
        var hash = 0u;

        while (remaining > 0)
        {
            var toHash = Math.Min(remaining, BufferSize - offsetInChunk);
            hash = ComputeHashCore(hash, _chunks[chunkIndex].AsSpan(offsetInChunk, toHash));
            remaining -= toHash;
            chunkIndex++;
            offsetInChunk = 0;
        }

        return (int)(hash & 0x7FFFFFFF);
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    private static uint ComputeHashCore(uint hash, ReadOnlySpan<byte> bytes)
    {
#if NET8_0_OR_GREATER
        if (bytes.Length >= SimdThreshold)
        {
            return ComputeHashSimd(hash, bytes);
        }
#endif
        unchecked
        {
            foreach (var b in bytes)
            {
                hash = (hash * 31) + b;
            }
        }

        return hash;
    }

#if NET8_0_OR_GREATER
    private static uint ComputeHashSimd(uint hash, ReadOnlySpan<byte> bytes)
    {
        unchecked
        {
            const uint pow31_1 = 31;
            const uint pow31_2 = 31 * 31;
            const uint pow31_3 = 31 * 31 * 31;
            const uint pow31_4 = 31 * 31 * 31 * 31;
            const uint pow31_5 = pow31_4 * 31;
            const uint pow31_6 = pow31_5 * 31;
            const uint pow31_7 = pow31_6 * 31;
            const uint pow31_8 = pow31_7 * 31;

            ref var src = ref MemoryMarshal.GetReference(bytes);
            var i = 0;

            if (Vector256.IsHardwareAccelerated && bytes.Length >= 64)
            {
                var acc = Vector256<uint>.Zero;
                var mul = Vector256.Create(pow31_8);
                var simdEnd = bytes.Length & ~7;

                for (; i < simdEnd; i += 8)
                {
                    // Read 8 bytes and widen them to 8 uint lanes.
                    var raw = Vector128.CreateScalarUnsafe(
                        Unsafe.ReadUnaligned<ulong>(ref Unsafe.Add(ref src, i))).AsByte();
                    var (loShort, _) = Vector128.Widen(raw);
                    var (lo32, hi32) = Vector128.Widen(loShort);
                    var wide = Vector256.Create(lo32, hi32);

                    acc = (acc * mul) + wide;
                }

                var finalPow = Vector256.Create(
                    pow31_7, pow31_6, pow31_5, pow31_4,
                    pow31_3, pow31_2, pow31_1, 1u);
                acc *= finalPow;

                var sum128 = acc.GetLower() + acc.GetUpper();
                var t = sum128 + Vector128.Shuffle(sum128, Vector128.Create(2u, 3u, 0u, 1u));
                var simdResult = (t + Vector128.Shuffle(t, Vector128.Create(1u, 0u, 3u, 2u))).ToScalar();

                hash = (hash * Pow31(simdEnd)) + simdResult;
            }

            if (Vector128.IsHardwareAccelerated && bytes.Length - i >= 4)
            {
                var acc = Vector128<uint>.Zero;
                var mul = Vector128.Create(pow31_4);
                var simdEnd = i + ((bytes.Length - i) & ~3);
                var simdStart = i;

                for (; i < simdEnd; i += 4)
                {
                    // Read exactly 4 bytes per iteration. A ulong read here
                    // would touch up to 4 bytes past the end of the span when
                    // simdEnd coincides with bytes.Length (out-of-bounds read).
                    var raw = Vector128.CreateScalarUnsafe(
                        Unsafe.ReadUnaligned<uint>(ref Unsafe.Add(ref src, i))).AsByte();
                    var (loShort, _) = Vector128.Widen(raw);
                    var (wide, _) = Vector128.Widen(loShort);

                    acc = (acc * mul) + wide;
                }

                var finalPow = Vector128.Create(pow31_3, pow31_2, pow31_1, 1u);
                acc *= finalPow;

                var t = acc + Vector128.Shuffle(acc, Vector128.Create(2u, 3u, 0u, 1u));
                var simdResult = (t + Vector128.Shuffle(t, Vector128.Create(1u, 0u, 3u, 2u))).ToScalar();

                hash = (hash * Pow31(simdEnd - simdStart)) + simdResult;
            }

            // Scalar tail for remaining bytes.
            for (; i < bytes.Length; i++)
            {
                hash = (hash * 31) + Unsafe.Add(ref src, i);
            }

            return hash;
        }
    }

    /// <summary>
    /// Computes 31^n (mod 2^32) by binary exponentiation.
    /// </summary>
    private static uint Pow31(int n)
    {
        unchecked
        {
            var result = 1u;
            var b = 31u;

            while (n > 0)
            {
                if ((n & 1) != 0)
                {
                    result *= b;
                }

                b *= b;
                n >>= 1;
            }

            return result;
        }
    }
#endif

    /// <summary>
    /// Writes a previously written segment to the specified target buffer writer.
    /// Handles segments that span chunk boundaries.
    /// </summary>
    public void WriteTo(IBufferWriter<byte> target, int location, int length)
    {
#if NET8_0_OR_GREATER
        ArgumentNullException.ThrowIfNull(target);
#else
        if (target is null)
        {
            throw new ArgumentNullException(nameof(target));
        }
#endif

        var remaining = length;
        var chunkIndex = location >> BufferShift;
        var offsetInChunk = location & BufferMask;

        while (remaining > 0)
        {
            var available = BufferSize - offsetInChunk;
            var toCopy = Math.Min(remaining, available);

            var destination = target.GetSpan(toCopy);
            _chunks[chunkIndex].AsSpan(offsetInChunk, toCopy).CopyTo(destination);
            target.Advance(toCopy);

            remaining -= toCopy;
            chunkIndex++;
            offsetInChunk = 0;
        }
    }

    /// <summary>
    /// Copies a written segment to the specified destination span.
    /// Handles segments that span chunk boundaries.
    /// </summary>
    public void CopyTo(Span<byte> destination, int location, int length)
    {
        if (destination.Length < length)
        {
            throw new ArgumentException("Destination span is too small.", nameof(destination));
        }

        var remaining = length;
        var chunkIndex = location >> BufferShift;
        var offsetInChunk = location & BufferMask;
        var destOffset = 0;

        while (remaining > 0)
        {
            var available = BufferSize - offsetInChunk;
            var toCopy = Math.Min(remaining, available);

            _chunks[chunkIndex].AsSpan(offsetInChunk, toCopy).CopyTo(destination.Slice(destOffset, toCopy));

            destOffset += toCopy;
            remaining -= toCopy;
            chunkIndex++;
            offsetInChunk = 0;
        }
    }

    /// <summary>
    /// Reads as much data as possible from the current chunk at the given location.
    /// If all requested data fits in one chunk, returns true and <paramref name="span"/>
    /// contains the complete segment. If the data crosses a chunk boundary, returns false,
    /// <paramref name="span"/> contains the portion from the first chunk, and
    /// <paramref name="remaining"/>/<paramref name="nextLocation"/> indicate where to continue.
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public bool TryRead(
        int location,
        int length,
        out ReadOnlySpan<byte> span,
        out int remaining,
        out int nextLocation)
    {
        var chunkIndex = location >> BufferShift;
        var offsetInChunk = location & BufferMask;
        var available = BufferSize - offsetInChunk;

        if (available >= length)
        {
            span = _chunks[chunkIndex].AsSpan(offsetInChunk, length);
            remaining = 0;
            nextLocation = location + length;
            return true;
        }

        span = _chunks[chunkIndex].AsSpan(offsetInChunk, available);
        remaining = length - available;
        nextLocation = (chunkIndex + 1) << BufferShift;
        return false;
    }

    /// <summary>
    /// Resets the write position to a specific location.
    /// Used for rewinding after a duplicate is detected during dedup.
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public void ResetTo(int position)
    {
        _currentChunk = position >> BufferShift;
        _currentChunkOffset = position & BufferMask;
        _advanceFromScratch = false;
    }

    /// <summary>
    /// Resets the write position to the beginning.
    /// All rented chunks are kept for reuse.
    /// </summary>
    public void Reset()
    {
        _currentChunk = 0;
        _currentChunkOffset = 0;
        _advanceFromScratch = false;
    }

    /// <summary>
    /// Returns excess chunks beyond the first one.
    /// Call this when the owning store is returned to a pool.
    /// </summary>
    public void Clean()
    {
        for (var i = 1; i < _chunkCount; i++)
        {
            JsonMemory.Return(JsonMemoryKind.Variables, _chunks[i]);
            _chunks[i] = null!;
        }

        _chunkCount = Math.Min(_chunkCount, 1);
        _currentChunk = 0;
        _currentChunkOffset = 0;
        _advanceFromScratch = false;
    }

    /// <summary>
    /// Returns all rented chunks and the chunk array to their pools.
    /// </summary>
    public void Dispose()
    {
        if (!_disposed)
        {
            for (var i = 0; i < _chunkCount; i++)
            {
                JsonMemory.Return(JsonMemoryKind.Variables, _chunks[i]);
                _chunks[i] = null!;
            }

            ArrayPool<byte[]>.Shared.Return(_chunks, clearArray: true);
            _chunkCount = 0;
            _currentChunk = 0;
            _currentChunkOffset = 0;
            _disposed = true;
        }
    }

    /// <summary>
    /// Advances to the next chunk, growing the chunk table and renting a new
    /// chunk from the pool when the cursor moves past previously rented ones.
    /// </summary>
    private void MoveToNextChunk()
    {
        _currentChunk++;
        _currentChunkOffset = 0;

        if (_currentChunk >= _chunks.Length)
        {
            var newChunks = ArrayPool<byte[]>.Shared.Rent(_chunks.Length * 2);
            Array.Copy(_chunks, newChunks, _chunkCount);
            ArrayPool<byte[]>.Shared.Return(_chunks, clearArray: true);
            _chunks = newChunks;
        }

        if (_currentChunk >= _chunkCount)
        {
            _chunks[_currentChunk] = JsonMemory.Rent(JsonMemoryKind.Variables);
            _chunkCount = _currentChunk + 1;
        }
    }
}
using System.Buffers;
using HotChocolate.Buffers;

namespace HotChocolate.Utilities;

public class ChunkedArrayWriterHashTests
{
    /// <summary>
    /// Scalar reference implementation (hash * 31 + b) against which the
    /// writer's potentially SIMD-accelerated hash is verified.
    /// </summary>
    private static int ComputeReferenceHash(byte[] data)
    {
        var h = 0u;

        for (var i = 0; i < data.Length; i++)
        {
            h = unchecked((h * 31) + data[i]);
        }

        return (int)(h & 0x7FFFFFFF);
    }

    /// <summary>
    /// Creates a writer whose buffer starts with the given bytes.
    /// </summary>
    private static ChunkedArrayWriter CreateWriter(byte[] data)
    {
        var writer = new ChunkedArrayWriter();
        data.CopyTo(writer.GetSpan(data.Length));
        writer.Advance(data.Length);
        return writer;
    }

    private static byte[] RandomBytes(int length, int seed)
    {
        var bytes = new byte[length];
        new Random(seed).NextBytes(bytes);
        return bytes;
    }

    [Fact]
    public void GetHashCode_EmptySegment_ReturnsZero()
    {
        using var writer = CreateWriter([1, 2, 3]);

        Assert.Equal(0, writer.GetHashCode(0, 0));
    }

    [Fact]
    public void GetHashCode_SingleByte_MatchesScalar()
    {
        byte[] single = [42];

        using var writer = CreateWriter(single);

        Assert.Equal(ComputeReferenceHash(single), writer.GetHashCode(0, single.Length));
    }

    [Theory]
    [InlineData(1)]
    [InlineData(3)]
    [InlineData(4)]
    [InlineData(7)]
    [InlineData(8)]
    [InlineData(15)]
    [InlineData(16)]
    [InlineData(31)]
    [InlineData(32)]
    [InlineData(33)]
    [InlineData(63)]
    [InlineData(64)]
    [InlineData(65)]
    [InlineData(100)]
    [InlineData(256)]
    [InlineData(500)]
    public void GetHashCode_VariousLengths_MatchesScalar(int length)
    {
        var payload = RandomBytes(length, 42);

        using var writer = CreateWriter(payload);

        Assert.Equal(ComputeReferenceHash(payload), writer.GetHashCode(0, payload.Length));
    }

    [Fact]
    public void GetHashCode_AllZeros_MatchesScalar()
    {
        // A freshly allocated array is already all-zero.
        var zeros = new byte[128];

        using var writer = CreateWriter(zeros);

        Assert.Equal(ComputeReferenceHash(zeros), writer.GetHashCode(0, zeros.Length));
    }

    [Fact]
    public void GetHashCode_AllOnes_MatchesScalar()
    {
        var ones = new byte[128];
        Array.Fill(ones, (byte)0xFF);

        using var writer = CreateWriter(ones);

        Assert.Equal(ComputeReferenceHash(ones), writer.GetHashCode(0, ones.Length));
    }

    [Fact]
    public void GetHashCode_SequentialBytes_MatchesScalar()
    {
        var sequential = new byte[256];

        for (var i = 0; i < sequential.Length; i++)
        {
            sequential[i] = (byte)(i & 0xFF);
        }

        using var writer = CreateWriter(sequential);

        Assert.Equal(ComputeReferenceHash(sequential), writer.GetHashCode(0, sequential.Length));
    }

    [Fact]
    public void GetHashCode_OffsetIntoData_MatchesScalar()
    {
        var payload = RandomBytes(200, 99);

        using var writer = CreateWriter(payload);

        // Hash a sub-range starting at offset 50, length 100.
        var segment = new byte[100];
        Array.Copy(payload, 50, segment, 0, 100);

        Assert.Equal(ComputeReferenceHash(segment), writer.GetHashCode(50, 100));
    }

    [Fact]
    public void GetHashCode_ConsistentAcrossMultipleCalls()
    {
        var payload = RandomBytes(128, 7);

        using var writer = CreateWriter(payload);

        var first = writer.GetHashCode(0, payload.Length);
        var second = writer.GetHashCode(0, payload.Length);

        Assert.Equal(first, second);
    }

    [Fact]
    public void GetHashCode_DifferentData_DifferentHashes()
    {
        var allOnes = new byte[64];
        var allTwos = new byte[64];
        Array.Fill(allOnes, (byte)1);
        Array.Fill(allTwos, (byte)2);

        using var writerA = CreateWriter(allOnes);
        using var writerB = CreateWriter(allTwos);

        Assert.NotEqual(
            writerA.GetHashCode(0, allOnes.Length),
            writerB.GetHashCode(0, allTwos.Length));
    }

    [Fact]
    public void GetHashCode_ResultIsNonNegative()
    {
        var payload = RandomBytes(256, 13);

        using var writer = CreateWriter(payload);

        Assert.True(writer.GetHashCode(0, payload.Length) >= 0);
    }

    /// <summary>
    /// Verifies the SIMD-accelerated hash path produces identical results to the
    /// scalar reference for every length from 0..512 across multiple random seeds.
    /// Lengths 0-31 use the scalar path, 32-63 use Vector128, and 64+ use Vector256
    /// (when hardware accelerated). This test ensures all three tiers and every
    /// remainder length produce the same hash.
    /// </summary>
    [Fact]
    public void GetHashCode_SimdMatchesScalar_AllLengthsUpTo512()
    {
        for (var seed = 0; seed < 5; seed++)
        {
            var rng = new Random(seed);

            for (var length = 0; length <= 512; length++)
            {
                var payload = new byte[length];
                rng.NextBytes(payload);

                using var writer = CreateWriter(payload);

                Assert.Equal(
                    ComputeReferenceHash(payload),
                    writer.GetHashCode(0, payload.Length));
            }
        }
    }
}