Skip to content
Merged
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
{
"id": "abc",
"type": "data",
"payload": {
"data": {
"onReview": {
"stars": 5
}
}
}
}
2 changes: 1 addition & 1 deletion src/HotChocolate/Fusion/benchmarks/k6/deep-recursion.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { check } from "k6";
import { Rate } from "k6/metrics";
import { textSummary } from "https://jslib.k6.io/k6-summary/0.0.1/index.js";

const GRAPHQL_URL = 'http://localhost:5220/graphql';
const GRAPHQL_URL = 'http://localhost:5000/graphql';
const endpoint = __ENV.GATEWAY_ENDPOINT || GRAPHQL_URL;
const mode = __ENV.MODE || "constant";
const isConstant = mode === "constant";
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,180 @@
using System.Diagnostics.Tracing;

namespace eShop.Gateway;

/// <summary>
/// Listens to PathSegmentPool ETW events and logs aggregated usage metrics.
/// </summary>
internal sealed class PathSegmentPoolDiagnostics : EventListener, IHostedService
{
    // Event IDs as emitted by PathSegmentPoolEventSource.
    private const int PoolCreatedEventId = 1;      // (PoolId, SegmentSize, Arrays, TotalBytes)
    private const int SegmentRentedEventId = 2;    // (ArrayId, Length, PoolId, InUse)
    private const int SegmentReturnedEventId = 3;  // (ArrayId, Length, PoolId, InUse)
    private const int PoolExhaustedEventId = 4;    // (PoolId, MaxArrays)
    private const int SegmentDroppedEventId = 5;   // (ArrayId, Length, PoolId)
    private const int SegmentAllocatedEventId = 6; // (ArrayId, Length, PoolId)
    private const int PoolTrimmedEventId = 7;      // (PoolId, Trimmed, Remaining, InUse)

    private const string EventSourceName = "HotChocolate-Fusion-PathSegmentPool";

    private readonly Timer _timer;

    // Pool configuration captured from the PoolCreated event. Written from ETW
    // callback threads, read from the timer callback; reads below use
    // Volatile.Read / Interlocked.Read so the snapshot observes fresh values.
    private int _poolId;
    private int _segmentSize;
    private int _maxArrays;
    private long _maxBytes;

    // Monotonic counters, incremented via Interlocked from OnEventWritten.
    private long _rented;
    private long _returned;
    private long _exhausted;
    private long _dropped;
    private long _allocated;
    private long _trimmedEvents;

    // Last-observed gauges from trim/rent events.
    private int _lastTrimRemaining;
    private int _lastTrimInUse;
    private int _peakInUse;

    public PathSegmentPoolDiagnostics()
    {
        // Created disabled; StartAsync arms the periodic snapshot.
        // NOTE(review): the EventListener base constructor can dispatch
        // OnEventSourceCreated/OnEventWritten before this line runs — safe here
        // because those callbacks never touch _timer.
        _timer = new Timer(LogSnapshot, null, Timeout.Infinite, Timeout.Infinite);
    }

    /// <summary>
    /// Enables verbose events for the path-segment pool event source as soon as
    /// it is created (or immediately, if it already exists).
    /// </summary>
    protected override void OnEventSourceCreated(EventSource eventSource)
    {
        if (eventSource.Name == EventSourceName)
        {
            EnableEvents(eventSource, EventLevel.Verbose);
        }
    }

    /// <summary>
    /// Aggregates incoming pool events into counters and gauges. May be invoked
    /// concurrently from multiple threads, hence the interlocked updates.
    /// </summary>
    protected override void OnEventWritten(EventWrittenEventArgs e)
    {
        switch (e.EventId)
        {
            case PoolCreatedEventId:
                if (e.Payload is { Count: >= 4 } created)
                {
                    if (created[0] is int poolId)
                    {
                        _poolId = poolId;
                    }
                    if (created[1] is int segmentSize)
                    {
                        _segmentSize = segmentSize;
                    }
                    if (created[2] is int maxArrays)
                    {
                        _maxArrays = maxArrays;
                    }
                    if (created[3] is long maxBytes)
                    {
                        // Interlocked.Exchange avoids a torn 64-bit write on
                        // 32-bit runtimes; LogSnapshot pairs it with
                        // Interlocked.Read.
                        Interlocked.Exchange(ref _maxBytes, maxBytes);
                    }
                }
                break;

            case SegmentRentedEventId:
                Interlocked.Increment(ref _rented);
                if (e.Payload is { Count: >= 4 } rent && rent[3] is int inUseRent)
                {
                    UpdatePeakInUse(inUseRent);
                }
                break;

            case SegmentReturnedEventId:
                Interlocked.Increment(ref _returned);
                break;

            case PoolExhaustedEventId:
                Interlocked.Increment(ref _exhausted);
                break;

            case SegmentDroppedEventId:
                Interlocked.Increment(ref _dropped);
                break;

            case SegmentAllocatedEventId:
                Interlocked.Increment(ref _allocated);
                break;

            case PoolTrimmedEventId:
                Interlocked.Increment(ref _trimmedEvents);
                if (e.Payload is { Count: >= 4 } trimmed)
                {
                    if (trimmed[2] is int remaining)
                    {
                        _lastTrimRemaining = remaining;
                    }
                    if (trimmed[3] is int inUseTrim)
                    {
                        _lastTrimInUse = inUseTrim;
                    }
                }
                break;
        }
    }

    /// <summary>
    /// Lock-free maximum update of <see cref="_peakInUse"/>: retries the CAS
    /// using the value returned by the failed exchange instead of re-reading
    /// the field, and bails out as soon as the current peak is already higher.
    /// </summary>
    private void UpdatePeakInUse(int inUse)
    {
        var observed = Volatile.Read(ref _peakInUse);
        while (inUse > observed)
        {
            var previous = Interlocked.CompareExchange(ref _peakInUse, inUse, observed);
            if (previous == observed)
            {
                return;
            }

            // Another thread won the race; retry against its value.
            observed = previous;
        }
    }

    /// <summary>
    /// Timer callback: writes one aggregated snapshot line to the console.
    /// Counters are read with Interlocked/Volatile so values published by the
    /// event callbacks are visible here.
    /// </summary>
    private void LogSnapshot(object? state)
    {
        var rented = Interlocked.Read(ref _rented);
        var returned = Interlocked.Read(ref _returned);
        var exhausted = Interlocked.Read(ref _exhausted);
        var dropped = Interlocked.Read(ref _dropped);
        var allocated = Interlocked.Read(ref _allocated);
        var trimmedEvents = Interlocked.Read(ref _trimmedEvents);

        // Rent/return events can race the snapshot, so this is approximate.
        var outstanding = rented - returned;

        Console.WriteLine(
            "[PathSegmentPool] PoolId={0}, SegmentSize={1}, MaxArrays={2}, MaxBytes={3}, "
            + "Rented={4}, Returned={5}, Outstanding={6}, PeakInUse={7}, "
            + "Exhausted={8}, Allocated={9}, Dropped={10}, "
            + "TrimmedEvents={11}, LastTrimRemaining={12}, LastTrimInUse={13}",
            Volatile.Read(ref _poolId),
            Volatile.Read(ref _segmentSize),
            Volatile.Read(ref _maxArrays),
            Interlocked.Read(ref _maxBytes),
            rented,
            returned,
            outstanding,
            Volatile.Read(ref _peakInUse),
            exhausted,
            allocated,
            dropped,
            trimmedEvents,
            Volatile.Read(ref _lastTrimRemaining),
            Volatile.Read(ref _lastTrimInUse));
    }

    /// <summary>Arms the 5-second periodic snapshot timer.</summary>
    public Task StartAsync(CancellationToken cancellationToken)
    {
        Console.WriteLine("[PathSegmentPool] Diagnostics started");
        _timer.Change(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(5));
        return Task.CompletedTask;
    }

    /// <summary>Stops the timer and emits one final snapshot.</summary>
    public Task StopAsync(CancellationToken cancellationToken)
    {
        _timer.Change(Timeout.Infinite, Timeout.Infinite);
        LogSnapshot(null);
        Console.WriteLine("[PathSegmentPool] Diagnostics stopped - final snapshot logged above");
        return Task.CompletedTask;
    }

    /// <summary>Releases the timer and unhooks the event listener.</summary>
    public override void Dispose()
    {
        _timer.Dispose();
        base.Dispose();
    }
}
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
ThreadPool.SetMinThreads(1024, 1024);
ThreadPool.SetMinThreads(512, 512);

var builder = WebApplication.CreateBuilder(args);

Expand All @@ -11,8 +11,7 @@

builder
.AddGraphQLGateway()
.ModifyPlannerOptions(o => o.EnableRequestGrouping = true)
.AddFileSystemConfiguration("./gateway.far");
.AddFileSystemConfiguration("gateway.far");

var app = builder.Build();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@
"commandName": "Project",
"dotnetRunMessages": true,
"launchBrowser": true,
"launchUrl": "http://localhost:5220/graphql",
"applicationUrl": "http://localhost:5220",
"launchUrl": "http://localhost:5000/graphql",
"applicationUrl": "http://localhost:5000",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Production"
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
using HotChocolate.Fusion.Configuration;
using HotChocolate.Fusion.Execution;
using HotChocolate.Fusion.Execution.Clients;
using HotChocolate.Fusion.Execution.Results;
using HotChocolate.Language;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.ObjectPool;
Expand Down Expand Up @@ -89,10 +90,18 @@ private static DefaultFusionGatewayBuilder CreateBuilder(

var builder = new DefaultFusionGatewayBuilder(services, name);
builder.AddDocumentCache();
builder.AddFetchResultStorePool();
builder.UseDefaultPipeline();
return builder;
}

private static void AddFetchResultStorePool(this IFusionGatewayBuilder builder)
{
    // Register a shared FetchResultStorePool in the schema services unless one
    // was already provided; three size levels, trimmed every five minutes.
    builder.ConfigureSchemaServices(
        static (_, schemaServices) =>
        {
            var pool = new FetchResultStorePool(
                levels: [4, 16, 64],
                trimInterval: TimeSpan.FromMinutes(5));
            schemaServices.TryAddSingleton(pool);
        });
}

private static IFusionGatewayBuilder AddDocumentCache(this IFusionGatewayBuilder builder)
{
builder.Services.TryAddKeyedSingleton<IDocumentCache>(
Expand Down
Loading
Loading