Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
2 changes: 1 addition & 1 deletion router-tests/go.mod
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ require (
github.com/wundergraph/cosmo/demo/pkg/subgraphs/projects v0.0.0-20250715110703-10f2e5f9c79e
github.com/wundergraph/cosmo/router v0.0.0-20260319123623-f186a0f724f6
github.com/wundergraph/cosmo/router-plugin v0.0.0-20250808194725-de123ba1c65e
github.com/wundergraph/graphql-go-tools/v2 v2.0.0-rc.267
github.com/wundergraph/graphql-go-tools/v2 v2.0.0-rc.267.0.20260409183305-e7151238827b
go.opentelemetry.io/otel v1.39.0
go.opentelemetry.io/otel/sdk v1.39.0
go.opentelemetry.io/otel/sdk/metric v1.39.0
Expand Down
4 changes: 2 additions & 2 deletions router-tests/go.sum
Original file line number Diff line number Diff line change
Expand Up @@ -357,8 +357,8 @@ github.com/wundergraph/astjson v1.1.0 h1:xORDosrZ87zQFJwNGe/HIHXqzpdHOFmqWgykCLV
github.com/wundergraph/astjson v1.1.0/go.mod h1:h12D/dxxnedtLzsKyBLK7/Oe4TAoGpRVC9nDpDrZSWw=
github.com/wundergraph/go-arena v1.1.0 h1:9+wSRkJAkA2vbYHp6s8tEGhPViRGQNGXqPHT0QzhdIc=
github.com/wundergraph/go-arena v1.1.0/go.mod h1:ROOysEHWJjLQ8FSfNxZCziagb7Qw2nXY3/vgKRh7eWw=
github.com/wundergraph/graphql-go-tools/v2 v2.0.0-rc.267 h1:qMkYR0oq0Cw61aDZs9VsCCVwNVSxRxT13ytz6WqCwJg=
github.com/wundergraph/graphql-go-tools/v2 v2.0.0-rc.267/go.mod h1:HjTAO/cuICpu31IfHY9qmSPygx6Gza7Wt9hTSReTI+A=
github.com/wundergraph/graphql-go-tools/v2 v2.0.0-rc.267.0.20260409183305-e7151238827b h1:8qC1P3uSAyxD3qU0ne88xNjA08KAIyeCRuxKQQn7wa4=
github.com/wundergraph/graphql-go-tools/v2 v2.0.0-rc.267.0.20260409183305-e7151238827b/go.mod h1:HjTAO/cuICpu31IfHY9qmSPygx6Gza7Wt9hTSReTI+A=
github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342 h1:FnBeRrxr7OU4VvAzt5X7s6266i6cSVkkFPS0TuXWbIg=
github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM=
github.com/yosida95/uritemplate/v3 v3.0.2 h1:Ed3Oyj9yrmi9087+NczuL5BwkIc4wvTb5zIM+UJPGz4=
Expand Down
305 changes: 305 additions & 0 deletions router-tests/protocol/defer_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,305 @@
package integration

import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"os"
"path/filepath"
"slices"
"strings"
"testing"

"github.com/sebdah/goldie/v2"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/wundergraph/astjson"

"github.com/wundergraph/cosmo/router-tests/testenv"
"github.com/wundergraph/cosmo/router/pkg/config"
)

// TestDeferTestDataQueries executes every .graphql query under
// testdata/queries_defer against a router, snapshots the raw multipart
// response and the reconstructed full response via goldie, and verifies the
// reconstruction matches the equivalent response produced without @defer.
func TestDeferTestDataQueries(t *testing.T) {
	t.Parallel()

	testDir := filepath.Join("testdata", "queries_defer")
	entries, err := os.ReadDir(testDir)
	require.NoError(t, err)

	// Group query files by the source prefix before "_defer_", e.g.
	// "full_defer_01_single_defer" → source = "full".
	groupQueries := map[string][]string{}

	for _, entry := range entries {
		fileName := entry.Name()
		ext := filepath.Ext(fileName)
		name := strings.TrimSuffix(fileName, ext)

		if ext != ".graphql" {
			continue
		}

		source, _, found := strings.Cut(name, "_defer_")
		if !found {
			continue
		}

		groupQueries[source] = append(groupQueries[source], name)
	}

	// Sort group names so subtests run in a deterministic order.
	groups := make([]string, 0, len(groupQueries))
	for k := range groupQueries {
		groups = append(groups, k)
	}
	slices.Sort(groups)

	for _, group := range groups {
		t.Run(group, func(t *testing.T) {
			for _, name := range groupQueries[group] {
				t.Run(name, func(t *testing.T) {
					t.Parallel()

					// Golden files for the raw multipart body (.txt) and the
					// reconstructed JSON response (.json).
					gMultipart := goldie.New(
						t,
						goldie.WithFixtureDir("testdata/queries_defer"),
						goldie.WithNameSuffix(".txt"),
						goldie.WithDiffEngine(goldie.ClassicDiff),
					)
					gFull := goldie.New(
						t,
						goldie.WithFixtureDir("testdata/queries_defer"),
						goldie.WithNameSuffix(".json"),
						goldie.WithDiffEngine(goldie.ClassicDiff),
					)

					testenv.Run(t, &testenv.Config{
						NoRetryClient: true,
						ModifyEngineExecutionConfiguration: func(cfg *config.EngineExecutionConfiguration) {
							// cfg.Debug.PrintIntermediateQueryPlans = true
							// cfg.Debug.PrintPlanningPaths = true
							// cfg.Debug.PrintNodeSuggestions = true
							// cfg.Debug.PrintOperationTransformations = true
						},
					}, func(t *testing.T, xEnv *testenv.Environment) {
						queryFilePath := filepath.Join(testDir, fmt.Sprintf("%s.graphql", name))
						t.Cleanup(func() {
							if t.Failed() {
								abs, _ := filepath.Abs(queryFilePath)
								t.Logf("query file: %s", abs)
							}
						})

						queryData, err := os.ReadFile(queryFilePath)
						require.NoError(t, err)

						payload := map[string]any{"query": string(queryData)}
						payloadData, err := json.Marshal(payload)
						require.NoError(t, err)

						req := xEnv.MakeGraphQLDeferRequest(http.MethodPost, bytes.NewReader(payloadData))
						res, err := xEnv.RouterClient.Do(req)
						require.NoError(t, err)
						defer func() { require.NoError(t, res.Body.Close()) }()

						assert.Equal(t, http.StatusOK, res.StatusCode)

						// defer could be fully discarded in case the query has
						// duplicate fields which are not deferred; in that case
						// the router answers with a plain (non-multipart) body.
						isMultipart := strings.HasPrefix(res.Header.Get("Content-Type"), "multipart/mixed")

						body, err := io.ReadAll(res.Body)
						require.NoError(t, err)

						// Flip to true locally to regenerate golden files.
						update := false

						t.Run("raw multipart body", func(t *testing.T) {
							if !update {
								gMultipart.Assert(t, name, body)
							} else {
								gMultipart.Update(t, name, body)
							}
						})

						var actual []byte

						if isMultipart {
							// Reconstruct the full response from chunks
							reconstructed, err := reconstructDeferResponse(body)
							require.NoError(t, err)
							actual = normalizeJSON(t, reconstructed)
						} else {
							actual = normalizeJSON(t, body)
						}

						t.Run("assert full response", func(t *testing.T) {
							if !update {
								gFull.Assert(t, name+"_reconstructed", actual)
							} else {
								gFull.Update(t, name+"_reconstructed", actual)
							}
						})

						t.Run("compare with response without defer", func(t *testing.T) {
							expected, err := os.ReadFile(gFull.GoldenFileName(t, group+"_original"))
							require.NoError(t, err)

							expected = normalizeWithKeysSort(t, expected)
							actual = normalizeWithKeysSort(t, actual)

							// manually assert to never update the original when the update flag is specified
							if diff := goldie.Diff(goldie.ClassicDiff, string(actual), string(expected)); diff != "" {
								t.Fatal(diff)
							}
						})
					})
				})
			}
		})
	}
}

// normalizeWithKeysSort re-encodes a JSON document with indentation. Because
// encoding/json writes map keys in sorted order, the result is deterministic
// regardless of the key ordering in the input.
func normalizeWithKeysSort(tb testing.TB, data []byte) []byte {
	parsed := map[string]interface{}{}
	require.NoError(tb, json.Unmarshal(data, &parsed))

	normalized, err := json.MarshalIndent(parsed, "", " ")
	require.NoError(tb, err)

	return normalized
}

// reconstructDeferResponse parses a multipart/mixed defer body, merges all
// incremental patches onto the initial data using astjson, and returns
// the complete JSON response (without transport fields like hasNext).
func reconstructDeferResponse(body []byte) ([]byte, error) {
parts, err := parseMultipartParts(body)
if err != nil {
return nil, err
}
if len(parts) == 0 {
return nil, fmt.Errorf("no parts in multipart response")
}

var p astjson.Parser
result, err := p.ParseBytes(parts[0])
if err != nil {
return nil, fmt.Errorf("parse initial part: %w", err)
}

for _, part := range parts[1:] {
partVal, err := p.ParseBytes(part)
if err != nil {
return nil, fmt.Errorf("parse part: %w", err)
}

for _, item := range partVal.GetArray("incremental") {
patchData := item.Get("data")
if patchData == nil {
continue
}

// Build path: prepend "data", then each segment from the path array.
pathKeys := []string{"data"}
for _, seg := range item.GetArray("path") {
switch seg.Type() {
case astjson.TypeNumber:
pathKeys = append(pathKeys, string(seg.MarshalTo(nil)))
default:
s, _ := seg.StringBytes()
pathKeys = append(pathKeys, string(s))
}
}

if err := mergeAtPath(result, patchData, pathKeys); err != nil {
return nil, fmt.Errorf("merge at path %v: %w", pathKeys, err)
}

// Collect errors from incremental items into root errors.
patchErrors := item.Get("errors")
if patchErrors != nil && patchErrors.Type() == astjson.TypeArray {
existing := result.Get("errors")
if existing == nil || existing.Type() == astjson.TypeNull {
result.Set(nil, "errors", patchErrors)
} else {
merged := appendArrayValues(existing, patchErrors)
result.Set(nil, "errors", merged)
}
}
Comment on lines +195 to +227
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

Don’t drop incremental errors when data is absent.

The early continue on Line 198 means an incremental entry that only carries errors never reaches the merge on Lines 217-227, so deferred error cases get reconstructed without those errors.

Suggested fix
 		for _, item := range partVal.GetArray("incremental") {
+			patchErrors := item.Get("errors")
+			if patchErrors != nil && patchErrors.Type() == astjson.TypeArray {
+				existing := result.Get("errors")
+				if existing == nil || existing.Type() == astjson.TypeNull {
+					result.Set(nil, "errors", patchErrors)
+				} else {
+					merged := appendArrayValues(existing, patchErrors)
+					result.Set(nil, "errors", merged)
+				}
+			}
+
 			patchData := item.Get("data")
 			if patchData == nil {
 				continue
 			}
@@
-			// Collect errors from incremental items into root errors.
-			patchErrors := item.Get("errors")
-			if patchErrors != nil && patchErrors.Type() == astjson.TypeArray {
-				existing := result.Get("errors")
-				if existing == nil || existing.Type() == astjson.TypeNull {
-					result.Set(nil, "errors", patchErrors)
-				} else {
-					merged := appendArrayValues(existing, patchErrors)
-					result.Set(nil, "errors", merged)
-				}
-			}
 		}
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
for _, item := range partVal.GetArray("incremental") {
patchData := item.Get("data")
if patchData == nil {
continue
}
// Build path: prepend "data", then each segment from the path array.
pathKeys := []string{"data"}
for _, seg := range item.GetArray("path") {
switch seg.Type() {
case astjson.TypeNumber:
pathKeys = append(pathKeys, string(seg.MarshalTo(nil)))
default:
s, _ := seg.StringBytes()
pathKeys = append(pathKeys, string(s))
}
}
if err := mergeAtPath(result, patchData, pathKeys); err != nil {
return nil, fmt.Errorf("merge at path %v: %w", pathKeys, err)
}
// Collect errors from incremental items into root errors.
patchErrors := item.Get("errors")
if patchErrors != nil && patchErrors.Type() == astjson.TypeArray {
existing := result.Get("errors")
if existing == nil || existing.Type() == astjson.TypeNull {
result.Set(nil, "errors", patchErrors)
} else {
merged := appendArrayValues(existing, patchErrors)
result.Set(nil, "errors", merged)
}
}
for _, item := range partVal.GetArray("incremental") {
patchErrors := item.Get("errors")
if patchErrors != nil && patchErrors.Type() == astjson.TypeArray {
existing := result.Get("errors")
if existing == nil || existing.Type() == astjson.TypeNull {
result.Set(nil, "errors", patchErrors)
} else {
merged := appendArrayValues(existing, patchErrors)
result.Set(nil, "errors", merged)
}
}
patchData := item.Get("data")
if patchData == nil {
continue
}
// Build path: prepend "data", then each segment from the path array.
pathKeys := []string{"data"}
for _, seg := range item.GetArray("path") {
switch seg.Type() {
case astjson.TypeNumber:
pathKeys = append(pathKeys, string(seg.MarshalTo(nil)))
default:
s, _ := seg.StringBytes()
pathKeys = append(pathKeys, string(s))
}
}
if err := mergeAtPath(result, patchData, pathKeys); err != nil {
return nil, fmt.Errorf("merge at path %v: %w", pathKeys, err)
}
}
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@router-tests/defer_test.go` around lines 195 - 227, The loop currently does
an early continue when patchData is nil which skips collecting incremental
"errors"; instead, change the loop so that the error-collection logic always
runs whether or not patchData exists: only skip the merge step when patchData ==
nil, but always execute the block that reads item.Get("errors") and merges into
result using existing := result.Get("errors"), appendArrayValues(existing,
patchErrors) and result.Set(nil, "errors", merged). Keep references to
mergeAtPath, appendArrayValues, patchData, patchErrors, pathKeys and result when
modifying the control flow so that path-building and mergeAtPath are conditional
but error aggregation always happens.

}
}

// Remove transport-only field.
result.Del("hasNext")

return result.MarshalTo(nil), nil
}

// mergeAtPath navigates result to the node at pathKeys and deep-merges patch
// there. A path whose intermediate segments do not exist is silently ignored;
// a missing leaf is set to patch directly.
func mergeAtPath(result, patch *astjson.Value, pathKeys []string) error {
	if len(pathKeys) == 0 {
		// Empty path: merge directly into the root value.
		_, _, err := astjson.MergeValues(nil, result, patch)
		return err
	}

	// Walk down to the parent of the target node.
	parent := result
	for _, key := range pathKeys[:len(pathKeys)-1] {
		child := parent.Get(key)
		if child == nil {
			// Intermediate node is absent; nothing to merge into.
			return nil
		}
		parent = child
	}

	leaf := pathKeys[len(pathKeys)-1]
	existing := parent.Get(leaf)
	if existing == nil {
		parent.Set(nil, leaf, patch)
		return nil
	}

	merged, _, err := astjson.MergeValues(nil, existing, patch)
	if err != nil {
		return err
	}
	parent.Set(nil, leaf, merged)
	return nil
}

// appendArrayValues returns a new TypeArray containing all elements of a followed by all of b.
func appendArrayValues(a, b *astjson.Value) *astjson.Value {
	combined := astjson.ArrayValue(nil)
	pos := 0
	for _, src := range []*astjson.Value{a, b} {
		for _, elem := range src.GetArray() {
			combined.SetArrayItem(nil, pos, elem)
			pos++
		}
	}
	return combined
}

// parseMultipartParts splits a multipart/mixed body on the --graphql boundary
// and returns the raw JSON bytes of each part.
func parseMultipartParts(body []byte) ([][]byte, error) {
	var chunks [][]byte
	for _, segment := range bytes.Split(body, []byte("\r\n--graphql")) {
		// The closing delimiter ("--graphql--") leaves a segment that begins
		// with "--"; skip it.
		if bytes.HasPrefix(segment, []byte("--")) {
			continue
		}
		// Part headers and the payload are separated by a blank line; a
		// segment without one (e.g. any preamble) carries no JSON.
		_, payload, ok := bytes.Cut(segment, []byte("\r\n\r\n"))
		if !ok {
			continue
		}
		if payload = bytes.TrimSpace(payload); len(payload) > 0 {
			chunks = append(chunks, payload)
		}
	}
	return chunks, nil
}
31 changes: 28 additions & 3 deletions router-tests/protocol/header_set_test.go
Original file line number Diff line number Diff line change
@@ -1,23 +1,26 @@
package integration

import (
"github.com/wundergraph/cosmo/router-tests/testutils"

"bytes"
"fmt"
"io"
"net/http"
"strings"
"testing"
"time"

"github.com/wundergraph/cosmo/router-tests/testutils"

"github.com/MicahParks/jwkset"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.uber.org/zap"

"github.com/wundergraph/cosmo/router-tests/jwks"
"github.com/wundergraph/cosmo/router-tests/testenv"
"github.com/wundergraph/cosmo/router/core"
"github.com/wundergraph/cosmo/router/pkg/authentication"
"github.com/wundergraph/cosmo/router/pkg/config"
"go.uber.org/zap"
)

func TestHeaderSet(t *testing.T) {
Expand Down Expand Up @@ -76,6 +79,28 @@ func TestHeaderSet(t *testing.T) {
require.Equal(t, fmt.Sprintf(`{"data":{"headerValue":"%s"}}`, employeeVal), res.Body)
})
})

t.Run("global request rule sets header for defer", func(t *testing.T) {
t.Parallel()
testenv.Run(t, &testenv.Config{
RouterOptions: global(customHeader, employeeVal),
}, func(t *testing.T, xEnv *testenv.Environment) {
payload := []byte(fmt.Sprintf(`{"query":"query { ... @defer { headerValue(name:\"%s\") } }"}`, customHeader))

req := xEnv.MakeGraphQLDeferRequest(http.MethodPost, bytes.NewReader(payload))
res, err := xEnv.RouterClient.Do(req)
require.NoError(t, err)
defer func() { require.NoError(t, res.Body.Close()) }()
assert.Equal(t, http.StatusOK, res.StatusCode)

body, err := io.ReadAll(res.Body)
require.NoError(t, err)

bodyString := string(body)
assert.Contains(t, bodyString, `{"data":{},"hasNext":true}`)
assert.Contains(t, bodyString, fmt.Sprintf(`{"incremental":[{"data":{"headerValue":"%s"},"path":[]}],"hasNext":false}`, employeeVal))
})
})
})

t.Run("ResponseSet", func(t *testing.T) {
Expand Down
Loading
Loading