From 4e24b10cc47906b070e3ac938218e5f61c11d8ac Mon Sep 17 00:00:00 2001 From: StarpTech Date: Tue, 18 Nov 2025 14:33:26 +0100 Subject: [PATCH 01/22] feat: improve input and argument usage --- graphqlmetrics/core/metrics_service.go | 20 +- .../graphqlmetrics/v1/graphqlmetrics.pb.go | 175 ++-- .../graphqlmetrics/v1/graphqlmetrics.proto | 4 + router/core/operation_planner.go | 6 +- router/demo.config.yaml | 6 +- .../graphqlmetrics/v1/graphqlmetrics.pb.go | 174 ++-- router/pkg/graphqlschemausage/schemausage.go | 338 ++++++- .../graphqlschemausage/schemausage_test.go | 855 ++++++++++++++++-- 8 files changed, 1349 insertions(+), 229 deletions(-) diff --git a/graphqlmetrics/core/metrics_service.go b/graphqlmetrics/core/metrics_service.go index 514dbbd44f..9a14991d4e 100644 --- a/graphqlmetrics/core/metrics_service.go +++ b/graphqlmetrics/core/metrics_service.go @@ -348,8 +348,17 @@ func (s *MetricsService) appendUsageMetrics( } } + fmt.Println(schemaUsage.OperationInfo.Name) + for _, argumentUsage := range schemaUsage.ArgumentMetrics { + // Sort stable for fields where the order doesn't matter + // This reduce cardinality and improves compression + + sort.SliceStable(argumentUsage.SubgraphIDs, func(i, j int) bool { + return argumentUsage.SubgraphIDs[i] < argumentUsage.SubgraphIDs[j] + }) + err := metricBatch.Append( insertTime, claims.OrganizationID, @@ -366,7 +375,7 @@ func (s *MetricsService) appendUsageMetrics( schemaUsage.ClientInfo.Version, strconv.FormatInt(int64(schemaUsage.RequestInfo.StatusCode), 10), schemaUsage.RequestInfo.Error, - []string{}, + argumentUsage.SubgraphIDs, true, false, schemaUsage.Attributes, @@ -379,6 +388,13 @@ func (s *MetricsService) appendUsageMetrics( for _, inputUsage := range schemaUsage.InputMetrics { + // Sort stable for fields where the order doesn't matter + // This reduce cardinality and improves compression + + sort.SliceStable(inputUsage.SubgraphIDs, func(i, j int) bool { + return inputUsage.SubgraphIDs[i] < 
inputUsage.SubgraphIDs[j] + }) + err := metricBatch.Append( insertTime, claims.OrganizationID, @@ -395,7 +411,7 @@ func (s *MetricsService) appendUsageMetrics( schemaUsage.ClientInfo.Version, strconv.FormatInt(int64(schemaUsage.RequestInfo.StatusCode), 10), schemaUsage.RequestInfo.Error, - []string{}, + inputUsage.SubgraphIDs, false, true, schemaUsage.Attributes, diff --git a/graphqlmetrics/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go b/graphqlmetrics/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go index 107671e28f..d52274c371 100644 --- a/graphqlmetrics/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go +++ b/graphqlmetrics/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go @@ -579,6 +579,8 @@ type ArgumentUsageInfo struct { Count uint64 `protobuf:"varint,3,opt,name=Count,proto3" json:"Count,omitempty"` // NamedType is the underlying type of the argument NamedType string `protobuf:"bytes,4,opt,name=NamedType,proto3" json:"NamedType,omitempty"` + // SubgraphIDs is the list of datasource IDs (e.g subgraph ID) that the argument is used from + SubgraphIDs []string `protobuf:"bytes,5,rep,name=SubgraphIDs,proto3" json:"SubgraphIDs,omitempty"` } func (x *ArgumentUsageInfo) Reset() { @@ -641,6 +643,13 @@ func (x *ArgumentUsageInfo) GetNamedType() string { return "" } +func (x *ArgumentUsageInfo) GetSubgraphIDs() []string { + if x != nil { + return x.SubgraphIDs + } + return nil +} + type InputUsageInfo struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -656,6 +665,8 @@ type InputUsageInfo struct { NamedType string `protobuf:"bytes,4,opt,name=NamedType,proto3" json:"NamedType,omitempty"` // EnumValues is an empty list if the input field is not an enum, otherwise it contains the list of used enum values EnumValues []string `protobuf:"bytes,5,rep,name=EnumValues,proto3" json:"EnumValues,omitempty"` + // SubgraphIDs is the list of datasource IDs (e.g subgraph ID) that the input is used from + SubgraphIDs []string 
`protobuf:"bytes,6,rep,name=SubgraphIDs,proto3" json:"SubgraphIDs,omitempty"` } func (x *InputUsageInfo) Reset() { @@ -725,6 +736,13 @@ func (x *InputUsageInfo) GetEnumValues() []string { return nil } +func (x *InputUsageInfo) GetSubgraphIDs() []string { + if x != nil { + return x.SubgraphIDs + } + return nil +} + type PublishGraphQLRequestMetricsRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -994,91 +1012,96 @@ var file_wg_cosmo_graphqlmetrics_v1_graphqlmetrics_proto_rawDesc = []byte{ 0x72, 0x65, 0x63, 0x74, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x16, 0x49, 0x6e, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, - 0x22, 0x77, 0x0a, 0x11, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x55, 0x73, 0x61, 0x67, - 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x54, 0x79, 0x70, - 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x54, 0x79, 0x70, - 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x4e, - 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, - 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x22, 0x94, 0x01, 0x0a, 0x0e, 0x49, 0x6e, - 0x70, 0x75, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, - 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, 0x61, 0x74, 0x68, - 0x12, 0x1a, 0x0a, 0x08, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x08, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, - 0x43, 
0x6f, 0x75, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x43, 0x6f, 0x75, - 0x6e, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, - 0x12, 0x1e, 0x0a, 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, - 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, - 0x22, 0x74, 0x0a, 0x23, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x47, 0x72, 0x61, 0x70, 0x68, + 0x22, 0x99, 0x01, 0x0a, 0x11, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x55, 0x73, 0x61, + 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x54, 0x79, + 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x54, 0x79, + 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1c, 0x0a, 0x09, + 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x53, 0x75, + 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x22, 0xb6, 0x01, 0x0a, + 0x0e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, + 0x12, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, + 0x61, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, + 0x14, 0x0a, 0x05, 0x43, 0x6f, 0x75, 0x6e, 
0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, + 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, + 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, + 0x79, 0x70, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, + 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, + 0x44, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, + 0x70, 0x68, 0x49, 0x44, 0x73, 0x22, 0x74, 0x0a, 0x23, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, + 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, + 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x4d, 0x0a, 0x0b, + 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x2b, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, + 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x53, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x0b, + 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x22, 0x28, 0x0a, 0x26, 0x50, + 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, + 0x6f, 0x76, 0x65, 0x72, 0x61, 0x67, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x89, 0x01, 0x0a, 0x2d, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, + 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x4d, 0x0a, 0x0b, 0x53, 0x63, 0x68, 
0x65, 0x6d, - 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x77, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x58, 0x0a, 0x0b, 0x41, 0x67, 0x67, 0x72, 0x65, + 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x0b, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x22, 0x28, 0x0a, 0x26, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, - 0x68, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x76, 0x65, 0x72, 0x61, - 0x67, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x89, 0x01, 0x0a, 0x2d, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, + 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0b, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x22, 0x30, 0x0a, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x58, 0x0a, 0x0b, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, - 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, - 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, - 0x49, 0x6e, 0x66, 0x6f, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, - 0x0b, 0x41, 0x67, 0x67, 0x72, 
0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x30, 0x0a, 0x2e, - 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, - 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, - 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2a, 0x3a, - 0x0a, 0x0d, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, - 0x09, 0x0a, 0x05, 0x51, 0x55, 0x45, 0x52, 0x59, 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x4d, 0x55, - 0x54, 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x01, 0x12, 0x10, 0x0a, 0x0c, 0x53, 0x55, 0x42, 0x53, - 0x43, 0x52, 0x49, 0x50, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x02, 0x32, 0xf5, 0x02, 0x0a, 0x15, 0x47, - 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x53, 0x65, 0x72, - 0x76, 0x69, 0x63, 0x65, 0x12, 0x9e, 0x01, 0x0a, 0x15, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, - 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x3f, + 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x2a, 0x3a, 0x0a, 0x0d, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x54, 0x79, 0x70, 0x65, 0x12, 0x09, 0x0a, 0x05, 0x51, 0x55, 0x45, 0x52, 0x59, 0x10, 0x00, 0x12, + 0x0c, 0x0a, 0x08, 0x4d, 0x55, 0x54, 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x01, 0x12, 0x10, 0x0a, + 0x0c, 0x53, 0x55, 0x42, 0x53, 0x43, 0x52, 0x49, 0x50, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x02, 0x32, + 0xf5, 0x02, 0x0a, 0x15, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, + 0x63, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x9e, 0x01, 0x0a, 0x15, 0x50, 0x75, + 0x62, 0x6c, 0x69, 0x73, 0x68, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, + 0x69, 0x63, 0x73, 0x12, 0x3f, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, + 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 
0x63, 0x73, 0x2e, 0x76, 0x31, + 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x42, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, + 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, + 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x43, 0x6f, 0x76, 0x65, 0x72, 0x61, 0x67, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0xba, 0x01, 0x0a, 0x1f, 0x50, + 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, + 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x49, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, - 0x69, 0x73, 0x68, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x42, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, - 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x62, - 0x6c, 0x69, 0x73, 0x68, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x76, - 0x65, 0x72, 0x61, 0x67, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0xba, 0x01, 0x0a, 0x1f, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, - 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, - 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x49, 0x2e, 0x77, 0x67, 0x2e, 0x63, + 0x69, 0x73, 
0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, + 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, + 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x4a, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x4a, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, - 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, - 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, - 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x00, 0x42, 0xa3, 0x02, 0x0a, 0x1e, 0x63, 0x6f, 0x6d, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, - 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, - 0x63, 0x73, 0x2e, 0x76, 0x31, 0x42, 0x13, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, - 0x74, 0x72, 0x69, 0x63, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x61, 0x67, 0x69, - 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x77, 0x75, 0x6e, 0x64, 0x65, 0x72, 0x67, - 0x72, 0x61, 0x70, 0x68, 0x2f, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2f, 0x67, 0x72, 0x61, 0x70, 0x68, - 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x2f, 0x77, 0x67, 0x2f, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2f, 0x67, 0x72, 0x61, - 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 
0x63, 0x73, 0x2f, 0x76, 0x31, 0x3b, 0x67, - 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x76, 0x31, 0xa2, - 0x02, 0x03, 0x57, 0x43, 0x47, 0xaa, 0x02, 0x1a, 0x57, 0x67, 0x2e, 0x43, 0x6f, 0x73, 0x6d, 0x6f, - 0x2e, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, - 0x56, 0x31, 0xca, 0x02, 0x1a, 0x57, 0x67, 0x5c, 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x5c, 0x47, 0x72, - 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x5c, 0x56, 0x31, 0xe2, - 0x02, 0x26, 0x57, 0x67, 0x5c, 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x5c, 0x47, 0x72, 0x61, 0x70, 0x68, - 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x5c, 0x56, 0x31, 0x5c, 0x47, 0x50, 0x42, - 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x1d, 0x57, 0x67, 0x3a, 0x3a, 0x43, - 0x6f, 0x73, 0x6d, 0x6f, 0x3a, 0x3a, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, - 0x72, 0x69, 0x63, 0x73, 0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0xa3, 0x02, 0x0a, 0x1e, 0x63, 0x6f, 0x6d, 0x2e, + 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, + 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x42, 0x13, 0x47, 0x72, 0x61, 0x70, + 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, + 0x01, 0x5a, 0x61, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x77, 0x75, + 0x6e, 0x64, 0x65, 0x72, 0x67, 0x72, 0x61, 0x70, 0x68, 0x2f, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2f, + 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2f, 0x67, + 0x65, 0x6e, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x77, 0x67, 0x2f, 0x63, 0x6f, 0x73, 0x6d, + 0x6f, 0x2f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 
0x73, + 0x2f, 0x76, 0x31, 0x3b, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, + 0x63, 0x73, 0x76, 0x31, 0xa2, 0x02, 0x03, 0x57, 0x43, 0x47, 0xaa, 0x02, 0x1a, 0x57, 0x67, 0x2e, + 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, + 0x72, 0x69, 0x63, 0x73, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x1a, 0x57, 0x67, 0x5c, 0x43, 0x6f, 0x73, + 0x6d, 0x6f, 0x5c, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, + 0x73, 0x5c, 0x56, 0x31, 0xe2, 0x02, 0x26, 0x57, 0x67, 0x5c, 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x5c, + 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x5c, 0x56, + 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x1d, + 0x57, 0x67, 0x3a, 0x3a, 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x3a, 0x3a, 0x47, 0x72, 0x61, 0x70, 0x68, + 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.proto b/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.proto index 29800621ac..6aeb7a3be4 100644 --- a/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.proto +++ b/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.proto @@ -89,6 +89,8 @@ message ArgumentUsageInfo { uint64 Count = 3; // NamedType is the underlying type of the argument string NamedType = 4; + // SubgraphIDs is the list of datasource IDs (e.g subgraph ID) that the argument is used from + repeated string SubgraphIDs = 5; } message InputUsageInfo { @@ -102,6 +104,8 @@ message InputUsageInfo { string NamedType = 4; // EnumValues is an empty list if the input field is not an enum, otherwise it contains the list of used enum values repeated string EnumValues = 5; + // SubgraphIDs is the list of datasource IDs (e.g subgraph ID) that the input is used from + repeated string SubgraphIDs = 6; } message PublishGraphQLRequestMetricsRequest { diff 
--git a/router/core/operation_planner.go b/router/core/operation_planner.go index 40c8d6701e..ec58b45091 100644 --- a/router/core/operation_planner.go +++ b/router/core/operation_planner.go @@ -83,7 +83,7 @@ func (p *OperationPlanner) preparePlan(ctx *operationContext) (*planWithMetaData if p.trackUsageInfo { out.typeFieldUsageInfo = graphqlschemausage.GetTypeFieldUsageInfo(preparedPlan) - out.argumentUsageInfo, err = graphqlschemausage.GetArgumentUsageInfo(&doc, p.executor.RouterSchema) + out.argumentUsageInfo, err = graphqlschemausage.GetArgumentUsageInfo(&doc, p.executor.RouterSchema, preparedPlan) if err != nil { return nil, err } @@ -114,7 +114,7 @@ func (p *OperationPlanner) plan(opContext *operationContext, options PlanOptions if options.TrackSchemaUsageInfo { opContext.typeFieldUsageInfo = prepared.typeFieldUsageInfo opContext.argumentUsageInfo = prepared.argumentUsageInfo - opContext.inputUsageInfo, err = graphqlschemausage.GetInputUsageInfo(prepared.operationDocument, p.executor.RouterSchema, opContext.variables) + opContext.inputUsageInfo, err = graphqlschemausage.GetInputUsageInfo(prepared.operationDocument, p.executor.RouterSchema, opContext.variables, prepared.preparedPlan, opContext.remapVariables) if err != nil { return err } @@ -152,7 +152,7 @@ func (p *OperationPlanner) plan(opContext *operationContext, options PlanOptions if options.TrackSchemaUsageInfo { opContext.typeFieldUsageInfo = opContext.preparedPlan.typeFieldUsageInfo opContext.argumentUsageInfo = opContext.preparedPlan.argumentUsageInfo - opContext.inputUsageInfo, err = graphqlschemausage.GetInputUsageInfo(opContext.preparedPlan.operationDocument, p.executor.RouterSchema, opContext.variables) + opContext.inputUsageInfo, err = graphqlschemausage.GetInputUsageInfo(opContext.preparedPlan.operationDocument, p.executor.RouterSchema, opContext.variables, opContext.preparedPlan.preparedPlan, opContext.remapVariables) if err != nil { return err } diff --git a/router/demo.config.yaml 
b/router/demo.config.yaml index 9a72e31de2..8390baa298 100644 --- a/router/demo.config.yaml +++ b/router/demo.config.yaml @@ -5,6 +5,10 @@ version: "1" +graphql_metrics: + enabled: true + collector_endpoint: http://localhost:4005 + events: providers: nats: @@ -19,4 +23,4 @@ events: redis: - id: my-redis urls: - - "redis://localhost:6379/2" \ No newline at end of file + - "redis://localhost:6379/2" \ No newline at end of file diff --git a/router/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go b/router/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go index 7127c79946..5a000c7050 100644 --- a/router/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go +++ b/router/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go @@ -579,6 +579,8 @@ type ArgumentUsageInfo struct { Count uint64 `protobuf:"varint,3,opt,name=Count,proto3" json:"Count,omitempty"` // NamedType is the underlying type of the argument NamedType string `protobuf:"bytes,4,opt,name=NamedType,proto3" json:"NamedType,omitempty"` + // SubgraphIDs is the list of datasource IDs (e.g subgraph ID) that the argument is used from + SubgraphIDs []string `protobuf:"bytes,5,rep,name=SubgraphIDs,proto3" json:"SubgraphIDs,omitempty"` } func (x *ArgumentUsageInfo) Reset() { @@ -641,6 +643,13 @@ func (x *ArgumentUsageInfo) GetNamedType() string { return "" } +func (x *ArgumentUsageInfo) GetSubgraphIDs() []string { + if x != nil { + return x.SubgraphIDs + } + return nil +} + type InputUsageInfo struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -656,6 +665,8 @@ type InputUsageInfo struct { NamedType string `protobuf:"bytes,4,opt,name=NamedType,proto3" json:"NamedType,omitempty"` // EnumValues is an empty list if the input field is not an enum, otherwise it contains the list of used enum values EnumValues []string `protobuf:"bytes,5,rep,name=EnumValues,proto3" json:"EnumValues,omitempty"` + // SubgraphIDs is the list of datasource IDs (e.g subgraph ID) that the input is used 
from + SubgraphIDs []string `protobuf:"bytes,6,rep,name=SubgraphIDs,proto3" json:"SubgraphIDs,omitempty"` } func (x *InputUsageInfo) Reset() { @@ -725,6 +736,13 @@ func (x *InputUsageInfo) GetEnumValues() []string { return nil } +func (x *InputUsageInfo) GetSubgraphIDs() []string { + if x != nil { + return x.SubgraphIDs + } + return nil +} + type PublishGraphQLRequestMetricsRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -994,91 +1012,95 @@ var file_wg_cosmo_graphqlmetrics_v1_graphqlmetrics_proto_rawDesc = []byte{ 0x72, 0x65, 0x63, 0x74, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x16, 0x49, 0x6e, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, - 0x22, 0x77, 0x0a, 0x11, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x55, 0x73, 0x61, 0x67, - 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x54, 0x79, 0x70, - 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x54, 0x79, 0x70, - 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x4e, - 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, - 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x22, 0x94, 0x01, 0x0a, 0x0e, 0x49, 0x6e, - 0x70, 0x75, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, - 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, 0x61, 0x74, 0x68, - 0x12, 0x1a, 0x0a, 0x08, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x08, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 
0x14, 0x0a, 0x05, - 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x43, 0x6f, 0x75, - 0x6e, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, - 0x12, 0x1e, 0x0a, 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, - 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, - 0x22, 0x74, 0x0a, 0x23, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x47, 0x72, 0x61, 0x70, 0x68, + 0x22, 0x99, 0x01, 0x0a, 0x11, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x55, 0x73, 0x61, + 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x54, 0x79, + 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x54, 0x79, + 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1c, 0x0a, 0x09, + 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x53, 0x75, + 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x22, 0xb6, 0x01, 0x0a, + 0x0e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, + 0x12, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, + 0x61, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, + 0x14, 0x0a, 0x05, 
0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, + 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, + 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, + 0x79, 0x70, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, + 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, + 0x44, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, + 0x70, 0x68, 0x49, 0x44, 0x73, 0x22, 0x74, 0x0a, 0x23, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, + 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, + 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x4d, 0x0a, 0x0b, + 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x2b, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, + 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x53, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x0b, + 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x22, 0x28, 0x0a, 0x26, 0x50, + 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, + 0x6f, 0x76, 0x65, 0x72, 0x61, 0x67, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x89, 0x01, 0x0a, 0x2d, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, + 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x4d, 0x0a, 
0x0b, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x77, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x58, 0x0a, 0x0b, 0x41, 0x67, 0x67, 0x72, 0x65, + 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x0b, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x22, 0x28, 0x0a, 0x26, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, - 0x68, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x76, 0x65, 0x72, 0x61, - 0x67, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x89, 0x01, 0x0a, 0x2d, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, + 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0b, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x22, 0x30, 0x0a, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x58, 0x0a, 0x0b, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, - 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, - 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, - 0x49, 0x6e, 0x66, 0x6f, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, - 0x0b, 
0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x30, 0x0a, 0x2e, - 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, - 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, - 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2a, 0x3a, - 0x0a, 0x0d, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, - 0x09, 0x0a, 0x05, 0x51, 0x55, 0x45, 0x52, 0x59, 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x4d, 0x55, - 0x54, 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x01, 0x12, 0x10, 0x0a, 0x0c, 0x53, 0x55, 0x42, 0x53, - 0x43, 0x52, 0x49, 0x50, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x02, 0x32, 0xf5, 0x02, 0x0a, 0x15, 0x47, - 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x53, 0x65, 0x72, - 0x76, 0x69, 0x63, 0x65, 0x12, 0x9e, 0x01, 0x0a, 0x15, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, - 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x3f, + 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x2a, 0x3a, 0x0a, 0x0d, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x54, 0x79, 0x70, 0x65, 0x12, 0x09, 0x0a, 0x05, 0x51, 0x55, 0x45, 0x52, 0x59, 0x10, 0x00, 0x12, + 0x0c, 0x0a, 0x08, 0x4d, 0x55, 0x54, 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x01, 0x12, 0x10, 0x0a, + 0x0c, 0x53, 0x55, 0x42, 0x53, 0x43, 0x52, 0x49, 0x50, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x02, 0x32, + 0xf5, 0x02, 0x0a, 0x15, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, + 0x63, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x9e, 0x01, 0x0a, 0x15, 0x50, 0x75, + 0x62, 0x6c, 0x69, 0x73, 0x68, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, + 0x69, 0x63, 0x73, 0x12, 0x3f, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, + 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 
0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, + 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x42, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, + 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, + 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x43, 0x6f, 0x76, 0x65, 0x72, 0x61, 0x67, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0xba, 0x01, 0x0a, 0x1f, 0x50, + 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, + 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x49, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, - 0x69, 0x73, 0x68, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x42, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, - 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x62, - 0x6c, 0x69, 0x73, 0x68, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x76, - 0x65, 0x72, 0x61, 0x67, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0xba, 0x01, 0x0a, 0x1f, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, - 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, - 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x49, 0x2e, 0x77, 0x67, 
0x2e, 0x63, + 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, + 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, + 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x4a, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x4a, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, - 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, - 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, - 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x00, 0x42, 0x9b, 0x02, 0x0a, 0x1e, 0x63, 0x6f, 0x6d, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, - 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, - 0x63, 0x73, 0x2e, 0x76, 0x31, 0x42, 0x13, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, - 0x74, 0x72, 0x69, 0x63, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x59, 0x67, 0x69, - 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x77, 0x75, 0x6e, 0x64, 0x65, 0x72, 0x67, - 0x72, 0x61, 0x70, 0x68, 0x2f, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2f, 0x72, 0x6f, 0x75, 0x74, 0x65, - 0x72, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x77, 0x67, 0x2f, 0x63, - 0x6f, 0x73, 0x6d, 0x6f, 0x2f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, - 0x69, 0x63, 0x73, 0x2f, 0x76, 
0x31, 0x3b, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, - 0x74, 0x72, 0x69, 0x63, 0x73, 0x76, 0x31, 0xa2, 0x02, 0x03, 0x57, 0x43, 0x47, 0xaa, 0x02, 0x1a, - 0x57, 0x67, 0x2e, 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, - 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x1a, 0x57, 0x67, 0x5c, - 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x5c, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, - 0x72, 0x69, 0x63, 0x73, 0x5c, 0x56, 0x31, 0xe2, 0x02, 0x26, 0x57, 0x67, 0x5c, 0x43, 0x6f, 0x73, - 0x6d, 0x6f, 0x5c, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, - 0x73, 0x5c, 0x56, 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0xea, 0x02, 0x1d, 0x57, 0x67, 0x3a, 0x3a, 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x3a, 0x3a, 0x47, 0x72, - 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x3a, 0x3a, 0x56, 0x31, - 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x9b, 0x02, 0x0a, 0x1e, 0x63, 0x6f, 0x6d, 0x2e, + 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, + 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x42, 0x13, 0x47, 0x72, 0x61, 0x70, + 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, + 0x01, 0x5a, 0x59, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x77, 0x75, + 0x6e, 0x64, 0x65, 0x72, 0x67, 0x72, 0x61, 0x70, 0x68, 0x2f, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2f, + 0x72, 0x6f, 0x75, 0x74, 0x65, 0x72, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x2f, 0x77, 0x67, 0x2f, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, + 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2f, 0x76, 0x31, 0x3b, 0x67, 0x72, 0x61, 0x70, + 0x68, 0x71, 0x6c, 
0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x76, 0x31, 0xa2, 0x02, 0x03, 0x57, + 0x43, 0x47, 0xaa, 0x02, 0x1a, 0x57, 0x67, 0x2e, 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x47, 0x72, + 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x56, 0x31, 0xca, + 0x02, 0x1a, 0x57, 0x67, 0x5c, 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x5c, 0x47, 0x72, 0x61, 0x70, 0x68, + 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x5c, 0x56, 0x31, 0xe2, 0x02, 0x26, 0x57, + 0x67, 0x5c, 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x5c, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, + 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x5c, 0x56, 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x1d, 0x57, 0x67, 0x3a, 0x3a, 0x43, 0x6f, 0x73, 0x6d, + 0x6f, 0x3a, 0x3a, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, + 0x73, 0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/router/pkg/graphqlschemausage/schemausage.go b/router/pkg/graphqlschemausage/schemausage.go index 4ecfde37ec..59ab5db06b 100644 --- a/router/pkg/graphqlschemausage/schemausage.go +++ b/router/pkg/graphqlschemausage/schemausage.go @@ -1,7 +1,34 @@ +// Package graphqlschemausage extracts schema usage metrics from GraphQL operations, +// associating each type, field, argument, and input with the SubgraphIDs that provide them. +// +// # Architecture +// +// The challenge: Execution plans optimize for execution, not analysis. Variables are resolved +// away, and only final field selections remain. To track usage, we must correlate three sources: +// +// 1. Execution Plan - contains field → subgraph mappings (via Source.IDs) +// 2. Operation AST - contains argument and variable usage +// 3. Variable Values - contains actual input data (nested objects, scalars, etc.) 
+// +// We extract subgraph IDs by building intermediate mappings: +// +// plan → field paths → variables → input fields +// +// This enables accurate federated schema usage tracking, showing which subgraphs serve which +// parts of queries, even through variables and deeply nested input objects. +// +// # Usage Tracking Types +// +// 1. TYPE & FIELD: Direct extraction from execution plan (has Source.IDs) +// 2. ARGUMENT: Correlate AST arguments with plan field paths +// 3. INPUT: Build field→subgraph and variable→subgraph maps, then traverse variable values +// +// Special handling: Variable remapping for normalized operations (e.g., $a → $criteria), +// null value skipping (nulls don't represent actual usage). package graphqlschemausage import ( - "slices" + "strings" "github.com/wundergraph/astjson" "github.com/wundergraph/graphql-go-tools/v2/pkg/ast" @@ -29,6 +56,7 @@ type TypeFieldMetrics []*TypeFieldUsageInfo // IntoGraphQLMetrics converts the TypeFieldMetrics into a []*graphqlmetrics.TypeFieldUsageInfo func (t TypeFieldMetrics) IntoGraphQLMetrics() []*graphqlmetrics.TypeFieldUsageInfo { + // Pre-allocate slice with exact capacity metrics := make([]*graphqlmetrics.TypeFieldUsageInfo, len(t)) for i, info := range t { metrics[i] = info.IntoGraphQLMetrics() @@ -66,11 +94,23 @@ type typeFieldUsageInfoVisitor struct { func (p *typeFieldUsageInfoVisitor) visitNode(node resolve.Node, path []string) { switch t := node.(type) { case *resolve.Object: + // Pre-allocate the typeFieldUsageInfo slice with a reasonable capacity + // to reduce allocations during traversal + if p.typeFieldUsageInfo == nil { + // Estimate: average query has ~20-50 fields + p.typeFieldUsageInfo = make([]*TypeFieldUsageInfo, 0, 32) + } + for _, field := range t.Fields { if field.Info == nil { continue } - pathCopy := slices.Clone(append(path, field.Info.Name)) + + // create a new slice with exact capacity and copy elements + pathCopy := make([]string, len(path)+1) + copy(pathCopy, path) + 
pathCopy[len(path)] = field.Info.Name + p.typeFieldUsageInfo = append(p.typeFieldUsageInfo, &TypeFieldUsageInfo{ Path: pathCopy, ParentTypeNames: field.Info.ParentTypeNames, @@ -94,15 +134,172 @@ func (p *typeFieldUsageInfoVisitor) visitNode(node resolve.Node, path []string) } } -func GetArgumentUsageInfo(operation, definition *ast.Document) ([]*graphqlmetrics.ArgumentUsageInfo, error) { +// buildFieldSubgraphIDMap extracts field → subgraph mappings from the execution plan. +// Returns a map where keys are dot-separated paths (e.g., "user.orders") and values are subgraph IDs. +func buildFieldSubgraphIDMap(operationPlan plan.Plan) map[string][]string { + collector := &subgraphIDCollector{ + fieldMap: make(map[string][]string), + pathStack: make([]string, 0, 8), // Pre-allocate for typical depth + } + switch p := operationPlan.(type) { + case *plan.SynchronousResponsePlan: + collector.collectFromNode(p.Response.Data) + case *plan.SubscriptionResponsePlan: + collector.collectFromNode(p.Response.Response.Data) + } + return collector.fieldMap +} + +type subgraphIDCollector struct { + fieldMap map[string][]string + pathStack []string // Reusable path stack to avoid allocations +} + +func (c *subgraphIDCollector) collectFromNode(node resolve.Node) { + switch t := node.(type) { + case *resolve.Object: + for _, field := range t.Fields { + if field.Info == nil { + continue + } + // Push field name onto stack + c.pathStack = append(c.pathStack, field.Info.Name) + + // Store the subgraph IDs for this field path + pathKey := pathToKey(c.pathStack) + c.fieldMap[pathKey] = field.Info.Source.IDs + + c.collectFromNode(field.Value) + + // Pop field name from stack + c.pathStack = c.pathStack[:len(c.pathStack)-1] + } + case *resolve.Array: + c.collectFromNode(t.Item) + } +} + +// pathToKey converts a path slice to a string key for map lookups. 
+func pathToKey(path []string) string { + return strings.Join(path, ".") +} + +// buildVariableSubgraphMap maps variable names to subgraph IDs by analyzing which fields use them. +// Walks the operation AST to find variable usage (e.g., user(id: $userId)), then looks up +// the field's subgraph IDs from fieldSubgraphMap. Merges IDs if a variable is used by multiple fields. +func buildVariableSubgraphMap(operation, definition *ast.Document, fieldSubgraphMap map[string][]string) map[string][]string { + variableMap := make(map[string][]string) + walker := astvisitor.NewWalker(48) + collector := &variableSubgraphCollector{ + walker: &walker, + operation: operation, + definition: definition, + fieldSubgraphMap: fieldSubgraphMap, + variableMap: variableMap, + currentPath: make([]string, 0, 8), + } + walker.RegisterEnterFieldVisitor(collector) + walker.RegisterLeaveFieldVisitor(collector) + walker.RegisterEnterArgumentVisitor(collector) + rep := &operationreport.Report{} + walker.Walk(operation, definition, rep) + return variableMap +} + +type variableSubgraphCollector struct { + walker *astvisitor.Walker + operation *ast.Document + definition *ast.Document + fieldSubgraphMap map[string][]string + variableMap map[string][]string + currentPath []string +} + +// EnterField tracks the current field path for argument processing. +func (v *variableSubgraphCollector) EnterField(ref int) { + fieldName := v.operation.FieldNameString(ref) + v.currentPath = append(v.currentPath, fieldName) +} + +// LeaveField pops the field from the path when leaving. +func (v *variableSubgraphCollector) LeaveField(_ int) { + if len(v.currentPath) > 0 { + v.currentPath = v.currentPath[:len(v.currentPath)-1] + } +} + +// EnterArgument detects variable usage and associates variables with subgraph IDs. +// For user(id: $userId), maps "userId" → subgraph IDs of "user" field. 
+func (v *variableSubgraphCollector) EnterArgument(ref int) { + arg := v.operation.Arguments[ref] + + // Only process arguments that use variables (not inline values) + if arg.Value.Kind != ast.ValueKindVariable { + return + } + + varName := v.operation.VariableValueNameString(arg.Value.Ref) + if varName == "" { + return + } + + // Get subgraph IDs for the current field path + if len(v.currentPath) > 0 { + pathKey := pathToKey(v.currentPath) + if subgraphIDs, exists := v.fieldSubgraphMap[pathKey]; exists { + // Merge subgraph IDs for this variable + // (in case the variable is used by multiple fields from different subgraphs) + v.variableMap[varName] = mergeSubgraphIDs(v.variableMap[varName], subgraphIDs) + } + } +} + +// mergeSubgraphIDs combines two slices of subgraph IDs, removing duplicates. +// Used when a variable is used by fields from different subgraphs. +func mergeSubgraphIDs(a, b []string) []string { + if len(a) == 0 { + return b + } + if len(b) == 0 { + return a + } + + seen := make(map[string]bool, len(a)+len(b)) + result := make([]string, 0, len(a)+len(b)) + + for _, id := range a { + if !seen[id] { + seen[id] = true + result = append(result, id) + } + } + + for _, id := range b { + if !seen[id] { + seen[id] = true + result = append(result, id) + } + } + + return result +} + +func GetArgumentUsageInfo(operation, definition *ast.Document, operationPlan plan.Plan) ([]*graphqlmetrics.ArgumentUsageInfo, error) { + // Build a mapping of field paths to their subgraph IDs from the plan + subgraphIDMap := buildFieldSubgraphIDMap(operationPlan) + walker := astvisitor.NewWalker(48) visitor := &argumentUsageInfoVisitor{ - definition: definition, - operation: operation, - walker: &walker, + definition: definition, + operation: operation, + walker: &walker, + subgraphIDMap: subgraphIDMap, + // Pre-allocate with reasonable capacity to reduce allocations + usage: make([]*graphqlmetrics.ArgumentUsageInfo, 0, 16), } walker.RegisterEnterArgumentVisitor(visitor) 
walker.RegisterEnterFieldVisitor(visitor) + walker.RegisterLeaveFieldVisitor(visitor) rep := &operationreport.Report{} walker.Walk(operation, definition, rep) if rep.HasErrors() { @@ -115,11 +312,23 @@ type argumentUsageInfoVisitor struct { walker *astvisitor.Walker definition, operation *ast.Document fieldEnclosingNode ast.Node + subgraphIDMap map[string][]string + currentPath []string usage []*graphqlmetrics.ArgumentUsageInfo } -func (a *argumentUsageInfoVisitor) EnterField(_ int) { +func (a *argumentUsageInfoVisitor) EnterField(ref int) { a.fieldEnclosingNode = a.walker.EnclosingTypeDefinition + // Track the current field path for subgraph ID lookup + fieldName := a.operation.FieldNameString(ref) + a.currentPath = append(a.currentPath, fieldName) +} + +func (a *argumentUsageInfoVisitor) LeaveField(_ int) { + // Remove the current field from the path when leaving + if len(a.currentPath) > 0 { + a.currentPath = a.currentPath[:len(a.currentPath)-1] + } } func (a *argumentUsageInfoVisitor) EnterArgument(ref int) { @@ -136,55 +345,126 @@ func (a *argumentUsageInfoVisitor) EnterArgument(ref int) { } argType := a.definition.InputValueDefinitionType(argDef) typeName := a.definition.ResolveTypeNameBytes(argType) + + // Look up subgraph IDs for the current field path + var subgraphIDs []string + if len(a.currentPath) > 0 { + pathKey := pathToKey(a.currentPath) + if ids, exists := a.subgraphIDMap[pathKey]; exists { + subgraphIDs = ids + } + } + a.usage = append(a.usage, &graphqlmetrics.ArgumentUsageInfo{ - Path: []string{string(fieldName), string(argName)}, - TypeName: string(enclosingTypeName), - NamedType: string(typeName), + Path: []string{string(fieldName), string(argName)}, + TypeName: string(enclosingTypeName), + NamedType: string(typeName), + SubgraphIDs: subgraphIDs, }) } -func GetInputUsageInfo(operation, definition *ast.Document, variables *astjson.Value) ([]*graphqlmetrics.InputUsageInfo, error) { +// GetInputUsageInfo extracts usage for input types and fields 
from variable values. +// Builds field/variable → subgraph mappings, then traverses variable values to apply subgraph IDs. +// Handles nested inputs, scalars, and variable name remapping (e.g., normalized $a → original $criteria). +// Skips null values as they don't represent actual usage. +func GetInputUsageInfo(operation, definition *ast.Document, variables *astjson.Value, operationPlan plan.Plan, remapVariables map[string]string) ([]*graphqlmetrics.InputUsageInfo, error) { + // Build a mapping of field paths to their subgraph IDs from the plan + subgraphIDMap := buildFieldSubgraphIDMap(operationPlan) + + // Build a mapping of variables to the fields that use them and their subgraph IDs + variableSubgraphMap := buildVariableSubgraphMap(operation, definition, subgraphIDMap) + visitor := &inputUsageInfoVisitor{ - operation: operation, - definition: definition, - variables: variables, + operation: operation, + definition: definition, + variables: variables, + variableSubgraphMap: variableSubgraphMap, + remapVariables: remapVariables, + // Pre-allocate with reasonable capacity to reduce allocations + usage: make([]*graphqlmetrics.InputUsageInfo, 0, 16), } + for i := range operation.VariableDefinitions { visitor.EnterVariableDefinition(i) } + return visitor.usage, nil } type inputUsageInfoVisitor struct { definition, operation *ast.Document variables *astjson.Value + variableSubgraphMap map[string][]string + remapVariables map[string]string + currentVariableName string usage []*graphqlmetrics.InputUsageInfo } func (v *inputUsageInfoVisitor) EnterVariableDefinition(ref int) { varTypeRef := v.operation.VariableDefinitions[ref].Type - varName := v.operation.VariableValueNameString(v.operation.VariableDefinitions[ref].VariableValue.Ref) varTypeName := v.operation.ResolveTypeNameString(varTypeRef) - jsonField := v.variables.Get(varName) + + // Get the variable name from the (possibly normalized/minified) operation AST + // After normalization, variable names may be 
shortened: $criteria → $a + normalizedVarName := v.operation.VariableValueNameString(v.operation.VariableDefinitions[ref].VariableValue.Ref) + + // Map the normalized name back to the original if remapping is available + // The variables JSON always uses original names, but the AST uses normalized names + // Example: AST has "$a", remapVariables["a"] = "criteria", JSON has {"criteria": {...}} + originalVarName := normalizedVarName + if v.remapVariables != nil { + if remapped, exists := v.remapVariables[normalizedVarName]; exists { + originalVarName = remapped + } + } + + // Look up the variable value using the original name + jsonField := v.variables.Get(originalVarName) if jsonField == nil { return } - v.traverseVariable(jsonField, varName, varTypeName, "") + + // Skip null values - they don't represent actual schema usage + if jsonField.Type() == astjson.TypeNull { + return + } + + // Use the normalized name for subgraph ID lookup (it matches the AST structure) + v.currentVariableName = normalizedVarName + v.traverseVariable(jsonField, originalVarName, varTypeName, "") } +// traverseVariable recursively processes variable values, tracking input types and fields. +// Handles scalars, enums, input objects, and arrays. SubgraphIDs inherited from variableSubgraphMap. 
func (v *inputUsageInfoVisitor) traverseVariable(jsonValue *astjson.Value, fieldName, typeName, parentTypeName string) { defNode, ok := v.definition.NodeByNameStr(typeName) - if !ok { - return - } + usageInfo := &graphqlmetrics.InputUsageInfo{ NamedType: typeName, } if parentTypeName != "" { usageInfo.TypeName = parentTypeName + // Pre-allocate Path slice with exact capacity usageInfo.Path = []string{parentTypeName, fieldName} } + // Get subgraph IDs for this variable from the mapping built in STEP 2 + // All fields in this variable inherit the same subgraph IDs + if v.currentVariableName != "" { + if subgraphIDs, exists := v.variableSubgraphMap[v.currentVariableName]; exists { + usageInfo.SubgraphIDs = subgraphIDs + } + } + + // If the type is not found in the definition (e.g., built-in scalars like Boolean, String, Int), + // we still want to track its usage. + // Built-in scalars don't have type definitions in the schema document. + if !ok { + // This is likely a built-in scalar type, track it and return + v.appendUniqueUsage(usageInfo) + return + } + switch defNode.Kind { case ast.NodeKindInputObjectTypeDefinition: switch jsonValue.Type() { @@ -195,6 +475,11 @@ func (v *inputUsageInfoVisitor) traverseVariable(jsonValue *astjson.Value, field case astjson.TypeObject: o := jsonValue.GetObject() o.Visit(func(key []byte, value *astjson.Value) { + // Skip null fields - they don't represent actual schema usage + if value.Type() == astjson.TypeNull { + return + } + fieldRef := v.definition.InputObjectTypeDefinitionInputValueDefinitionByName(defNode.Ref, key) if fieldRef == -1 { return @@ -216,11 +501,16 @@ func (v *inputUsageInfoVisitor) traverseVariable(jsonValue *astjson.Value, field usageInfo.EnumValues = []string{string(jsonValue.GetStringBytes())} case astjson.TypeArray: arr := jsonValue.GetArray() + // Pre-allocate EnumValues slice with exact capacity usageInfo.EnumValues = make([]string, len(arr)) for i, arrayValue := range arr { usageInfo.EnumValues[i] = 
string(arrayValue.GetStringBytes()) } } + case ast.NodeKindScalarTypeDefinition: + // Custom scalar types defined in the schema (e.g., DateTime, JSON, Upload) + // Just track the usage, no special handling needed since we can't inspect + // the internal structure of custom scalars } v.appendUniqueUsage(usageInfo) @@ -261,5 +551,13 @@ func (v *inputUsageInfoVisitor) infoEquals(a, b *graphqlmetrics.InputUsageInfo) return false } } + if len(a.SubgraphIDs) != len(b.SubgraphIDs) { + return false + } + for i := range a.SubgraphIDs { + if a.SubgraphIDs[i] != b.SubgraphIDs[i] { + return false + } + } return true } diff --git a/router/pkg/graphqlschemausage/schemausage_test.go b/router/pkg/graphqlschemausage/schemausage_test.go index 62cf52b6ae..39df6da912 100644 --- a/router/pkg/graphqlschemausage/schemausage_test.go +++ b/router/pkg/graphqlschemausage/schemausage_test.go @@ -210,9 +210,9 @@ func TestGetSchemaUsageInfo(t *testing.T) { assert.NoError(t, err) fieldUsageInfo := GetTypeFieldUsageInfo(generatedPlan) - argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def) + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, generatedPlan) assert.NoError(t, err) - inputUsageInfo, err := GetInputUsageInfo(&op, &def, merged) + inputUsageInfo, err := GetInputUsageInfo(&op, &def, merged, generatedPlan, nil) assert.NoError(t, err) subscription := &plan.SubscriptionResponsePlan{ @@ -222,9 +222,9 @@ func TestGetSchemaUsageInfo(t *testing.T) { } subscriptionFieldUsageInfo := GetTypeFieldUsageInfo(subscription) - subscriptionArgumentUsageInfo, err := GetArgumentUsageInfo(&op, &def) + subscriptionArgumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, subscription) assert.NoError(t, err) - subscriptionInputUsageInfo, err := GetInputUsageInfo(&op, &def, merged) + subscriptionInputUsageInfo, err := GetInputUsageInfo(&op, &def, merged, subscription, nil) assert.NoError(t, err) assert.Equal(t, fieldUsageInfo, subscriptionFieldUsageInfo) @@ -284,82 +284,98 @@ func 
TestGetSchemaUsageInfo(t *testing.T) { expectedArgumentUsageInfo := []*graphqlmetricsv1.ArgumentUsageInfo{ { - TypeName: "Query", - NamedType: "String", - Path: []string{"searchResults", "name"}, + TypeName: "Query", + NamedType: "String", + Path: []string{"searchResults", "name"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, }, { - TypeName: "Query", - NamedType: "SearchFilter", - Path: []string{"searchResults", "filter"}, + TypeName: "Query", + NamedType: "SearchFilter", + Path: []string{"searchResults", "filter"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, }, { - TypeName: "Query", - NamedType: "SearchFilter", - Path: []string{"searchResults", "filter2"}, + TypeName: "Query", + NamedType: "SearchFilter", + Path: []string{"searchResults", "filter2"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, }, { - TypeName: "Query", - NamedType: "Episode", - Path: []string{"searchResults", "enumValue"}, + TypeName: "Query", + NamedType: "Episode", + Path: []string{"searchResults", "enumValue"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, }, { - TypeName: "Query", - NamedType: "Episode", - Path: []string{"searchResults", "enumList"}, + TypeName: "Query", + NamedType: "Episode", + Path: []string{"searchResults", "enumList"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, }, { - TypeName: "Query", - NamedType: "Episode", - Path: []string{"searchResults", "enumList2"}, + TypeName: "Query", + NamedType: "Episode", + Path: []string{"searchResults", "enumList2"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, }, { - TypeName: "Query", - NamedType: "SearchFilter", - Path: []string{"searchResults", "filterList"}, + TypeName: "Query", + NamedType: "SearchFilter", + Path: []string{"searchResults", "filterList"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, }, { - TypeName: "Human", - NamedType: "String", - Path: []string{"inlineName", "name"}, + TypeName: "Human", + NamedType: "String", + Path: []string{"inlineName", "name"}, + SubgraphIDs: 
[]string{"https://swapi.dev/api"}, }, } expectedInputUsageInfo := []graphqlmetricsv1.InputUsageInfo{ { - NamedType: "String", + NamedType: "String", + SubgraphIDs: []string{"https://swapi.dev/api"}, }, { - NamedType: "Episode", - TypeName: "SearchFilter", - EnumValues: []string{"NEWHOPE"}, - Path: []string{"SearchFilter", "enumField"}, + NamedType: "Episode", + TypeName: "SearchFilter", + EnumValues: []string{"NEWHOPE"}, + Path: []string{"SearchFilter", "enumField"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, }, { - NamedType: "SearchFilter", + NamedType: "SearchFilter", + SubgraphIDs: []string{"https://swapi.dev/api"}, }, { - NamedType: "Episode", - EnumValues: []string{"EMPIRE"}, + NamedType: "Episode", + EnumValues: []string{"EMPIRE"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, }, { - NamedType: "Episode", - EnumValues: []string{"JEDI", "EMPIRE", "NEWHOPE"}, + NamedType: "Episode", + EnumValues: []string{"JEDI", "EMPIRE", "NEWHOPE"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, }, { - NamedType: "String", - TypeName: "SearchFilter", - Path: []string{"SearchFilter", "excludeName"}, + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "excludeName"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, }, { - NamedType: "Episode", - TypeName: "SearchFilter", - EnumValues: []string{"JEDI"}, - Path: []string{"SearchFilter", "enumField"}, + NamedType: "Episode", + TypeName: "SearchFilter", + EnumValues: []string{"JEDI"}, + Path: []string{"SearchFilter", "enumField"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, }, { - NamedType: "Episode", - EnumValues: []string{"JEDI", "EMPIRE"}, + NamedType: "Episode", + EnumValues: []string{"JEDI", "EMPIRE"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, }, } @@ -456,9 +472,9 @@ func TestGetSchemaUsageInfoInterfaces(t *testing.T) { } fieldUsageInfo := GetTypeFieldUsageInfo(generatedPlan) - argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def) + argumentUsageInfo, err := 
GetArgumentUsageInfo(&op, &def, generatedPlan) assert.NoError(t, err) - inputUsageInfo, err := GetInputUsageInfo(&op, &def, astjson.MustParse(`{}`)) + inputUsageInfo, err := GetInputUsageInfo(&op, &def, astjson.MustParse(`{}`), generatedPlan, nil) assert.NoError(t, err) subscription := &plan.SubscriptionResponsePlan{ @@ -468,9 +484,9 @@ func TestGetSchemaUsageInfoInterfaces(t *testing.T) { } subscriptionFieldUsageInfo := GetTypeFieldUsageInfo(subscription) - subscriptionArgumentUsageInfo, err := GetArgumentUsageInfo(&op, &def) + subscriptionArgumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, subscription) assert.NoError(t, err) - subscriptionInputUsageInfo, err := GetInputUsageInfo(&op, &def, astjson.MustParse(`{}`)) + subscriptionInputUsageInfo, err := GetInputUsageInfo(&op, &def, astjson.MustParse(`{}`), subscription, nil) assert.NoError(t, err) assert.Equal(t, fieldUsageInfo, subscriptionFieldUsageInfo) @@ -511,6 +527,743 @@ func TestGetSchemaUsageInfoInterfaces(t *testing.T) { } } +// TestInputUsageWithNullVariables verifies that null variable values are not tracked as schema usage +func TestInputUsageWithNullVariables(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: EmployeeSearchInput): [Employee!]! + } + + type Employee { + id: ID! 
+ } + + input EmployeeSearchInput { + hasPets: Boolean + department: String + } + ` + + operation := ` + query FindEmployees($criteria: EmployeeSearchInput) { + findEmployees(criteria: $criteria) { + id + } + } + ` + + // Test with null value + variables := `{"criteria": null}` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "employees-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Employee", FieldNames: []string{"id"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployees", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Should be empty because the variable value is null + assert.Empty(t, inputUsageInfo, "Null variable values should not be tracked as usage") +} + +// TestInputUsageWithPartialNullFields verifies that null fields within input objects are not tracked +func TestInputUsageWithPartialNullFields(t 
*testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: EmployeeSearchInput): [Employee!]! + } + + type Employee { + id: ID! + } + + input EmployeeSearchInput { + hasPets: Boolean + department: String + minAge: Int + } + ` + + operation := ` + query FindEmployees($criteria: EmployeeSearchInput) { + findEmployees(criteria: $criteria) { + id + } + } + ` + + // Test with some null fields - only hasPets should be tracked, not department or minAge + variables := `{"criteria": {"hasPets": true, "department": null, "minAge": null}}` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "employees-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Employee", FieldNames: []string{"id"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployees", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + 
// Should only track the input type and hasPets field, not the null fields + expectedUsage := []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "Boolean", + TypeName: "EmployeeSearchInput", + Path: []string{"EmployeeSearchInput", "hasPets"}, + SubgraphIDs: []string{"employees-subgraph"}, + }, + { + NamedType: "EmployeeSearchInput", + SubgraphIDs: []string{"employees-subgraph"}, + }, + } + + assert.Len(t, inputUsageInfo, len(expectedUsage), "Should only track non-null fields") + for i := range expectedUsage { + assert.JSONEq(t, prettyJSON(t, &expectedUsage[i]), prettyJSON(t, inputUsageInfo[i]), "inputUsageInfo[%d]", i) + } +} + +// TestInputScalarFieldsInVariables specifically tests that scalar fields inside input objects +// are tracked when passed as variables (not inline) +func TestInputScalarFieldsInVariables(t *testing.T) { + // Create a simple schema with input type containing scalar fields + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: EmployeeSearchInput!): [Employee!]! + } + + type Employee { + id: ID! + } + + input EmployeeSearchInput { + hasPets: Boolean! + minAge: Int + department: String + } + ` + + operation := ` + query FindEmployeesWithVariable($criteria: EmployeeSearchInput!) 
{ + findEmployees(criteria: $criteria) { + id + } + } + ` + + variables := `{"criteria": {"hasPets": true, "minAge": 25, "department": "Engineering"}}` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "employees-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Employee", FieldNames: []string{"id"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployeesWithVariable", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + expectedInputUsageInfo := []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "Boolean", + TypeName: "EmployeeSearchInput", + Path: []string{"EmployeeSearchInput", "hasPets"}, + SubgraphIDs: []string{"employees-subgraph"}, + }, + { + NamedType: "Int", + TypeName: "EmployeeSearchInput", + Path: []string{"EmployeeSearchInput", "minAge"}, + SubgraphIDs: []string{"employees-subgraph"}, + }, + { + NamedType: 
"String", + TypeName: "EmployeeSearchInput", + Path: []string{"EmployeeSearchInput", "department"}, + SubgraphIDs: []string{"employees-subgraph"}, + }, + { + NamedType: "EmployeeSearchInput", + SubgraphIDs: []string{"employees-subgraph"}, + }, + } + + assert.Len(t, inputUsageInfo, len(expectedInputUsageInfo)) + for i := range expectedInputUsageInfo { + assert.JSONEq(t, prettyJSON(t, &expectedInputUsageInfo[i]), prettyJSON(t, inputUsageInfo[i]), "inputUsageInfo[%d]", i) + } +} + +// TestInputNestedScalarFields tests that scalar fields inside nested input objects +// are tracked correctly with proper paths and subgraph IDs +func TestInputNestedScalarFields(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + search(filter: SearchFilter!): [Result!]! + } + + type Result { + id: ID! + } + + input SearchFilter { + name: String + criteria: SearchCriteria + tags: [String] + } + + input SearchCriteria { + minScore: Int! + maxScore: Int + isActive: Boolean + nested: NestedCriteria + } + + input NestedCriteria { + value: String! + } + ` + + operation := ` + query SearchQuery($filter: SearchFilter!) 
{ + search(filter: $filter) { + id + } + } + ` + + variables := `{ + "filter": { + "name": "test", + "criteria": { + "minScore": 10, + "maxScore": 100, + "isActive": true, + "nested": { + "value": "deep" + } + }, + "tags": ["tag1", "tag2"] + } + }` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "search-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"search"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Result", FieldNames: []string{"id"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "SearchQuery", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + expectedInputUsageInfo := []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "name"}, + SubgraphIDs: []string{"search-subgraph"}, + }, + { + NamedType: "Int", + TypeName: "SearchCriteria", + Path: []string{"SearchCriteria", "minScore"}, + SubgraphIDs: 
[]string{"search-subgraph"}, + }, + { + NamedType: "Int", + TypeName: "SearchCriteria", + Path: []string{"SearchCriteria", "maxScore"}, + SubgraphIDs: []string{"search-subgraph"}, + }, + { + NamedType: "Boolean", + TypeName: "SearchCriteria", + Path: []string{"SearchCriteria", "isActive"}, + SubgraphIDs: []string{"search-subgraph"}, + }, + { + NamedType: "String", + TypeName: "NestedCriteria", + Path: []string{"NestedCriteria", "value"}, + SubgraphIDs: []string{"search-subgraph"}, + }, + { + NamedType: "NestedCriteria", + TypeName: "SearchCriteria", + Path: []string{"SearchCriteria", "nested"}, + SubgraphIDs: []string{"search-subgraph"}, + }, + { + NamedType: "SearchCriteria", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "criteria"}, + SubgraphIDs: []string{"search-subgraph"}, + }, + { + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "tags"}, + SubgraphIDs: []string{"search-subgraph"}, + }, + { + NamedType: "SearchFilter", + SubgraphIDs: []string{"search-subgraph"}, + }, + } + + assert.Len(t, inputUsageInfo, len(expectedInputUsageInfo)) + for i := range expectedInputUsageInfo { + assert.JSONEq(t, prettyJSON(t, &expectedInputUsageInfo[i]), prettyJSON(t, inputUsageInfo[i]), "inputUsageInfo[%d]", i) + } +} + +// TestMultipleSubgraphs tests that SubgraphIDs are correctly extracted when +// fields, arguments, and inputs come from different subgraphs +func TestMultipleSubgraphs(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + user(id: ID!): User + product(filter: ProductFilter!): Product + } + + type User { + id: ID! + name: String! + orders: [Order!]! + } + + type Order { + id: ID! + total: Float! + } + + type Product { + id: ID! + name: String! + price: Float! + } + + input ProductFilter { + minPrice: Float + maxPrice: Float + category: String + } + ` + + operation := ` + query GetData($userId: ID!, $productFilter: ProductFilter!) 
{ + user(id: $userId) { + id + name + orders { + id + total + } + } + product(filter: $productFilter) { + id + name + price + } + } + ` + + variables := `{ + "userId": "123", + "productFilter": { + "minPrice": 10.0, + "maxPrice": 100.0, + "category": "electronics" + } + }` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + // Create multiple subgraphs - users and products come from different sources + usersSubgraph, err := plan.NewDataSourceConfiguration[any]( + "users-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"user"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "User", FieldNames: []string{"id", "name", "orders"}}, + {TypeName: "Order", FieldNames: []string{"id", "total"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + productsSubgraph, err := plan.NewDataSourceConfiguration[any]( + "products-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"product"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Product", FieldNames: []string{"id", "name", "price"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{usersSubgraph, productsSubgraph}, + }) + require.NoError(t, err) + + 
generatedPlan := planner.Plan(&op, &def, "GetData", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + fieldUsageInfo := GetTypeFieldUsageInfo(generatedPlan) + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, generatedPlan) + require.NoError(t, err) + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Verify field usage - fields should be attributed to the correct subgraph + expectedFieldUsageInfo := []*graphqlmetricsv1.TypeFieldUsageInfo{ + { + TypeNames: []string{"Query"}, + Path: []string{"user"}, + NamedType: "User", + SubgraphIDs: []string{"users-subgraph"}, + }, + { + TypeNames: []string{"User"}, + Path: []string{"user", "id"}, + NamedType: "ID", + SubgraphIDs: []string{"users-subgraph"}, + }, + { + TypeNames: []string{"User"}, + Path: []string{"user", "name"}, + NamedType: "String", + SubgraphIDs: []string{"users-subgraph"}, + }, + { + TypeNames: []string{"User"}, + Path: []string{"user", "orders"}, + NamedType: "Order", + SubgraphIDs: []string{"users-subgraph"}, + }, + { + TypeNames: []string{"Order"}, + Path: []string{"user", "orders", "id"}, + NamedType: "ID", + SubgraphIDs: []string{"users-subgraph"}, + }, + { + TypeNames: []string{"Order"}, + Path: []string{"user", "orders", "total"}, + NamedType: "Float", + SubgraphIDs: []string{"users-subgraph"}, + }, + { + TypeNames: []string{"Query"}, + Path: []string{"product"}, + NamedType: "Product", + SubgraphIDs: []string{"products-subgraph"}, + }, + { + TypeNames: []string{"Product"}, + Path: []string{"product", "id"}, + NamedType: "ID", + SubgraphIDs: []string{"products-subgraph"}, + }, + { + TypeNames: []string{"Product"}, + Path: []string{"product", "name"}, + NamedType: "String", + SubgraphIDs: []string{"products-subgraph"}, + }, + { + TypeNames: []string{"Product"}, + Path: []string{"product", "price"}, + NamedType: "Float", + SubgraphIDs: []string{"products-subgraph"}, 
+ }, + } + + // Verify argument usage - arguments should be attributed to the correct subgraph + expectedArgumentUsageInfo := []*graphqlmetricsv1.ArgumentUsageInfo{ + { + TypeName: "Query", + NamedType: "ID", + Path: []string{"user", "id"}, + SubgraphIDs: []string{"users-subgraph"}, + }, + { + TypeName: "Query", + NamedType: "ProductFilter", + Path: []string{"product", "filter"}, + SubgraphIDs: []string{"products-subgraph"}, + }, + } + + // Verify input usage - inputs should be attributed to the correct subgraph + expectedInputUsageInfo := []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "ID", + SubgraphIDs: []string{"users-subgraph"}, + }, + { + NamedType: "Float", + TypeName: "ProductFilter", + Path: []string{"ProductFilter", "minPrice"}, + SubgraphIDs: []string{"products-subgraph"}, + }, + { + NamedType: "Float", + TypeName: "ProductFilter", + Path: []string{"ProductFilter", "maxPrice"}, + SubgraphIDs: []string{"products-subgraph"}, + }, + { + NamedType: "String", + TypeName: "ProductFilter", + Path: []string{"ProductFilter", "category"}, + SubgraphIDs: []string{"products-subgraph"}, + }, + { + NamedType: "ProductFilter", + SubgraphIDs: []string{"products-subgraph"}, + }, + } + + // Assert all expectations + assert.Len(t, fieldUsageInfo, len(expectedFieldUsageInfo)) + for i := range expectedFieldUsageInfo { + assert.JSONEq(t, prettyJSON(t, expectedFieldUsageInfo[i]), prettyJSON(t, fieldUsageInfo[i].IntoGraphQLMetrics()), "fieldUsageInfo[%d]", i) + } + + assert.Len(t, argumentUsageInfo, len(expectedArgumentUsageInfo)) + for i := range expectedArgumentUsageInfo { + assert.JSONEq(t, prettyJSON(t, expectedArgumentUsageInfo[i]), prettyJSON(t, argumentUsageInfo[i]), "argumentUsageInfo[%d]", i) + } + + assert.Len(t, inputUsageInfo, len(expectedInputUsageInfo)) + for i := range expectedInputUsageInfo { + assert.JSONEq(t, prettyJSON(t, &expectedInputUsageInfo[i]), prettyJSON(t, inputUsageInfo[i]), "inputUsageInfo[%d]", i) + } + + // Additionally, verify that no field 
is wrongly attributed to the wrong subgraph + for _, info := range fieldUsageInfo { + if len(info.Path) > 0 { + firstPath := info.Path[0] + if firstPath == "user" { + assert.Equal(t, []string{"users-subgraph"}, info.SubgraphIDs, "user fields should only reference users-subgraph") + } else if firstPath == "product" { + assert.Equal(t, []string{"products-subgraph"}, info.SubgraphIDs, "product fields should only reference products-subgraph") + } + } + } + + // Verify arguments are attributed correctly + for _, info := range argumentUsageInfo { + if len(info.Path) > 0 { + firstPath := info.Path[0] + if firstPath == "user" { + assert.Equal(t, []string{"users-subgraph"}, info.SubgraphIDs, "user arguments should reference users-subgraph") + } else if firstPath == "product" { + assert.Equal(t, []string{"products-subgraph"}, info.SubgraphIDs, "product arguments should reference products-subgraph") + } + } + } +} + func prettyJSON(t *testing.T, v interface{}) string { b, err := json.MarshalIndent(v, "", " ") require.NoError(t, err) From 9c233aa1d8ef0b4e3e40dd8a465e50be893651a3 Mon Sep 17 00:00:00 2001 From: JivusAyrus Date: Wed, 19 Nov 2025 03:24:43 +0530 Subject: [PATCH 02/22] fix: operation checks for input and argument changes --- .../services/SchemaUsageTrafficInspector.ts | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts index 36820ae405..622f00572b 100644 --- a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts +++ b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts @@ -304,16 +304,14 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In case ChangeType.InputFieldAdded: { return { schemaChangeId: schemaCheckId, - fieldName: path[1], + // passing only the type name, as we want to return all the ops which use this input type. 
typeName: path[0], isInput: true, }; } // 1. When an argument has changed, we know the exact path to the argument e.g. 'Query.engineer.id' // and the type name e.g. 'Query' - case ChangeType.FieldArgumentRemoved: - case ChangeType.FieldArgumentAdded: // Only when a required argument is added - case ChangeType.FieldArgumentTypeChanged: { + case ChangeType.FieldArgumentRemoved: { return { schemaChangeId: schemaCheckId, path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names @@ -321,6 +319,18 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In isArgument: true, }; } + + // Only when a required argument is added or type of an argument has changed to a required type + case ChangeType.FieldArgumentAdded: + case ChangeType.FieldArgumentTypeChanged: { + return { + schemaChangeId: schemaCheckId, + // The path should be just the query/mutation/subscription name + // e.g. if 'Query.employee.a', the path should be ['employee'] as its new field or it has changed the type of the argument, we check the usage of the operation. + path: path.slice(1, 2), + typeName: path[0], // Enclosing type e.g. 
'Query' or 'Engineer' when the argument is on a field of type Engineer + }; + } } // no return to enforce that all cases are handled } From 6169226566a65728d6791b4dc2cc483a76b095d2 Mon Sep 17 00:00:00 2001 From: StarpTech Date: Thu, 20 Nov 2025 11:36:19 +0100 Subject: [PATCH 03/22] chore: add bench --- .../schemausage_bench_test.go | 336 ++++++++++++++++++ 1 file changed, 336 insertions(+) create mode 100644 router/pkg/graphqlschemausage/schemausage_bench_test.go diff --git a/router/pkg/graphqlschemausage/schemausage_bench_test.go b/router/pkg/graphqlschemausage/schemausage_bench_test.go new file mode 100644 index 0000000000..87c0f9ce1e --- /dev/null +++ b/router/pkg/graphqlschemausage/schemausage_bench_test.go @@ -0,0 +1,336 @@ +package graphqlschemausage + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" + "github.com/wundergraph/astjson" + "github.com/wundergraph/graphql-go-tools/v2/pkg/ast" + "github.com/wundergraph/graphql-go-tools/v2/pkg/astnormalization" + "github.com/wundergraph/graphql-go-tools/v2/pkg/astparser" + "github.com/wundergraph/graphql-go-tools/v2/pkg/asttransform" + "github.com/wundergraph/graphql-go-tools/v2/pkg/astvalidation" + "github.com/wundergraph/graphql-go-tools/v2/pkg/engine/plan" + "github.com/wundergraph/graphql-go-tools/v2/pkg/operationreport" +) + +// setupBenchmark creates a realistic schema usage scenario for benchmarking +// Returns: plan, operation doc, definition doc, variables +func setupBenchmark(b *testing.B) (plan.Plan, *ast.Document, *ast.Document, *astjson.Value) { + b.Helper() + + operation := ` + query Search($name: String! $filter2: SearchFilter $enumValue: Episode $enumList: [Episode]) { + searchResults(name: $name, filter: {excludeName: "Jannik"} filter2: $filter2, enumValue: $enumValue enumList: $enumList) { + __typename + ... on Human { + name + inlineName(name: "Jannik") + } + ... 
on Droid { + name + } + } + hero { + name + } + } + ` + + variables := `{"name":"Jannik","filter2":{"enumField":"NEWHOPE"},"enumValue":"EMPIRE","enumList":["JEDI","EMPIRE"]}` + + // Parse schema + def, rep := astparser.ParseGraphqlDocumentString(schemaUsageInfoTestSchema) + require.False(b, rep.HasErrors()) + + // Parse operation + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(b, rep.HasErrors()) + + // Merge and normalize + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(b, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(b, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(b, report.HasErrors()) + + // Create data source configuration + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "https://swapi.dev/api", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"searchResults", "hero"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Human", FieldNames: []string{"name", "inlineName"}}, + {TypeName: "Droid", FieldNames: []string{"name"}}, + {TypeName: "SearchResult", FieldNames: []string{"__typename"}}, + {TypeName: "Character", FieldNames: []string{"name", "friends"}}, + }, + }, + nil, + ) + require.NoError(b, err) + + // Create planner + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(b, err) + + // Generate plan + generatedPlan := planner.Plan(&op, &def, "Search", report) + require.False(b, report.HasErrors()) + + // Parse variables + vars, err := astjson.Parse(variables) + require.NoError(b, err) + + inputVariables, err := astjson.ParseBytes(op.Input.Variables) + require.NoError(b, err) + + merged, _, err := 
astjson.MergeValues(vars, inputVariables) + require.NoError(b, err) + + return generatedPlan, &op, &def, merged +} + +// BenchmarkGetTypeFieldUsageInfo measures memory allocations when extracting field usage from a plan +func BenchmarkGetTypeFieldUsageInfo(b *testing.B) { + generatedPlan, _, _, _ := setupBenchmark(b) + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + result := GetTypeFieldUsageInfo(generatedPlan) + _ = result // Prevent compiler optimization + } +} + +// BenchmarkGetArgumentUsageInfo measures memory allocations when extracting argument usage +func BenchmarkGetArgumentUsageInfo(b *testing.B) { + generatedPlan, operation, definition, _ := setupBenchmark(b) + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + result, err := GetArgumentUsageInfo(operation, definition, generatedPlan) + if err != nil { + b.Fatal(err) + } + _ = result // Prevent compiler optimization + } +} + +// BenchmarkGetInputUsageInfo measures memory allocations when extracting input variable usage +func BenchmarkGetInputUsageInfo(b *testing.B) { + generatedPlan, operation, definition, variables := setupBenchmark(b) + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + result, err := GetInputUsageInfo(operation, definition, variables, generatedPlan, nil) + if err != nil { + b.Fatal(err) + } + _ = result // Prevent compiler optimization + } +} + +// BenchmarkIntoGraphQLMetrics measures memory allocations when converting to protobuf format +func BenchmarkIntoGraphQLMetrics(b *testing.B) { + generatedPlan, _, _, _ := setupBenchmark(b) + typeFieldMetrics := TypeFieldMetrics(GetTypeFieldUsageInfo(generatedPlan)) + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + result := typeFieldMetrics.IntoGraphQLMetrics() + _ = result // Prevent compiler optimization + } +} + +// BenchmarkSchemaUsageEndToEnd measures total memory allocations for complete schema usage extraction +// This simulates a full request lifecycle for 
schema usage tracking +func BenchmarkSchemaUsageEndToEnd(b *testing.B) { + generatedPlan, operation, definition, variables := setupBenchmark(b) + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + // Extract type field usage + typeFieldUsage := GetTypeFieldUsageInfo(generatedPlan) + + // Convert to GraphQL metrics format + _ = TypeFieldMetrics(typeFieldUsage).IntoGraphQLMetrics() + + // Extract argument usage + argUsage, err := GetArgumentUsageInfo(operation, definition, generatedPlan) + if err != nil { + b.Fatal(err) + } + _ = argUsage + + // Extract input variable usage + inputUsage, err := GetInputUsageInfo(operation, definition, variables, generatedPlan, nil) + if err != nil { + b.Fatal(err) + } + _ = inputUsage + } +} + +// setupLargeFieldsBenchmark creates a schema and query with many unique fields +// to test schema usage efficiency at scale +func setupLargeFieldsBenchmark(b *testing.B, fieldCount int) (plan.Plan, *ast.Document, *ast.Document, *astjson.Value) { + b.Helper() + + // Generate schema with many fields + schemaBuilder := ` + type Query { + user(id: ID!): User + } + + type User { + id: ID! + name: String! + ` + + // Add many scalar fields + for i := 0; i < fieldCount; i++ { + fieldName := fmt.Sprintf("field%d", i) + schemaBuilder += "\n\t\t\t" + fieldName + ": String" + } + + schemaBuilder += "\n\t\t}" + + // Generate query selecting all fields + queryBuilder := "query GetUser($id: ID!) 
{\n\t\tuser(id: $id) {\n\t\t\tid\n\t\t\tname\n" + for i := 0; i < fieldCount; i++ { + fieldName := fmt.Sprintf("field%d", i) + queryBuilder += "\t\t\t" + fieldName + "\n" + } + queryBuilder += "\t\t}\n\t}" + + variables := `{"id":"123"}` + + // Parse schema + def, rep := astparser.ParseGraphqlDocumentString(schemaBuilder) + require.False(b, rep.HasErrors()) + + // Parse operation + op, rep := astparser.ParseGraphqlDocumentString(queryBuilder) + require.False(b, rep.HasErrors()) + + // Merge and normalize + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(b, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(b, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(b, report.HasErrors()) + + // Build field names list for metadata + fieldNames := []string{"id", "name"} + for i := 0; i < fieldCount; i++ { + fieldName := fmt.Sprintf("field%d", i) + fieldNames = append(fieldNames, fieldName) + } + + // Create data source configuration + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "https://api.example.com", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"user"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "User", FieldNames: fieldNames}, + }, + }, + nil, + ) + require.NoError(b, err) + + // Create planner + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(b, err) + + // Generate plan + generatedPlan := planner.Plan(&op, &def, "GetUser", report) + require.False(b, report.HasErrors()) + + // Parse variables + vars, err := astjson.Parse(variables) + require.NoError(b, err) + + return generatedPlan, &op, &def, vars +} + +// BenchmarkSchemaUsageWithManyFields 
tests performance with varying numbers of unique fields +// This helps identify O(n²) bottlenecks in duplicate detection and path allocation +func BenchmarkSchemaUsageWithManyFields(b *testing.B) { + testCases := []struct { + name string + fieldCount int + }{ + {"10_fields", 10}, + {"50_fields", 50}, + {"100_fields", 100}, + {"250_fields", 250}, + {"500_fields", 500}, + } + + for _, tc := range testCases { + b.Run(tc.name, func(b *testing.B) { + generatedPlan, operation, definition, variables := setupLargeFieldsBenchmark(b, tc.fieldCount) + + b.ResetTimer() + b.ReportAllocs() + + for i := 0; i < b.N; i++ { + // Extract type field usage + typeFieldUsage := GetTypeFieldUsageInfo(generatedPlan) + + // Extract argument usage + argUsage, err := GetArgumentUsageInfo(operation, definition, generatedPlan) + if err != nil { + b.Fatal(err) + } + + // Extract input variable usage + inputUsage, err := GetInputUsageInfo(operation, definition, variables, generatedPlan, nil) + if err != nil { + b.Fatal(err) + } + + _ = typeFieldUsage + _ = argUsage + _ = inputUsage + } + }) + } +} From 613829ace912a56dad7c182ed983d8b7510063b1 Mon Sep 17 00:00:00 2001 From: StarpTech Date: Fri, 21 Nov 2025 22:37:24 +0100 Subject: [PATCH 04/22] chore: implement advanced schema usage --- graphqlmetrics/core/metrics_service.go | 15 +- .../graphqlmetrics/v1/graphqlmetrics.pb.go | 51 +- ...1120223520_add_is_null_to_schema_usage.sql | 7 + ...529_add_is_null_to_schema_usage_5m_90d.sql | 7 + ...dd_is_null_to_schema_usage_lite_1d_90d.sql | 7 + ...gql_schema_usage_5m_90d_mv_for_is_null.sql | 50 + ...ql_schema_usage_5m_90d_mv_with_is_null.sql | 52 + ...gql_schema_usage_1d_90d_mv_for_is_null.sql | 26 + ...ql_schema_usage_1d_90d_mv_with_is_null.sql | 27 + .../graphqlmetrics/v1/graphqlmetrics.proto | 6 + router/core/operation_planner.go | 2 +- .../graphqlmetrics/v1/graphqlmetrics.pb.go | 51 +- router/pkg/graphqlschemausage/schemausage.go | 912 +++++++++--- .../schemausage_bench_test.go | 9 +- 
.../graphqlschemausage/schemausage_test.go | 1238 ++++++++++++++++- 15 files changed, 2201 insertions(+), 259 deletions(-) create mode 100644 graphqlmetrics/migrations/20251120223520_add_is_null_to_schema_usage.sql create mode 100644 graphqlmetrics/migrations/20251120223529_add_is_null_to_schema_usage_5m_90d.sql create mode 100644 graphqlmetrics/migrations/20251120223537_add_is_null_to_schema_usage_lite_1d_90d.sql create mode 100644 graphqlmetrics/migrations/20251120223910_drop_gql_schema_usage_5m_90d_mv_for_is_null.sql create mode 100644 graphqlmetrics/migrations/20251120223951_recreate_gql_schema_usage_5m_90d_mv_with_is_null.sql create mode 100644 graphqlmetrics/migrations/20251120224005_drop_gql_schema_usage_1d_90d_mv_for_is_null.sql create mode 100644 graphqlmetrics/migrations/20251120224017_recreate_gql_schema_usage_1d_90d_mv_with_is_null.sql diff --git a/graphqlmetrics/core/metrics_service.go b/graphqlmetrics/core/metrics_service.go index 9a14991d4e..e7c97ae81b 100644 --- a/graphqlmetrics/core/metrics_service.go +++ b/graphqlmetrics/core/metrics_service.go @@ -338,18 +338,17 @@ func (s *MetricsService) appendUsageMetrics( strconv.FormatInt(int64(schemaUsage.RequestInfo.StatusCode), 10), schemaUsage.RequestInfo.Error, fieldUsage.SubgraphIDs, - false, - false, + false, // IsArgument + false, // IsInput schemaUsage.Attributes, fieldUsage.IndirectInterfaceField, + false, // IsNull - not applicable for field metrics ) if err != nil { return fmt.Errorf("failed to append field metric to batch: %w", err) } } - fmt.Println(schemaUsage.OperationInfo.Name) - for _, argumentUsage := range schemaUsage.ArgumentMetrics { // Sort stable for fields where the order doesn't matter @@ -376,10 +375,11 @@ func (s *MetricsService) appendUsageMetrics( strconv.FormatInt(int64(schemaUsage.RequestInfo.StatusCode), 10), schemaUsage.RequestInfo.Error, argumentUsage.SubgraphIDs, - true, - false, + true, // IsArgument + false, // IsInput schemaUsage.Attributes, - false, + false, // 
IsIndirectFieldUsage + argumentUsage.IsNull, ) if err != nil { return fmt.Errorf("failed to append argument metric to batch: %w", err) @@ -416,6 +416,7 @@ func (s *MetricsService) appendUsageMetrics( true, schemaUsage.Attributes, false, + inputUsage.IsNull, ) if err != nil { return fmt.Errorf("failed to append input metric to batch: %w", err) diff --git a/graphqlmetrics/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go b/graphqlmetrics/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go index d52274c371..18cae56548 100644 --- a/graphqlmetrics/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go +++ b/graphqlmetrics/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go @@ -581,6 +581,9 @@ type ArgumentUsageInfo struct { NamedType string `protobuf:"bytes,4,opt,name=NamedType,proto3" json:"NamedType,omitempty"` // SubgraphIDs is the list of datasource IDs (e.g subgraph ID) that the argument is used from SubgraphIDs []string `protobuf:"bytes,5,rep,name=SubgraphIDs,proto3" json:"SubgraphIDs,omitempty"` + // IsNull indicates whether this argument was explicitly set to null + // This is critical for detecting breaking changes when optional arguments become required + IsNull bool `protobuf:"varint,6,opt,name=IsNull,proto3" json:"IsNull,omitempty"` } func (x *ArgumentUsageInfo) Reset() { @@ -650,6 +653,13 @@ func (x *ArgumentUsageInfo) GetSubgraphIDs() []string { return nil } +func (x *ArgumentUsageInfo) GetIsNull() bool { + if x != nil { + return x.IsNull + } + return false +} + type InputUsageInfo struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -667,6 +677,9 @@ type InputUsageInfo struct { EnumValues []string `protobuf:"bytes,5,rep,name=EnumValues,proto3" json:"EnumValues,omitempty"` // SubgraphIDs is the list of datasource IDs (e.g subgraph ID) that the input is used from SubgraphIDs []string `protobuf:"bytes,6,rep,name=SubgraphIDs,proto3" json:"SubgraphIDs,omitempty"` + // IsNull indicates whether this input was explicitly 
or implicitly null + // This is critical for detecting breaking changes when optional fields become required + IsNull bool `protobuf:"varint,7,opt,name=IsNull,proto3" json:"IsNull,omitempty"` } func (x *InputUsageInfo) Reset() { @@ -743,6 +756,13 @@ func (x *InputUsageInfo) GetSubgraphIDs() []string { return nil } +func (x *InputUsageInfo) GetIsNull() bool { + if x != nil { + return x.IsNull + } + return false +} + type PublishGraphQLRequestMetricsRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -1012,7 +1032,7 @@ var file_wg_cosmo_graphqlmetrics_v1_graphqlmetrics_proto_rawDesc = []byte{ 0x72, 0x65, 0x63, 0x74, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x16, 0x49, 0x6e, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, - 0x22, 0x99, 0x01, 0x0a, 0x11, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x55, 0x73, 0x61, + 0x22, 0xb1, 0x01, 0x0a, 0x11, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x54, 0x79, @@ -1021,19 +1041,22 @@ var file_wg_cosmo_graphqlmetrics_v1_graphqlmetrics_proto_rawDesc = []byte{ 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x22, 0xb6, 0x01, 0x0a, - 0x0e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, - 0x12, 0x0a, 0x04, 
0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, - 0x61, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, - 0x14, 0x0a, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, - 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, - 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, - 0x79, 0x70, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, - 0x44, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, - 0x70, 0x68, 0x49, 0x44, 0x73, 0x22, 0x74, 0x0a, 0x23, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, + 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x12, 0x16, 0x0a, 0x06, + 0x49, 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x49, 0x73, + 0x4e, 0x75, 0x6c, 0x6c, 0x22, 0xce, 0x01, 0x0a, 0x0e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x55, 0x73, + 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x18, + 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x54, + 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x54, + 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1c, 0x0a, + 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 
0x70, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x45, + 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x53, + 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x09, + 0x52, 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x12, 0x16, 0x0a, + 0x06, 0x49, 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x49, + 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x22, 0x74, 0x0a, 0x23, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x4d, 0x0a, 0x0b, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x03, 0x28, diff --git a/graphqlmetrics/migrations/20251120223520_add_is_null_to_schema_usage.sql b/graphqlmetrics/migrations/20251120223520_add_is_null_to_schema_usage.sql new file mode 100644 index 0000000000..36885152aa --- /dev/null +++ b/graphqlmetrics/migrations/20251120223520_add_is_null_to_schema_usage.sql @@ -0,0 +1,7 @@ +-- migrate:up + +ALTER TABLE gql_metrics_schema_usage ADD COLUMN IF NOT EXISTS IsNull bool DEFAULT false CODEC(ZSTD(3)); + +-- migrate:down + +ALTER TABLE gql_metrics_schema_usage DROP COLUMN IF EXISTS IsNull; diff --git a/graphqlmetrics/migrations/20251120223529_add_is_null_to_schema_usage_5m_90d.sql b/graphqlmetrics/migrations/20251120223529_add_is_null_to_schema_usage_5m_90d.sql new file mode 100644 index 0000000000..c77604f333 --- /dev/null +++ b/graphqlmetrics/migrations/20251120223529_add_is_null_to_schema_usage_5m_90d.sql @@ -0,0 +1,7 @@ +-- migrate:up + +ALTER TABLE gql_metrics_schema_usage_5m_90d ADD COLUMN IF NOT EXISTS IsNull bool DEFAULT false CODEC(ZSTD(3)); + +-- migrate:down + +ALTER TABLE gql_metrics_schema_usage_5m_90d DROP COLUMN IF 
EXISTS IsNull; diff --git a/graphqlmetrics/migrations/20251120223537_add_is_null_to_schema_usage_lite_1d_90d.sql b/graphqlmetrics/migrations/20251120223537_add_is_null_to_schema_usage_lite_1d_90d.sql new file mode 100644 index 0000000000..b5f4445867 --- /dev/null +++ b/graphqlmetrics/migrations/20251120223537_add_is_null_to_schema_usage_lite_1d_90d.sql @@ -0,0 +1,7 @@ +-- migrate:up + +ALTER TABLE gql_metrics_schema_usage_lite_1d_90d ADD COLUMN IF NOT EXISTS IsNull bool DEFAULT false CODEC(ZSTD(3)); + +-- migrate:down + +ALTER TABLE gql_metrics_schema_usage_lite_1d_90d DROP COLUMN IF EXISTS IsNull; diff --git a/graphqlmetrics/migrations/20251120223910_drop_gql_schema_usage_5m_90d_mv_for_is_null.sql b/graphqlmetrics/migrations/20251120223910_drop_gql_schema_usage_5m_90d_mv_for_is_null.sql new file mode 100644 index 0000000000..b8842304be --- /dev/null +++ b/graphqlmetrics/migrations/20251120223910_drop_gql_schema_usage_5m_90d_mv_for_is_null.sql @@ -0,0 +1,50 @@ +-- migrate:up + +DROP VIEW IF EXISTS gql_metrics_schema_usage_5m_90d_mv; + +-- migrate:down + +CREATE MATERIALIZED VIEW IF NOT EXISTS gql_metrics_schema_usage_5m_90d_mv TO gql_metrics_schema_usage_5m_90d AS +SELECT + toStartOfFiveMinute(Timestamp) as Timestamp, + toLowCardinality(OrganizationID) as OrganizationID, + toLowCardinality(FederatedGraphID) as FederatedGraphID, + toLowCardinality(RouterConfigVersion) as RouterConfigVersion, + toLowCardinality(OperationHash) as OperationHash, + toLowCardinality(OperationName) as OperationName, + toLowCardinality(OperationType) as OperationType, + Path as Path, + toLowCardinality(arrayElement(Path, -1)) as FieldName, + TypeNames as TypeNames, + toLowCardinality(NamedType) as NamedType, + toLowCardinality(ClientName) as ClientName, + toLowCardinality(ClientVersion) as ClientVersion, + SubgraphIDs as SubgraphIDs, + IsArgument as IsArgument, + IsInput as IsInput, + sum(Count) as TotalUsages, + sumIf(Count, HasError OR position(HttpStatusCode,'5') = 1 OR 
position(HttpStatusCode,'4') = 1) as TotalErrors, + sumIf(Count, position(HttpStatusCode,'4') = 1) AS TotalClientErrors, + IsIndirectFieldUsage as IsIndirectFieldUsage +FROM gql_metrics_schema_usage +GROUP BY + Timestamp, + OperationHash, + OperationName, + OperationType, + FederatedGraphID, + RouterConfigVersion, + OrganizationID, + OperationType, + ClientName, + ClientVersion, + Path, + FieldName, + NamedType, + TypeNames, + SubgraphIDs, + IsArgument, + IsInput, + IsIndirectFieldUsage +ORDER BY + Timestamp; diff --git a/graphqlmetrics/migrations/20251120223951_recreate_gql_schema_usage_5m_90d_mv_with_is_null.sql b/graphqlmetrics/migrations/20251120223951_recreate_gql_schema_usage_5m_90d_mv_with_is_null.sql new file mode 100644 index 0000000000..84e34366d4 --- /dev/null +++ b/graphqlmetrics/migrations/20251120223951_recreate_gql_schema_usage_5m_90d_mv_with_is_null.sql @@ -0,0 +1,52 @@ +-- migrate:up + +CREATE MATERIALIZED VIEW IF NOT EXISTS gql_metrics_schema_usage_5m_90d_mv TO gql_metrics_schema_usage_5m_90d AS +SELECT + toStartOfFiveMinute(Timestamp) as Timestamp, + toLowCardinality(OrganizationID) as OrganizationID, + toLowCardinality(FederatedGraphID) as FederatedGraphID, + toLowCardinality(RouterConfigVersion) as RouterConfigVersion, + toLowCardinality(OperationHash) as OperationHash, + toLowCardinality(OperationName) as OperationName, + toLowCardinality(OperationType) as OperationType, + Path as Path, + toLowCardinality(arrayElement(Path, -1)) as FieldName, + TypeNames as TypeNames, + toLowCardinality(NamedType) as NamedType, + toLowCardinality(ClientName) as ClientName, + toLowCardinality(ClientVersion) as ClientVersion, + SubgraphIDs as SubgraphIDs, + IsArgument as IsArgument, + IsInput as IsInput, + sum(Count) as TotalUsages, + sumIf(Count, HasError OR position(HttpStatusCode,'5') = 1 OR position(HttpStatusCode,'4') = 1) as TotalErrors, + sumIf(Count, position(HttpStatusCode,'4') = 1) AS TotalClientErrors, + IsIndirectFieldUsage as IsIndirectFieldUsage, + 
IsNull as IsNull +FROM gql_metrics_schema_usage +GROUP BY + Timestamp, + OperationHash, + OperationName, + OperationType, + FederatedGraphID, + RouterConfigVersion, + OrganizationID, + OperationType, + ClientName, + ClientVersion, + Path, + FieldName, + NamedType, + TypeNames, + SubgraphIDs, + IsArgument, + IsInput, + IsIndirectFieldUsage, + IsNull +ORDER BY + Timestamp; + +-- migrate:down + +DROP VIEW IF EXISTS gql_metrics_schema_usage_5m_90d_mv; diff --git a/graphqlmetrics/migrations/20251120224005_drop_gql_schema_usage_1d_90d_mv_for_is_null.sql b/graphqlmetrics/migrations/20251120224005_drop_gql_schema_usage_1d_90d_mv_for_is_null.sql new file mode 100644 index 0000000000..adf13ebf25 --- /dev/null +++ b/graphqlmetrics/migrations/20251120224005_drop_gql_schema_usage_1d_90d_mv_for_is_null.sql @@ -0,0 +1,26 @@ +-- migrate:up + +DROP VIEW IF EXISTS gql_metrics_schema_usage_lite_1d_90d_mv; + +-- migrate:down + +CREATE MATERIALIZED VIEW IF NOT EXISTS gql_metrics_schema_usage_lite_1d_90d_mv TO gql_metrics_schema_usage_lite_1d_90d AS +SELECT + toStartOfDay(Timestamp) as Timestamp, + toLowCardinality(OrganizationID) as OrganizationID, + toLowCardinality(FederatedGraphID) as FederatedGraphID, + toLowCardinality(RouterConfigVersion) as RouterConfigVersion, + toLowCardinality(OperationHash) as OperationHash, + toLowCardinality(OperationName) as OperationName, + toLowCardinality(OperationType) as OperationType, + Path as Path, + toLowCardinality(arrayElement(Path, -1)) as FieldName, + TypeNames as TypeNames, + toLowCardinality(NamedType) as NamedType, + toLowCardinality(ClientName) as ClientName, + toLowCardinality(ClientVersion) as ClientVersion, + SubgraphIDs as SubgraphIDs, + IsArgument as IsArgument, + IsInput as IsInput, + IsIndirectFieldUsage as IsIndirectFieldUsage +FROM gql_metrics_schema_usage; diff --git a/graphqlmetrics/migrations/20251120224017_recreate_gql_schema_usage_1d_90d_mv_with_is_null.sql 
b/graphqlmetrics/migrations/20251120224017_recreate_gql_schema_usage_1d_90d_mv_with_is_null.sql new file mode 100644 index 0000000000..3f44a09646 --- /dev/null +++ b/graphqlmetrics/migrations/20251120224017_recreate_gql_schema_usage_1d_90d_mv_with_is_null.sql @@ -0,0 +1,27 @@ +-- migrate:up + +CREATE MATERIALIZED VIEW IF NOT EXISTS gql_metrics_schema_usage_lite_1d_90d_mv TO gql_metrics_schema_usage_lite_1d_90d AS +SELECT + toStartOfDay(Timestamp) as Timestamp, + toLowCardinality(OrganizationID) as OrganizationID, + toLowCardinality(FederatedGraphID) as FederatedGraphID, + toLowCardinality(RouterConfigVersion) as RouterConfigVersion, + toLowCardinality(OperationHash) as OperationHash, + toLowCardinality(OperationName) as OperationName, + toLowCardinality(OperationType) as OperationType, + Path as Path, + toLowCardinality(arrayElement(Path, -1)) as FieldName, + TypeNames as TypeNames, + toLowCardinality(NamedType) as NamedType, + toLowCardinality(ClientName) as ClientName, + toLowCardinality(ClientVersion) as ClientVersion, + SubgraphIDs as SubgraphIDs, + IsArgument as IsArgument, + IsInput as IsInput, + IsIndirectFieldUsage as IsIndirectFieldUsage, + IsNull as IsNull +FROM gql_metrics_schema_usage; + +-- migrate:down + +DROP VIEW IF EXISTS gql_metrics_schema_usage_lite_1d_90d_mv; diff --git a/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.proto b/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.proto index 6aeb7a3be4..5567ce6d1a 100644 --- a/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.proto +++ b/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.proto @@ -91,6 +91,9 @@ message ArgumentUsageInfo { string NamedType = 4; // SubgraphIDs is the list of datasource IDs (e.g subgraph ID) that the argument is used from repeated string SubgraphIDs = 5; + // IsNull indicates whether this argument was explicitly set to null + // This is critical for detecting breaking changes when optional arguments become required + bool IsNull = 6; } message InputUsageInfo { @@ -106,6 
+109,9 @@ message InputUsageInfo { repeated string EnumValues = 5; // SubgraphIDs is the list of datasource IDs (e.g subgraph ID) that the input is used from repeated string SubgraphIDs = 6; + // IsNull indicates whether this input was explicitly or implicitly null + // This is critical for detecting breaking changes when optional fields become required + bool IsNull = 7; } message PublishGraphQLRequestMetricsRequest { diff --git a/router/core/operation_planner.go b/router/core/operation_planner.go index ec58b45091..38c3b6aac5 100644 --- a/router/core/operation_planner.go +++ b/router/core/operation_planner.go @@ -83,7 +83,7 @@ func (p *OperationPlanner) preparePlan(ctx *operationContext) (*planWithMetaData if p.trackUsageInfo { out.typeFieldUsageInfo = graphqlschemausage.GetTypeFieldUsageInfo(preparedPlan) - out.argumentUsageInfo, err = graphqlschemausage.GetArgumentUsageInfo(&doc, p.executor.RouterSchema, preparedPlan) + out.argumentUsageInfo, err = graphqlschemausage.GetArgumentUsageInfo(&doc, p.executor.RouterSchema, ctx.variables, preparedPlan, ctx.remapVariables) if err != nil { return nil, err } diff --git a/router/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go b/router/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go index 5a000c7050..7ff1ce41e2 100644 --- a/router/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go +++ b/router/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go @@ -581,6 +581,9 @@ type ArgumentUsageInfo struct { NamedType string `protobuf:"bytes,4,opt,name=NamedType,proto3" json:"NamedType,omitempty"` // SubgraphIDs is the list of datasource IDs (e.g subgraph ID) that the argument is used from SubgraphIDs []string `protobuf:"bytes,5,rep,name=SubgraphIDs,proto3" json:"SubgraphIDs,omitempty"` + // IsNull indicates whether this argument was explicitly set to null + // This is critical for detecting breaking changes when optional arguments become required + IsNull bool 
`protobuf:"varint,6,opt,name=IsNull,proto3" json:"IsNull,omitempty"` } func (x *ArgumentUsageInfo) Reset() { @@ -650,6 +653,13 @@ func (x *ArgumentUsageInfo) GetSubgraphIDs() []string { return nil } +func (x *ArgumentUsageInfo) GetIsNull() bool { + if x != nil { + return x.IsNull + } + return false +} + type InputUsageInfo struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -667,6 +677,9 @@ type InputUsageInfo struct { EnumValues []string `protobuf:"bytes,5,rep,name=EnumValues,proto3" json:"EnumValues,omitempty"` // SubgraphIDs is the list of datasource IDs (e.g subgraph ID) that the input is used from SubgraphIDs []string `protobuf:"bytes,6,rep,name=SubgraphIDs,proto3" json:"SubgraphIDs,omitempty"` + // IsNull indicates whether this input was explicitly or implicitly null + // This is critical for detecting breaking changes when optional fields become required + IsNull bool `protobuf:"varint,7,opt,name=IsNull,proto3" json:"IsNull,omitempty"` } func (x *InputUsageInfo) Reset() { @@ -743,6 +756,13 @@ func (x *InputUsageInfo) GetSubgraphIDs() []string { return nil } +func (x *InputUsageInfo) GetIsNull() bool { + if x != nil { + return x.IsNull + } + return false +} + type PublishGraphQLRequestMetricsRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -1012,7 +1032,7 @@ var file_wg_cosmo_graphqlmetrics_v1_graphqlmetrics_proto_rawDesc = []byte{ 0x72, 0x65, 0x63, 0x74, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x16, 0x49, 0x6e, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, - 0x22, 0x99, 0x01, 0x0a, 0x11, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x55, 0x73, 0x61, + 0x22, 0xb1, 0x01, 0x0a, 0x11, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 
0x03, 0x28, 0x09, 0x52, 0x04, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x54, 0x79, @@ -1021,19 +1041,22 @@ var file_wg_cosmo_graphqlmetrics_v1_graphqlmetrics_proto_rawDesc = []byte{ 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x22, 0xb6, 0x01, 0x0a, - 0x0e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, - 0x12, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, - 0x61, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, - 0x14, 0x0a, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, - 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, - 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, - 0x79, 0x70, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, - 0x44, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, - 0x70, 0x68, 0x49, 0x44, 0x73, 0x22, 0x74, 0x0a, 0x23, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, + 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x12, 0x16, 0x0a, 0x06, + 0x49, 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x18, 0x06, 
0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x49, 0x73, + 0x4e, 0x75, 0x6c, 0x6c, 0x22, 0xce, 0x01, 0x0a, 0x0e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x55, 0x73, + 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x18, + 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x54, + 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x54, + 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1c, 0x0a, + 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x45, + 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x53, + 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x09, + 0x52, 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x12, 0x16, 0x0a, + 0x06, 0x49, 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x49, + 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x22, 0x74, 0x0a, 0x23, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x4d, 0x0a, 0x0b, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x03, 0x28, diff --git a/router/pkg/graphqlschemausage/schemausage.go b/router/pkg/graphqlschemausage/schemausage.go index 59ab5db06b..7468b2a9a9 100644 --- a/router/pkg/graphqlschemausage/schemausage.go +++ b/router/pkg/graphqlschemausage/schemausage.go @@ -23,11 +23,29 @@ // 2. 
ARGUMENT: Correlate AST arguments with plan field paths // 3. INPUT: Build field→subgraph and variable→subgraph maps, then traverse variable values // -// Special handling: Variable remapping for normalized operations (e.g., $a → $criteria), -// null value skipping (nulls don't represent actual usage). +// # Input Null Tracking +// +// Input fields are ALWAYS tracked, even when null (explicit or implicit). This is critical for +// detecting breaking changes when optional fields become required. Each input usage includes an +// IsNull flag to indicate null propagation. When an input is null, the chain stops there—nested +// fields are not traversed since the parent is null. +// +// # Design Components +// +// The package uses dependency injection and separation of concerns: +// +// - pathBuilder: Reusable path stack operations for field traversal +// - nullValueDetector: Centralized null detection for values and variables with remapping support +// - subgraphMapper: Unified interface for field and variable → subgraph ID resolution +// - inputTypeResolver: Type system queries for input object field definitions +// - inputTraverser: Input traversal with implicit null tracking +// +// These components are composed by visitor types to provide clean, testable, and maintainable +// schema usage extraction. package graphqlschemausage import ( + "bytes" "strings" "github.com/wundergraph/astjson" @@ -40,6 +58,11 @@ import ( graphqlmetrics "github.com/wundergraph/cosmo/router/gen/proto/wg/cosmo/graphqlmetrics/v1" ) +// ============================================ +// Public API +// ============================================ + +// GetTypeFieldUsageInfo extracts type and field usage from the execution plan. 
func GetTypeFieldUsageInfo(operationPlan plan.Plan) []*TypeFieldUsageInfo { visitor := typeFieldUsageInfoVisitor{} switch p := operationPlan.(type) { @@ -51,12 +74,65 @@ func GetTypeFieldUsageInfo(operationPlan plan.Plan) []*TypeFieldUsageInfo { return visitor.typeFieldUsageInfo } +// GetArgumentUsageInfo extracts argument usage by correlating AST arguments with execution plan +// field paths. Includes null tracking for both inline and variable-based argument values. +func GetArgumentUsageInfo(operation, definition *ast.Document, variables *astjson.Value, operationPlan plan.Plan, remapVariables map[string]string) ([]*graphqlmetrics.ArgumentUsageInfo, error) { + subgraphMapper := newSubgraphMapper(operationPlan, operation, definition) + nullDetector := newNullValueDetector(operation, variables, remapVariables) + + walker := astvisitor.NewWalker(48) + visitor := &argumentUsageInfoVisitor{ + definition: definition, + operation: operation, + walker: &walker, + subgraphMapper: subgraphMapper, + nullDetector: nullDetector, + pathBuilder: newPathBuilder(8), + usage: make([]*graphqlmetrics.ArgumentUsageInfo, 0, 16), + currentFieldRef: -1, + providedArgumentsStack: make([]map[string]struct{}, 0, 8), + fieldEnclosingNodeStack: make([]ast.Node, 0, 8), + } + walker.RegisterEnterArgumentVisitor(visitor) + walker.RegisterEnterFieldVisitor(visitor) + walker.RegisterLeaveFieldVisitor(visitor) + rep := &operationreport.Report{} + walker.Walk(operation, definition, rep) + if rep.HasErrors() { + return nil, rep + } + + return visitor.usage, nil +} + +// GetInputUsageInfo extracts input usage by traversing variable values. Tracks both explicit +// nulls ({"field": null}) and implicit nulls (missing fields) for breaking change detection. +// Also tracks input usage for implicitly null input type arguments (arguments not provided). 
+func GetInputUsageInfo(operation, definition *ast.Document, variables *astjson.Value, operationPlan plan.Plan, remapVariables map[string]string) ([]*graphqlmetrics.InputUsageInfo, error) { + subgraphMapper := newSubgraphMapper(operationPlan, operation, definition) + traverser := newInputTraverser(definition, subgraphMapper) + nullDetector := newNullValueDetector(operation, variables, remapVariables) + + // Track input usage from variable definitions + for i := range operation.VariableDefinitions { + processVariableDefinition(traverser, operation, variables, nullDetector, i) + } + + // Track input usage from implicitly null input type arguments + collectImplicitArgumentInputUsage(operation, definition, subgraphMapper, traverser) + + return traverser.usage, nil +} + +// ============================================ +// Type Field Usage +// ============================================ + // An array of TypeFieldUsageInfo, with a method to convert it into a []*graphqlmetrics.TypeFieldUsageInfo type TypeFieldMetrics []*TypeFieldUsageInfo // IntoGraphQLMetrics converts the TypeFieldMetrics into a []*graphqlmetrics.TypeFieldUsageInfo func (t TypeFieldMetrics) IntoGraphQLMetrics() []*graphqlmetrics.TypeFieldUsageInfo { - // Pre-allocate slice with exact capacity metrics := make([]*graphqlmetrics.TypeFieldUsageInfo, len(t)) for i, info := range t { metrics[i] = info.IntoGraphQLMetrics() @@ -94,10 +170,7 @@ type typeFieldUsageInfoVisitor struct { func (p *typeFieldUsageInfoVisitor) visitNode(node resolve.Node, path []string) { switch t := node.(type) { case *resolve.Object: - // Pre-allocate the typeFieldUsageInfo slice with a reasonable capacity - // to reduce allocations during traversal if p.typeFieldUsageInfo == nil { - // Estimate: average query has ~20-50 fields p.typeFieldUsageInfo = make([]*TypeFieldUsageInfo, 0, 32) } @@ -106,7 +179,6 @@ func (p *typeFieldUsageInfoVisitor) visitNode(node resolve.Node, path []string) continue } - // create a new slice with exact 
capacity and copy elements pathCopy := make([]string, len(path)+1) copy(pathCopy, path) pathCopy[len(path)] = field.Info.Name @@ -134,12 +206,122 @@ func (p *typeFieldUsageInfoVisitor) visitNode(node resolve.Node, path []string) } } +// ============================================ +// Path Builder (Shared Infrastructure) +// ============================================ + +// pathBuilder provides reusable path stack operations for tracking field paths during traversal. +type pathBuilder struct { + stack []string +} + +func newPathBuilder(capacity int) *pathBuilder { + return &pathBuilder{stack: make([]string, 0, capacity)} +} + +func (p *pathBuilder) push(segment string) { + p.stack = append(p.stack, segment) +} + +func (p *pathBuilder) pop() { + if len(p.stack) > 0 { + p.stack = p.stack[:len(p.stack)-1] + } +} + +func (p *pathBuilder) copy() []string { + result := make([]string, len(p.stack)) + copy(result, p.stack) + return result +} + +func (p *pathBuilder) key() string { + return strings.Join(p.stack, ".") +} + +// ============================================ +// Null Value Detector (Shared Infrastructure) +// ============================================ + +// nullValueDetector handles null detection for inline values, variables, and name remapping. 
+type nullValueDetector struct { + operation *ast.Document + variables *astjson.Value + remapVariables map[string]string +} + +func newNullValueDetector(operation *ast.Document, variables *astjson.Value, remapVariables map[string]string) *nullValueDetector { + return &nullValueDetector{ + operation: operation, + variables: variables, + remapVariables: remapVariables, + } +} + +// isValueNull checks if an argument/variable value is null +func (n *nullValueDetector) isValueNull(value ast.Value) bool { + if value.Kind == ast.ValueKindNull { + return true + } + + if value.Kind == ast.ValueKindVariable && n.variables != nil { + varName := n.operation.VariableValueNameString(value.Ref) + return n.isVariableNull(varName) + } + + return false +} + +// isVariableNull checks if a variable (by name) has a null value +func (n *nullValueDetector) isVariableNull(varName string) bool { + originalVarName := n.getOriginalVariableName(varName) + jsonField := n.variables.Get(originalVarName) + return jsonField != nil && jsonField.Type() == astjson.TypeNull +} + +// getOriginalVariableName maps normalized variable names back to originals +func (n *nullValueDetector) getOriginalVariableName(varName string) string { + if n.remapVariables != nil { + if remapped, exists := n.remapVariables[varName]; exists { + return remapped + } + } + return varName +} + +// ============================================ +// Subgraph Mapper (Shared Infrastructure) +// ============================================ + +// subgraphMapper maps field paths and variable names to their subgraph IDs. 
+type subgraphMapper struct { + fieldToSubgraphs map[string][]string + variableToSubgraphs map[string][]string +} + +func newSubgraphMapper(operationPlan plan.Plan, operation, definition *ast.Document) *subgraphMapper { + mapper := &subgraphMapper{ + fieldToSubgraphs: buildFieldSubgraphIDMap(operationPlan), + } + mapper.variableToSubgraphs = buildVariableSubgraphMap(operation, definition, mapper.fieldToSubgraphs) + return mapper +} + +// getFieldSubgraphs returns subgraph IDs for a field path +func (s *subgraphMapper) getFieldSubgraphs(pathKey string) []string { + return s.fieldToSubgraphs[pathKey] +} + +// getVariableSubgraphs returns subgraph IDs for a variable +func (s *subgraphMapper) getVariableSubgraphs(varName string) []string { + return s.variableToSubgraphs[varName] +} + // buildFieldSubgraphIDMap extracts field → subgraph mappings from the execution plan. -// Returns a map where keys are dot-separated paths (e.g., "user.orders") and values are subgraph IDs. func buildFieldSubgraphIDMap(operationPlan plan.Plan) map[string][]string { collector := &subgraphIDCollector{ fieldMap: make(map[string][]string), - pathStack: make([]string, 0, 8), // Pre-allocate for typical depth + pathStack: make([]string, 0, 8), } switch p := operationPlan.(type) { case *plan.SynchronousResponsePlan: @@ -152,7 +334,7 @@ func buildFieldSubgraphIDMap(operationPlan plan.Plan) map[string][]string { type subgraphIDCollector struct { fieldMap map[string][]string - pathStack []string // Reusable path stack to avoid allocations + pathStack []string } func (c *subgraphIDCollector) collectFromNode(node resolve.Node) { @@ -162,16 +344,10 @@ func (c *subgraphIDCollector) collectFromNode(node resolve.Node) { if field.Info == nil { continue } - // Push field name onto stack c.pathStack = append(c.pathStack, field.Info.Name) - - // Store the subgraph IDs for this field path - pathKey := pathToKey(c.pathStack) + pathKey := strings.Join(c.pathStack, ".") c.fieldMap[pathKey] = 
field.Info.Source.IDs - c.collectFromNode(field.Value) - - // Pop field name from stack c.pathStack = c.pathStack[:len(c.pathStack)-1] } case *resolve.Array: @@ -179,14 +355,7 @@ func (c *subgraphIDCollector) collectFromNode(node resolve.Node) { } } -// pathToKey converts a path slice to a string key for map lookups. -func pathToKey(path []string) string { - return strings.Join(path, ".") -} - // buildVariableSubgraphMap maps variable names to subgraph IDs by analyzing which fields use them. -// Walks the operation AST to find variable usage (e.g., user(id: $userId)), then looks up -// the field's subgraph IDs from fieldSubgraphMap. Merges IDs if a variable is used by multiple fields. func buildVariableSubgraphMap(operation, definition *ast.Document, fieldSubgraphMap map[string][]string) map[string][]string { variableMap := make(map[string][]string) walker := astvisitor.NewWalker(48) @@ -196,7 +365,7 @@ func buildVariableSubgraphMap(operation, definition *ast.Document, fieldSubgraph definition: definition, fieldSubgraphMap: fieldSubgraphMap, variableMap: variableMap, - currentPath: make([]string, 0, 8), + pathBuilder: newPathBuilder(8), } walker.RegisterEnterFieldVisitor(collector) walker.RegisterLeaveFieldVisitor(collector) @@ -212,28 +381,21 @@ type variableSubgraphCollector struct { definition *ast.Document fieldSubgraphMap map[string][]string variableMap map[string][]string - currentPath []string + pathBuilder *pathBuilder } -// EnterField tracks the current field path for argument processing. func (v *variableSubgraphCollector) EnterField(ref int) { fieldName := v.operation.FieldNameString(ref) - v.currentPath = append(v.currentPath, fieldName) + v.pathBuilder.push(fieldName) } -// LeaveField pops the field from the path when leaving. 
func (v *variableSubgraphCollector) LeaveField(_ int) { - if len(v.currentPath) > 0 { - v.currentPath = v.currentPath[:len(v.currentPath)-1] - } + v.pathBuilder.pop() } -// EnterArgument detects variable usage and associates variables with subgraph IDs. -// For user(id: $userId), maps "userId" → subgraph IDs of "user" field. func (v *variableSubgraphCollector) EnterArgument(ref int) { arg := v.operation.Arguments[ref] - // Only process arguments that use variables (not inline values) if arg.Value.Kind != ast.ValueKindVariable { return } @@ -243,19 +405,13 @@ func (v *variableSubgraphCollector) EnterArgument(ref int) { return } - // Get subgraph IDs for the current field path - if len(v.currentPath) > 0 { - pathKey := pathToKey(v.currentPath) - if subgraphIDs, exists := v.fieldSubgraphMap[pathKey]; exists { - // Merge subgraph IDs for this variable - // (in case the variable is used by multiple fields from different subgraphs) - v.variableMap[varName] = mergeSubgraphIDs(v.variableMap[varName], subgraphIDs) - } + pathKey := v.pathBuilder.key() + if subgraphIDs, exists := v.fieldSubgraphMap[pathKey]; exists { + v.variableMap[varName] = mergeSubgraphIDs(v.variableMap[varName], subgraphIDs) } } // mergeSubgraphIDs combines two slices of subgraph IDs, removing duplicates. -// Used when a variable is used by fields from different subgraphs. 
func mergeSubgraphIDs(a, b []string) []string { if len(a) == 0 { return b @@ -284,50 +440,45 @@ func mergeSubgraphIDs(a, b []string) []string { return result } -func GetArgumentUsageInfo(operation, definition *ast.Document, operationPlan plan.Plan) ([]*graphqlmetrics.ArgumentUsageInfo, error) { - // Build a mapping of field paths to their subgraph IDs from the plan - subgraphIDMap := buildFieldSubgraphIDMap(operationPlan) - - walker := astvisitor.NewWalker(48) - visitor := &argumentUsageInfoVisitor{ - definition: definition, - operation: operation, - walker: &walker, - subgraphIDMap: subgraphIDMap, - // Pre-allocate with reasonable capacity to reduce allocations - usage: make([]*graphqlmetrics.ArgumentUsageInfo, 0, 16), - } - walker.RegisterEnterArgumentVisitor(visitor) - walker.RegisterEnterFieldVisitor(visitor) - walker.RegisterLeaveFieldVisitor(visitor) - rep := &operationreport.Report{} - walker.Walk(operation, definition, rep) - if rep.HasErrors() { - return nil, rep - } - return visitor.usage, nil -} +// ============================================ +// Argument Usage Visitor +// ============================================ type argumentUsageInfoVisitor struct { - walker *astvisitor.Walker - definition, operation *ast.Document - fieldEnclosingNode ast.Node - subgraphIDMap map[string][]string - currentPath []string - usage []*graphqlmetrics.ArgumentUsageInfo + walker *astvisitor.Walker + definition *ast.Document + operation *ast.Document + fieldEnclosingNodeStack []ast.Node // Stack to track enclosing nodes for nested fields + subgraphMapper *subgraphMapper + nullDetector *nullValueDetector + pathBuilder *pathBuilder + usage []*graphqlmetrics.ArgumentUsageInfo + currentFieldRef int + providedArgumentsStack []map[string]struct{} // Stack of maps to track which arguments were provided at each level } func (a *argumentUsageInfoVisitor) EnterField(ref int) { - a.fieldEnclosingNode = a.walker.EnclosingTypeDefinition - // Track the current field path for subgraph ID 
lookup + // Push current enclosing node onto stack + a.fieldEnclosingNodeStack = append(a.fieldEnclosingNodeStack, a.walker.EnclosingTypeDefinition) + a.currentFieldRef = ref + // Push nil - will lazily allocate map only if field has arguments + a.providedArgumentsStack = append(a.providedArgumentsStack, nil) fieldName := a.operation.FieldNameString(ref) - a.currentPath = append(a.currentPath, fieldName) + a.pathBuilder.push(fieldName) } -func (a *argumentUsageInfoVisitor) LeaveField(_ int) { - // Remove the current field from the path when leaving - if len(a.currentPath) > 0 { - a.currentPath = a.currentPath[:len(a.currentPath)-1] +func (a *argumentUsageInfoVisitor) LeaveField(ref int) { + // Track implicit null arguments (arguments defined in schema but not provided in operation) + a.trackImplicitNullArguments(ref) + a.pathBuilder.pop() + a.currentFieldRef = -1 + // Pop the enclosing node from stack + if len(a.fieldEnclosingNodeStack) > 0 { + a.fieldEnclosingNodeStack = a.fieldEnclosingNodeStack[:len(a.fieldEnclosingNodeStack)-1] + } + // Pop the provided arguments map + if len(a.providedArgumentsStack) > 0 { + a.providedArgumentsStack = a.providedArgumentsStack[:len(a.providedArgumentsStack)-1] } } @@ -337,195 +488,351 @@ func (a *argumentUsageInfoVisitor) EnterArgument(ref int) { if anc.Kind != ast.NodeKindField { return } + + // Track that this argument was provided in the current field's map + // Lazily allocate map only when first argument is encountered + if len(a.providedArgumentsStack) > 0 { + stackIdx := len(a.providedArgumentsStack) - 1 + if a.providedArgumentsStack[stackIdx] == nil { + a.providedArgumentsStack[stackIdx] = make(map[string]struct{}, 4) // Capacity hint: most fields have 1-4 args + } + a.providedArgumentsStack[stackIdx][string(argName)] = struct{}{} + } + + // Get enclosing node from top of stack + if len(a.fieldEnclosingNodeStack) == 0 { + return + } + fieldEnclosingNode := a.fieldEnclosingNodeStack[len(a.fieldEnclosingNodeStack)-1] + 
fieldName := a.operation.FieldNameBytes(anc.Ref) - enclosingTypeName := a.definition.NodeNameBytes(a.fieldEnclosingNode) - argDef := a.definition.NodeFieldDefinitionArgumentDefinitionByName(a.fieldEnclosingNode, fieldName, argName) + enclosingTypeName := a.definition.NodeNameBytes(fieldEnclosingNode) + argDef := a.definition.NodeFieldDefinitionArgumentDefinitionByName(fieldEnclosingNode, fieldName, argName) if argDef == -1 { return } argType := a.definition.InputValueDefinitionType(argDef) typeName := a.definition.ResolveTypeNameBytes(argType) - // Look up subgraph IDs for the current field path - var subgraphIDs []string - if len(a.currentPath) > 0 { - pathKey := pathToKey(a.currentPath) - if ids, exists := a.subgraphIDMap[pathKey]; exists { - subgraphIDs = ids - } - } + // Get subgraph IDs using the path builder + subgraphIDs := a.subgraphMapper.getFieldSubgraphs(a.pathBuilder.key()) + + // Check if argument is null using null detector + arg := a.operation.Arguments[ref] + isNull := a.nullDetector.isValueNull(arg.Value) a.usage = append(a.usage, &graphqlmetrics.ArgumentUsageInfo{ Path: []string{string(fieldName), string(argName)}, TypeName: string(enclosingTypeName), NamedType: string(typeName), SubgraphIDs: subgraphIDs, + IsNull: isNull, }) } -// GetInputUsageInfo extracts usage for input types and fields from variable values. -// Builds field/variable → subgraph mappings, then traverses variable values to apply subgraph IDs. -// Handles nested inputs, scalars, and variable name remapping (e.g., normalized $a → original $criteria). -// Skips null values as they don't represent actual usage. 
-func GetInputUsageInfo(operation, definition *ast.Document, variables *astjson.Value, operationPlan plan.Plan, remapVariables map[string]string) ([]*graphqlmetrics.InputUsageInfo, error) { - // Build a mapping of field paths to their subgraph IDs from the plan - subgraphIDMap := buildFieldSubgraphIDMap(operationPlan) +// trackImplicitNullArguments tracks arguments defined in the schema but not provided in the operation. +// This is critical for breaking change detection - we need to know if arguments are being used or not. +func (a *argumentUsageInfoVisitor) trackImplicitNullArguments(fieldRef int) { + // Get enclosing node from top of stack + if len(a.fieldEnclosingNodeStack) == 0 { + return + } + fieldEnclosingNode := a.fieldEnclosingNodeStack[len(a.fieldEnclosingNodeStack)-1] - // Build a mapping of variables to the fields that use them and their subgraph IDs - variableSubgraphMap := buildVariableSubgraphMap(operation, definition, subgraphIDMap) + if fieldEnclosingNode.Kind == ast.NodeKindUnknown { + return + } - visitor := &inputUsageInfoVisitor{ - operation: operation, - definition: definition, - variables: variables, - variableSubgraphMap: variableSubgraphMap, - remapVariables: remapVariables, - // Pre-allocate with reasonable capacity to reduce allocations - usage: make([]*graphqlmetrics.InputUsageInfo, 0, 16), + // Skip introspection fields + fieldName := a.operation.FieldNameBytes(fieldRef) + if len(fieldName) > 1 && fieldName[0] == '_' && fieldName[1] == '_' { + return } - for i := range operation.VariableDefinitions { - visitor.EnterVariableDefinition(i) + enclosingTypeName := a.definition.NodeNameBytes(fieldEnclosingNode) + + // Get subgraph IDs for this field + subgraphIDs := a.subgraphMapper.getFieldSubgraphs(a.pathBuilder.key()) + + // Find all arguments defined for this field in the schema + var argumentRefs []int + switch fieldEnclosingNode.Kind { + case ast.NodeKindObjectTypeDefinition: + fieldDefs := 
a.definition.ObjectTypeDefinitions[fieldEnclosingNode.Ref].FieldsDefinition.Refs + for _, fieldDefRef := range fieldDefs { + fieldDef := a.definition.FieldDefinitions[fieldDefRef] + if bytes.Equal(a.definition.FieldDefinitionNameBytes(fieldDefRef), fieldName) { + if fieldDef.HasArgumentsDefinitions { + argumentRefs = fieldDef.ArgumentsDefinition.Refs + } + break + } + } + case ast.NodeKindInterfaceTypeDefinition: + fieldDefs := a.definition.InterfaceTypeDefinitions[fieldEnclosingNode.Ref].FieldsDefinition.Refs + for _, fieldDefRef := range fieldDefs { + fieldDef := a.definition.FieldDefinitions[fieldDefRef] + if bytes.Equal(a.definition.FieldDefinitionNameBytes(fieldDefRef), fieldName) { + if fieldDef.HasArgumentsDefinitions { + argumentRefs = fieldDef.ArgumentsDefinition.Refs + } + break + } + } } - return visitor.usage, nil + // Get the provided arguments map for this field level + var providedArguments map[string]struct{} + if len(a.providedArgumentsStack) > 0 { + providedArguments = a.providedArgumentsStack[len(a.providedArgumentsStack)-1] + } + + // Track arguments that are defined but not provided (implicitly null) + for _, argRef := range argumentRefs { + argName := string(a.definition.InputValueDefinitionNameString(argRef)) + + // Skip if this argument was already provided + if providedArguments != nil { + if _, provided := providedArguments[argName]; provided { + continue + } + } + + argType := a.definition.InputValueDefinitionType(argRef) + typeName := a.definition.ResolveTypeNameString(argType) + + // Track argument as implicitly null + a.usage = append(a.usage, &graphqlmetrics.ArgumentUsageInfo{ + Path: []string{string(fieldName), argName}, + TypeName: string(enclosingTypeName), + NamedType: typeName, + SubgraphIDs: subgraphIDs, + IsNull: true, // Implicitly null (not provided) + }) + } } -type inputUsageInfoVisitor struct { - definition, operation *ast.Document - variables *astjson.Value - variableSubgraphMap map[string][]string - remapVariables 
map[string]string - currentVariableName string - usage []*graphqlmetrics.InputUsageInfo +// ============================================ +// Input Type Resolver +// ============================================ + +// inputTypeResolver resolves input object field definitions from the schema. +type inputTypeResolver struct { + definition *ast.Document } -func (v *inputUsageInfoVisitor) EnterVariableDefinition(ref int) { - varTypeRef := v.operation.VariableDefinitions[ref].Type - varTypeName := v.operation.ResolveTypeNameString(varTypeRef) +func newInputTypeResolver(definition *ast.Document) *inputTypeResolver { + return &inputTypeResolver{definition: definition} +} - // Get the variable name from the (possibly normalized/minified) operation AST - // After normalization, variable names may be shortened: $criteria → $a - normalizedVarName := v.operation.VariableValueNameString(v.operation.VariableDefinitions[ref].VariableValue.Ref) +// resolveInputFields returns all field definitions for an input object type +func (r *inputTypeResolver) resolveInputFields(typeName string) []inputFieldInfo { + defNode, ok := r.definition.NodeByNameStr(typeName) + if !ok || defNode.Kind != ast.NodeKindInputObjectTypeDefinition { + return nil + } - // Map the normalized name back to the original if remapping is available - // The variables JSON always uses original names, but the AST uses normalized names - // Example: AST has "$a", remapVariables["a"] = "criteria", JSON has {"criteria": {...}} - originalVarName := normalizedVarName - if v.remapVariables != nil { - if remapped, exists := v.remapVariables[normalizedVarName]; exists { - originalVarName = remapped - } + inputObjectDef := r.definition.InputObjectTypeDefinitions[defNode.Ref] + fields := make([]inputFieldInfo, 0, len(inputObjectDef.InputFieldsDefinition.Refs)) + + for _, fieldRef := range inputObjectDef.InputFieldsDefinition.Refs { + fieldDef := r.definition.InputValueDefinitions[fieldRef] + fields = append(fields, 
inputFieldInfo{ + name: string(r.definition.Input.ByteSlice(fieldDef.Name)), + typeName: r.definition.ResolveTypeNameString(fieldDef.Type), + isList: r.definition.TypeIsList(fieldDef.Type), + }) } - // Look up the variable value using the original name - jsonField := v.variables.Get(originalVarName) - if jsonField == nil { + return fields +} + +// getNodeRef returns the node ref for a type by name +func (r *inputTypeResolver) getNodeRef(typeName string) int { + if node, ok := r.definition.NodeByNameStr(typeName); ok { + return node.Ref + } + return -1 +} + +// inputFieldInfo represents an input object field's name, type, and list indicator. +type inputFieldInfo struct { + name string + typeName string + isList bool +} + +// ============================================ +// Input Traverser +// ============================================ + +// inputTraverser traverses JSON variable values to extract input usage metrics. +// Tracks explicit nulls, implicit nulls (missing fields), and enum values. +type inputTraverser struct { + definition *ast.Document + typeResolver *inputTypeResolver + subgraphMapper *subgraphMapper + currentVariableName string + usage []*graphqlmetrics.InputUsageInfo +} + +func newInputTraverser(definition *ast.Document, subgraphMapper *subgraphMapper) *inputTraverser { + return &inputTraverser{ + definition: definition, + typeResolver: newInputTypeResolver(definition), + subgraphMapper: subgraphMapper, + usage: make([]*graphqlmetrics.InputUsageInfo, 0, 16), + } +} + +// traverse handles input value traversal, dispatching to specialized handlers by type kind. +// Implements null propagation: when isNull is true, tracking stops at this level. 
+func (t *inputTraverser) traverse(jsonValue *astjson.Value, fieldName, typeName, parentTypeName string, isNull bool) { + usageInfo := t.createUsageInfo(fieldName, typeName, parentTypeName, isNull) + + defNode, ok := t.definition.NodeByNameStr(typeName) + if !ok { + // Built-in scalar + t.appendUniqueUsage(usageInfo) return } - // Skip null values - they don't represent actual schema usage - if jsonField.Type() == astjson.TypeNull { + // If null, track and stop propagation + if isNull { + t.appendUniqueUsage(usageInfo) return } - // Use the normalized name for subgraph ID lookup (it matches the AST structure) - v.currentVariableName = normalizedVarName - v.traverseVariable(jsonField, originalVarName, varTypeName, "") -} + // Dispatch based on type kind + switch defNode.Kind { + case ast.NodeKindInputObjectTypeDefinition: + t.traverseInputObject(jsonValue, fieldName, typeName, parentTypeName, defNode, usageInfo) + case ast.NodeKindEnumTypeDefinition: + t.traverseEnum(jsonValue, usageInfo) + case ast.NodeKindScalarTypeDefinition: + // Custom scalar - just track + } -// traverseVariable recursively processes variable values, tracking input types and fields. -// Handles scalars, enums, input objects, and arrays. SubgraphIDs inherited from variableSubgraphMap. -func (v *inputUsageInfoVisitor) traverseVariable(jsonValue *astjson.Value, fieldName, typeName, parentTypeName string) { - defNode, ok := v.definition.NodeByNameStr(typeName) + t.appendUniqueUsage(usageInfo) +} - usageInfo := &graphqlmetrics.InputUsageInfo{ +// createUsageInfo builds usage info with path, type names, and subgraph IDs. 
+func (t *inputTraverser) createUsageInfo(fieldName, typeName, parentTypeName string, isNull bool) *graphqlmetrics.InputUsageInfo { + info := &graphqlmetrics.InputUsageInfo{ NamedType: typeName, + IsNull: isNull, } + if parentTypeName != "" { - usageInfo.TypeName = parentTypeName - // Pre-allocate Path slice with exact capacity - usageInfo.Path = []string{parentTypeName, fieldName} + info.TypeName = parentTypeName + info.Path = []string{parentTypeName, fieldName} + } else { + // For root input types, set Path to identify the type itself + info.Path = []string{typeName} + } + + // Get subgraph IDs + if t.currentVariableName != "" { + info.SubgraphIDs = t.subgraphMapper.getVariableSubgraphs(t.currentVariableName) } - // Get subgraph IDs for this variable from the mapping built in STEP 2 - // All fields in this variable inherit the same subgraph IDs - if v.currentVariableName != "" { - if subgraphIDs, exists := v.variableSubgraphMap[v.currentVariableName]; exists { - usageInfo.SubgraphIDs = subgraphIDs + return info +} + +// traverseInputObject handles input object traversal with implicit null tracking +func (t *inputTraverser) traverseInputObject(jsonValue *astjson.Value, fieldName, typeName, parentTypeName string, defNode ast.Node, usageInfo *graphqlmetrics.InputUsageInfo) { + switch jsonValue.Type() { + case astjson.TypeArray: + for _, arrayValue := range jsonValue.GetArray() { + t.traverse(arrayValue, fieldName, typeName, parentTypeName, false) } + case astjson.TypeObject: + t.processObjectFields(jsonValue, typeName, usageInfo.SubgraphIDs) } +} - // If the type is not found in the definition (e.g., built-in scalars like Boolean, String, Int), - // we still want to track its usage. - // Built-in scalars don't have type definitions in the schema document. - if !ok { - // This is likely a built-in scalar type, track it and return - v.appendUniqueUsage(usageInfo) +// processObjectFields processes present fields and tracks implicit nulls (missing fields). 
+func (t *inputTraverser) processObjectFields(jsonValue *astjson.Value, parentTypeName string, subgraphIDs []string) { + o := jsonValue.GetObject() + presentFields := make(map[string]bool, 8) // Capacity hint: most input objects have <8 fields + + // Process present fields + o.Visit(func(key []byte, value *astjson.Value) { + keyStr := string(key) + presentFields[keyStr] = true + t.processField(keyStr, value, parentTypeName) + }) + + // Process missing fields (implicit nulls) + allFields := t.typeResolver.resolveInputFields(parentTypeName) + for _, fieldInfo := range allFields { + if !presentFields[fieldInfo.name] { + t.trackImplicitNull(fieldInfo, parentTypeName, subgraphIDs) + } + } +} + +// processField handles a single field from the JSON object +func (t *inputTraverser) processField(fieldName string, value *astjson.Value, parentTypeName string) { + nodeRef := t.typeResolver.getNodeRef(parentTypeName) + if nodeRef == -1 { return } - switch defNode.Kind { - case ast.NodeKindInputObjectTypeDefinition: - switch jsonValue.Type() { - case astjson.TypeArray: - for _, arrayValue := range jsonValue.GetArray() { - v.traverseVariable(arrayValue, fieldName, typeName, parentTypeName) - } - case astjson.TypeObject: - o := jsonValue.GetObject() - o.Visit(func(key []byte, value *astjson.Value) { - // Skip null fields - they don't represent actual schema usage - if value.Type() == astjson.TypeNull { - return - } + fieldRef := t.definition.InputObjectTypeDefinitionInputValueDefinitionByName(nodeRef, []byte(fieldName)) + if fieldRef == -1 { + return + } - fieldRef := v.definition.InputObjectTypeDefinitionInputValueDefinitionByName(defNode.Ref, key) - if fieldRef == -1 { - return - } - fieldTypeName := v.definition.ResolveTypeNameString(v.definition.InputValueDefinitions[fieldRef].Type) - if v.definition.TypeIsList(v.definition.InputValueDefinitions[fieldRef].Type) { - for _, arrayValue := range value.GetArray() { - v.traverseVariable(arrayValue, string(key), fieldTypeName, 
typeName) - } - } else { - v.traverseVariable(value, string(key), fieldTypeName, typeName) - } - }) - } + fieldDef := t.definition.InputValueDefinitions[fieldRef] + fieldTypeName := t.definition.ResolveTypeNameString(fieldDef.Type) + fieldIsNull := value.Type() == astjson.TypeNull - case ast.NodeKindEnumTypeDefinition: - switch jsonValue.Type() { - case astjson.TypeString: - usageInfo.EnumValues = []string{string(jsonValue.GetStringBytes())} - case astjson.TypeArray: - arr := jsonValue.GetArray() - // Pre-allocate EnumValues slice with exact capacity - usageInfo.EnumValues = make([]string, len(arr)) - for i, arrayValue := range arr { - usageInfo.EnumValues[i] = string(arrayValue.GetStringBytes()) - } + if t.definition.TypeIsList(fieldDef.Type) { + for _, arrayValue := range value.GetArray() { + t.traverse(arrayValue, fieldName, fieldTypeName, parentTypeName, false) } - case ast.NodeKindScalarTypeDefinition: - // Custom scalar types defined in the schema (e.g., DateTime, JSON, Upload) - // Just track the usage, no special handling needed since we can't inspect - // the internal structure of custom scalars + } else { + t.traverse(value, fieldName, fieldTypeName, parentTypeName, fieldIsNull) + } +} + +// trackImplicitNull creates usage info for fields not present in JSON (implicitly null). 
+func (t *inputTraverser) trackImplicitNull(fieldInfo inputFieldInfo, parentTypeName string, subgraphIDs []string) { + implicitUsageInfo := &graphqlmetrics.InputUsageInfo{ + NamedType: fieldInfo.typeName, + TypeName: parentTypeName, + Path: []string{parentTypeName, fieldInfo.name}, + IsNull: true, + SubgraphIDs: subgraphIDs, } + t.appendUniqueUsage(implicitUsageInfo) +} - v.appendUniqueUsage(usageInfo) +// traverseEnum handles enum value extraction +func (t *inputTraverser) traverseEnum(jsonValue *astjson.Value, usageInfo *graphqlmetrics.InputUsageInfo) { + switch jsonValue.Type() { + case astjson.TypeString: + usageInfo.EnumValues = []string{string(jsonValue.GetStringBytes())} + case astjson.TypeArray: + arr := jsonValue.GetArray() + usageInfo.EnumValues = make([]string, len(arr)) + for i, arrayValue := range arr { + usageInfo.EnumValues[i] = string(arrayValue.GetStringBytes()) + } + } } -func (v *inputUsageInfoVisitor) appendUniqueUsage(info *graphqlmetrics.InputUsageInfo) { - for _, u := range v.usage { - if v.infoEquals(u, info) { +func (t *inputTraverser) appendUniqueUsage(info *graphqlmetrics.InputUsageInfo) { + for _, u := range t.usage { + if t.infoEquals(u, info) { return } } - v.usage = append(v.usage, info) + t.usage = append(t.usage, info) } -func (v *inputUsageInfoVisitor) infoEquals(a, b *graphqlmetrics.InputUsageInfo) bool { +func (t *inputTraverser) infoEquals(a, b *graphqlmetrics.InputUsageInfo) bool { if a.Count != b.Count { return false } @@ -535,6 +842,9 @@ func (v *inputUsageInfoVisitor) infoEquals(a, b *graphqlmetrics.InputUsageInfo) if a.TypeName != b.TypeName { return false } + if a.IsNull != b.IsNull { + return false + } if len(a.Path) != len(b.Path) { return false } @@ -561,3 +871,189 @@ func (v *inputUsageInfoVisitor) infoEquals(a, b *graphqlmetrics.InputUsageInfo) } return true } + +// ============================================ +// Variable Definition Processing +// ============================================ + +// 
processVariableDefinition processes a variable definition and initiates input traversal. +func processVariableDefinition(traverser *inputTraverser, operation *ast.Document, variables *astjson.Value, nullDetector *nullValueDetector, ref int) { + varDef := operation.VariableDefinitions[ref] + varTypeRef := varDef.Type + varTypeName := operation.ResolveTypeNameString(varTypeRef) + + // Get normalized variable name from AST + normalizedVarName := operation.VariableValueNameString(varDef.VariableValue.Ref) + + // Map back to original name for JSON lookup + originalVarName := nullDetector.getOriginalVariableName(normalizedVarName) + + // Look up the variable value + jsonField := variables.Get(originalVarName) + if jsonField == nil { + return + } + + // Use normalized name for subgraph lookup + traverser.currentVariableName = normalizedVarName + + // Always track input usage, even when null + isNull := jsonField.Type() == astjson.TypeNull + traverser.traverse(jsonField, originalVarName, varTypeName, "", isNull) +} + +// collectImplicitArgumentInputUsage walks the operation and tracks input usage for +// implicitly null input type arguments (arguments defined in schema but not provided in operation). 
+func collectImplicitArgumentInputUsage(operation, definition *ast.Document, subgraphMapper *subgraphMapper, traverser *inputTraverser) { + walker := astvisitor.NewWalker(48) + collector := &implicitArgumentInputCollector{ + walker: &walker, + definition: definition, + operation: operation, + subgraphMapper: subgraphMapper, + traverser: traverser, + pathBuilder: newPathBuilder(8), + argumentsStack: make([]map[string]struct{}, 0, 8), + enclosingStack: make([]ast.Node, 0, 8), + } + walker.RegisterEnterFieldVisitor(collector) + walker.RegisterLeaveFieldVisitor(collector) + walker.RegisterEnterArgumentVisitor(collector) + rep := &operationreport.Report{} + walker.Walk(operation, definition, rep) +} + +// implicitArgumentInputCollector collects input usage for implicitly null input type arguments +type implicitArgumentInputCollector struct { + walker *astvisitor.Walker + definition *ast.Document + operation *ast.Document + subgraphMapper *subgraphMapper + traverser *inputTraverser + pathBuilder *pathBuilder + argumentsStack []map[string]struct{} // Track provided arguments per field + enclosingStack []ast.Node +} + +func (c *implicitArgumentInputCollector) EnterField(ref int) { + c.enclosingStack = append(c.enclosingStack, c.walker.EnclosingTypeDefinition) + c.argumentsStack = append(c.argumentsStack, nil) + fieldName := c.operation.FieldNameString(ref) + c.pathBuilder.push(fieldName) +} + +func (c *implicitArgumentInputCollector) LeaveField(ref int) { + // Check for implicit null input type arguments + c.trackImplicitInputTypeArguments(ref) + + c.pathBuilder.pop() + if len(c.enclosingStack) > 0 { + c.enclosingStack = c.enclosingStack[:len(c.enclosingStack)-1] + } + if len(c.argumentsStack) > 0 { + c.argumentsStack = c.argumentsStack[:len(c.argumentsStack)-1] + } +} + +func (c *implicitArgumentInputCollector) EnterArgument(ref int) { + argName := c.operation.ArgumentNameBytes(ref) + anc := c.walker.Ancestors[len(c.walker.Ancestors)-1] + if anc.Kind != ast.NodeKindField 
{ + return + } + + // Lazily allocate map and track provided argument + if len(c.argumentsStack) > 0 { + stackIdx := len(c.argumentsStack) - 1 + if c.argumentsStack[stackIdx] == nil { + c.argumentsStack[stackIdx] = make(map[string]struct{}, 4) + } + c.argumentsStack[stackIdx][string(argName)] = struct{}{} + } +} + +func (c *implicitArgumentInputCollector) trackImplicitInputTypeArguments(fieldRef int) { + if len(c.enclosingStack) == 0 { + return + } + enclosingNode := c.enclosingStack[len(c.enclosingStack)-1] + if enclosingNode.Kind == ast.NodeKindUnknown { + return + } + + fieldName := c.operation.FieldNameBytes(fieldRef) + // Skip introspection fields + if len(fieldName) > 1 && fieldName[0] == '_' && fieldName[1] == '_' { + return + } + + // Get subgraph IDs for this field + subgraphIDs := c.subgraphMapper.getFieldSubgraphs(c.pathBuilder.key()) + + // Find all arguments defined for this field + var argumentRefs []int + switch enclosingNode.Kind { + case ast.NodeKindObjectTypeDefinition: + fieldDefs := c.definition.ObjectTypeDefinitions[enclosingNode.Ref].FieldsDefinition.Refs + for _, fieldDefRef := range fieldDefs { + fieldDef := c.definition.FieldDefinitions[fieldDefRef] + if bytes.Equal(c.definition.FieldDefinitionNameBytes(fieldDefRef), fieldName) { + if fieldDef.HasArgumentsDefinitions { + argumentRefs = fieldDef.ArgumentsDefinition.Refs + } + break + } + } + case ast.NodeKindInterfaceTypeDefinition: + fieldDefs := c.definition.InterfaceTypeDefinitions[enclosingNode.Ref].FieldsDefinition.Refs + for _, fieldDefRef := range fieldDefs { + fieldDef := c.definition.FieldDefinitions[fieldDefRef] + if bytes.Equal(c.definition.FieldDefinitionNameBytes(fieldDefRef), fieldName) { + if fieldDef.HasArgumentsDefinitions { + argumentRefs = fieldDef.ArgumentsDefinition.Refs + } + break + } + } + } + + // Get provided arguments for this field + var providedArgs map[string]struct{} + if len(c.argumentsStack) > 0 { + providedArgs = c.argumentsStack[len(c.argumentsStack)-1] + } 
+ + // Track input usage for implicitly null input type arguments + for _, argRef := range argumentRefs { + argName := string(c.definition.InputValueDefinitionNameString(argRef)) + + // Skip if argument was provided + if providedArgs != nil { + if _, provided := providedArgs[argName]; provided { + continue + } + } + + argType := c.definition.InputValueDefinitionType(argRef) + typeName := c.definition.ResolveTypeNameString(argType) + + // Check if this is an input object type + defNode, ok := c.definition.NodeByNameStr(typeName) + if !ok { + continue + } + + // Only track input object types (not scalars or enums) + if defNode.Kind != ast.NodeKindInputObjectTypeDefinition { + continue + } + + // Add input usage for the implicitly null input type + c.traverser.appendUniqueUsage(&graphqlmetrics.InputUsageInfo{ + NamedType: typeName, + Path: []string{typeName}, + SubgraphIDs: subgraphIDs, + IsNull: true, // Implicitly null (not provided) + }) + } +} diff --git a/router/pkg/graphqlschemausage/schemausage_bench_test.go b/router/pkg/graphqlschemausage/schemausage_bench_test.go index 87c0f9ce1e..d2118cb18e 100644 --- a/router/pkg/graphqlschemausage/schemausage_bench_test.go +++ b/router/pkg/graphqlschemausage/schemausage_bench_test.go @@ -119,13 +119,13 @@ func BenchmarkGetTypeFieldUsageInfo(b *testing.B) { // BenchmarkGetArgumentUsageInfo measures memory allocations when extracting argument usage func BenchmarkGetArgumentUsageInfo(b *testing.B) { - generatedPlan, operation, definition, _ := setupBenchmark(b) + generatedPlan, operation, definition, variables := setupBenchmark(b) b.ResetTimer() b.ReportAllocs() for i := 0; i < b.N; i++ { - result, err := GetArgumentUsageInfo(operation, definition, generatedPlan) + result, err := GetArgumentUsageInfo(operation, definition, variables, generatedPlan, nil) if err != nil { b.Fatal(err) } @@ -179,7 +179,7 @@ func BenchmarkSchemaUsageEndToEnd(b *testing.B) { _ = TypeFieldMetrics(typeFieldUsage).IntoGraphQLMetrics() // Extract 
argument usage - argUsage, err := GetArgumentUsageInfo(operation, definition, generatedPlan) + argUsage, err := GetArgumentUsageInfo(operation, definition, variables, generatedPlan, nil) if err != nil { b.Fatal(err) } @@ -316,7 +316,7 @@ func BenchmarkSchemaUsageWithManyFields(b *testing.B) { typeFieldUsage := GetTypeFieldUsageInfo(generatedPlan) // Extract argument usage - argUsage, err := GetArgumentUsageInfo(operation, definition, generatedPlan) + argUsage, err := GetArgumentUsageInfo(operation, definition, variables, generatedPlan, nil) if err != nil { b.Fatal(err) } @@ -327,6 +327,7 @@ func BenchmarkSchemaUsageWithManyFields(b *testing.B) { b.Fatal(err) } + // Prevent compiler optimization _ = typeFieldUsage _ = argUsage _ = inputUsage diff --git a/router/pkg/graphqlschemausage/schemausage_test.go b/router/pkg/graphqlschemausage/schemausage_test.go index 39df6da912..c37cfccbe7 100644 --- a/router/pkg/graphqlschemausage/schemausage_test.go +++ b/router/pkg/graphqlschemausage/schemausage_test.go @@ -210,7 +210,7 @@ func TestGetSchemaUsageInfo(t *testing.T) { assert.NoError(t, err) fieldUsageInfo := GetTypeFieldUsageInfo(generatedPlan) - argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, generatedPlan) + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, merged, generatedPlan, nil) assert.NoError(t, err) inputUsageInfo, err := GetInputUsageInfo(&op, &def, merged, generatedPlan, nil) assert.NoError(t, err) @@ -222,7 +222,7 @@ func TestGetSchemaUsageInfo(t *testing.T) { } subscriptionFieldUsageInfo := GetTypeFieldUsageInfo(subscription) - subscriptionArgumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, subscription) + subscriptionArgumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, merged, subscription, nil) assert.NoError(t, err) subscriptionInputUsageInfo, err := GetInputUsageInfo(&op, &def, merged, subscription, nil) assert.NoError(t, err) @@ -288,55 +288,65 @@ func TestGetSchemaUsageInfo(t *testing.T) { NamedType: "String", Path: 
[]string{"searchResults", "name"}, SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { TypeName: "Query", NamedType: "SearchFilter", Path: []string{"searchResults", "filter"}, SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { TypeName: "Query", NamedType: "SearchFilter", Path: []string{"searchResults", "filter2"}, SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { TypeName: "Query", NamedType: "Episode", Path: []string{"searchResults", "enumValue"}, SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { TypeName: "Query", NamedType: "Episode", Path: []string{"searchResults", "enumList"}, SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { TypeName: "Query", NamedType: "Episode", Path: []string{"searchResults", "enumList2"}, SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { TypeName: "Query", NamedType: "SearchFilter", Path: []string{"searchResults", "filterList"}, SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { TypeName: "Human", NamedType: "String", Path: []string{"inlineName", "name"}, SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, } expectedInputUsageInfo := []graphqlmetricsv1.InputUsageInfo{ { NamedType: "String", + Path: []string{"String"}, SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { NamedType: "Episode", @@ -344,26 +354,50 @@ func TestGetSchemaUsageInfo(t *testing.T) { EnumValues: []string{"NEWHOPE"}, Path: []string{"SearchFilter", "enumField"}, SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, + }, + { + // filter2 has enumField but excludeName is implicitly null + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "excludeName"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: true, }, { NamedType: "SearchFilter", + Path: []string{"SearchFilter"}, SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { 
NamedType: "Episode", + Path: []string{"Episode"}, EnumValues: []string{"EMPIRE"}, SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { NamedType: "Episode", + Path: []string{"Episode"}, EnumValues: []string{"JEDI", "EMPIRE", "NEWHOPE"}, SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { NamedType: "String", TypeName: "SearchFilter", Path: []string{"SearchFilter", "excludeName"}, SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, + }, + { + // filterList[0] has excludeName but enumField is implicitly null + NamedType: "Episode", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "enumField"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: true, }, { NamedType: "Episode", @@ -371,11 +405,14 @@ func TestGetSchemaUsageInfo(t *testing.T) { EnumValues: []string{"JEDI"}, Path: []string{"SearchFilter", "enumField"}, SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { NamedType: "Episode", + Path: []string{"Episode"}, EnumValues: []string{"JEDI", "EMPIRE"}, SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, } @@ -472,7 +509,7 @@ func TestGetSchemaUsageInfoInterfaces(t *testing.T) { } fieldUsageInfo := GetTypeFieldUsageInfo(generatedPlan) - argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, generatedPlan) + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, astjson.MustParse(`{}`), generatedPlan, nil) assert.NoError(t, err) inputUsageInfo, err := GetInputUsageInfo(&op, &def, astjson.MustParse(`{}`), generatedPlan, nil) assert.NoError(t, err) @@ -484,7 +521,7 @@ func TestGetSchemaUsageInfoInterfaces(t *testing.T) { } subscriptionFieldUsageInfo := GetTypeFieldUsageInfo(subscription) - subscriptionArgumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, subscription) + subscriptionArgumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, astjson.MustParse(`{}`), subscription, nil) assert.NoError(t, err) subscriptionInputUsageInfo, err := 
GetInputUsageInfo(&op, &def, astjson.MustParse(`{}`), subscription, nil) assert.NoError(t, err) @@ -527,7 +564,7 @@ func TestGetSchemaUsageInfoInterfaces(t *testing.T) { } } -// TestInputUsageWithNullVariables verifies that null variable values are not tracked as schema usage +// TestInputUsageWithNullVariables verifies that null variable values are tracked with IsNull flag func TestInputUsageWithNullVariables(t *testing.T) { schema := ` schema { @@ -605,11 +642,23 @@ func TestInputUsageWithNullVariables(t *testing.T) { inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) require.NoError(t, err) - // Should be empty because the variable value is null - assert.Empty(t, inputUsageInfo, "Null variable values should not be tracked as usage") + // Should track null value with IsNull flag set to true + expectedUsage := []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "EmployeeSearchInput", + Path: []string{"EmployeeSearchInput"}, + SubgraphIDs: []string{"employees-subgraph"}, + IsNull: true, + }, + } + + assert.Len(t, inputUsageInfo, len(expectedUsage), "Null variable values should be tracked with IsNull=true") + for i := range expectedUsage { + assert.JSONEq(t, prettyJSON(t, &expectedUsage[i]), prettyJSON(t, inputUsageInfo[i]), "inputUsageInfo[%d]", i) + } } -// TestInputUsageWithPartialNullFields verifies that null fields within input objects are not tracked +// TestInputUsageWithPartialNullFields verifies that null fields within input objects are tracked with IsNull flag func TestInputUsageWithPartialNullFields(t *testing.T) { schema := ` schema { @@ -688,21 +737,38 @@ func TestInputUsageWithPartialNullFields(t *testing.T) { inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) require.NoError(t, err) - // Should only track the input type and hasPets field, not the null fields + // Should track the input type, hasPets field, and null fields with IsNull flag expectedUsage := []graphqlmetricsv1.InputUsageInfo{ { 
NamedType: "Boolean", TypeName: "EmployeeSearchInput", Path: []string{"EmployeeSearchInput", "hasPets"}, SubgraphIDs: []string{"employees-subgraph"}, + IsNull: false, + }, + { + NamedType: "String", + TypeName: "EmployeeSearchInput", + Path: []string{"EmployeeSearchInput", "department"}, + SubgraphIDs: []string{"employees-subgraph"}, + IsNull: true, + }, + { + NamedType: "Int", + TypeName: "EmployeeSearchInput", + Path: []string{"EmployeeSearchInput", "minAge"}, + SubgraphIDs: []string{"employees-subgraph"}, + IsNull: true, }, { NamedType: "EmployeeSearchInput", + Path: []string{"EmployeeSearchInput"}, SubgraphIDs: []string{"employees-subgraph"}, + IsNull: false, }, } - assert.Len(t, inputUsageInfo, len(expectedUsage), "Should only track non-null fields") + assert.Len(t, inputUsageInfo, len(expectedUsage), "Should track all fields including null ones") for i := range expectedUsage { assert.JSONEq(t, prettyJSON(t, &expectedUsage[i]), prettyJSON(t, inputUsageInfo[i]), "inputUsageInfo[%d]", i) } @@ -794,22 +860,27 @@ func TestInputScalarFieldsInVariables(t *testing.T) { TypeName: "EmployeeSearchInput", Path: []string{"EmployeeSearchInput", "hasPets"}, SubgraphIDs: []string{"employees-subgraph"}, + IsNull: false, }, { NamedType: "Int", TypeName: "EmployeeSearchInput", Path: []string{"EmployeeSearchInput", "minAge"}, SubgraphIDs: []string{"employees-subgraph"}, + IsNull: false, }, { NamedType: "String", TypeName: "EmployeeSearchInput", Path: []string{"EmployeeSearchInput", "department"}, SubgraphIDs: []string{"employees-subgraph"}, + IsNull: false, }, { NamedType: "EmployeeSearchInput", + Path: []string{"EmployeeSearchInput"}, SubgraphIDs: []string{"employees-subgraph"}, + IsNull: false, }, } @@ -928,52 +999,62 @@ func TestInputNestedScalarFields(t *testing.T) { TypeName: "SearchFilter", Path: []string{"SearchFilter", "name"}, SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, }, { NamedType: "Int", TypeName: "SearchCriteria", Path: []string{"SearchCriteria", 
"minScore"}, SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, }, { NamedType: "Int", TypeName: "SearchCriteria", Path: []string{"SearchCriteria", "maxScore"}, SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, }, { NamedType: "Boolean", TypeName: "SearchCriteria", Path: []string{"SearchCriteria", "isActive"}, SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, }, { NamedType: "String", TypeName: "NestedCriteria", Path: []string{"NestedCriteria", "value"}, SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, }, { NamedType: "NestedCriteria", TypeName: "SearchCriteria", Path: []string{"SearchCriteria", "nested"}, SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, }, { NamedType: "SearchCriteria", TypeName: "SearchFilter", Path: []string{"SearchFilter", "criteria"}, SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, }, { NamedType: "String", TypeName: "SearchFilter", Path: []string{"SearchFilter", "tags"}, SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, }, { NamedType: "SearchFilter", + Path: []string{"SearchFilter"}, SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, }, } @@ -1108,7 +1189,7 @@ func TestMultipleSubgraphs(t *testing.T) { require.NoError(t, err) fieldUsageInfo := GetTypeFieldUsageInfo(generatedPlan) - argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, generatedPlan) + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) require.NoError(t, err) inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) require.NoError(t, err) @@ -1184,12 +1265,14 @@ func TestMultipleSubgraphs(t *testing.T) { NamedType: "ID", Path: []string{"user", "id"}, SubgraphIDs: []string{"users-subgraph"}, + IsNull: false, }, { TypeName: "Query", NamedType: "ProductFilter", Path: []string{"product", "filter"}, SubgraphIDs: []string{"products-subgraph"}, + IsNull: false, }, } @@ -1197,29 +1280,36 @@ func TestMultipleSubgraphs(t *testing.T) { 
expectedInputUsageInfo := []graphqlmetricsv1.InputUsageInfo{ { NamedType: "ID", + Path: []string{"ID"}, SubgraphIDs: []string{"users-subgraph"}, + IsNull: false, }, { NamedType: "Float", TypeName: "ProductFilter", Path: []string{"ProductFilter", "minPrice"}, SubgraphIDs: []string{"products-subgraph"}, + IsNull: false, }, { NamedType: "Float", TypeName: "ProductFilter", Path: []string{"ProductFilter", "maxPrice"}, SubgraphIDs: []string{"products-subgraph"}, + IsNull: false, }, { NamedType: "String", TypeName: "ProductFilter", Path: []string{"ProductFilter", "category"}, SubgraphIDs: []string{"products-subgraph"}, + IsNull: false, }, { NamedType: "ProductFilter", + Path: []string{"ProductFilter"}, SubgraphIDs: []string{"products-subgraph"}, + IsNull: false, }, } @@ -1264,6 +1354,1132 @@ func TestMultipleSubgraphs(t *testing.T) { } } +// TestNullPropagationScenarios tests the null propagation scenarios from the breaking change detection document +func TestNullPropagationScenarios(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + a(input: Input): ID + } + + input Input { + a: NestedInput + } + + input NestedInput { + a: SuperNestedInput + } + + input SuperNestedInput { + a: ID + } + ` + + tests := []struct { + name string + variables string + expectedUsage []graphqlmetricsv1.InputUsageInfo + description string + }{ + { + name: "input null - explicitly", + variables: `{"input": null}`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "Input", + Path: []string{"Input"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: true, + }, + }, + description: "Explicit null at top level - chain ends here", + }, + { + name: "input empty object - implicit null nested field", + variables: `{"input": {}}`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "NestedInput", + TypeName: "Input", + Path: []string{"Input", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: true, + }, + { + NamedType: "Input", + 
Path: []string{"Input"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + }, + description: "Empty object means nested field 'a' is implicitly null and should be tracked", + }, + { + name: "input.a null - explicitly", + variables: `{"input": {"a": null}}`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "NestedInput", + TypeName: "Input", + Path: []string{"Input", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: true, + }, + { + NamedType: "Input", + Path: []string{"Input"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + }, + description: "Explicit null at nested level - chain ends at Input.a", + }, + { + name: "input.a empty object - implicit null doubly nested field", + variables: `{"input": {"a": {}}}`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "SuperNestedInput", + TypeName: "NestedInput", + Path: []string{"NestedInput", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: true, + }, + { + NamedType: "NestedInput", + TypeName: "Input", + Path: []string{"Input", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + { + NamedType: "Input", + Path: []string{"Input"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + }, + description: "Empty nested object means doubly nested field 'a' is implicitly null and should be tracked", + }, + { + name: "input.a.a null - explicitly", + variables: `{"input": {"a": {"a": null}}}`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "SuperNestedInput", + TypeName: "NestedInput", + Path: []string{"NestedInput", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: true, + }, + { + NamedType: "NestedInput", + TypeName: "Input", + Path: []string{"Input", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + { + NamedType: "Input", + Path: []string{"Input"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + }, + description: "Explicit null at 
doubly nested level - chain ends at NestedInput.a", + }, + { + name: "input.a.a empty object - implicit null triply nested field", + variables: `{"input": {"a": {"a": {}}}}`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "ID", + TypeName: "SuperNestedInput", + Path: []string{"SuperNestedInput", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: true, + }, + { + NamedType: "SuperNestedInput", + TypeName: "NestedInput", + Path: []string{"NestedInput", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + { + NamedType: "NestedInput", + TypeName: "Input", + Path: []string{"Input", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + { + NamedType: "Input", + Path: []string{"Input"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + }, + description: "Empty doubly nested object means triply nested field 'a' is implicitly null and should be tracked", + }, + { + name: "input.a.a.a null - explicitly", + variables: `{"input": {"a": {"a": {"a": null}}}}`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "ID", + TypeName: "SuperNestedInput", + Path: []string{"SuperNestedInput", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: true, + }, + { + NamedType: "SuperNestedInput", + TypeName: "NestedInput", + Path: []string{"NestedInput", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + { + NamedType: "NestedInput", + TypeName: "Input", + Path: []string{"Input", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + { + NamedType: "Input", + Path: []string{"Input"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + }, + description: "Explicit null at leaf level - full chain is tracked with leaf as null", + }, + { + name: "input.a.a.a with value - no nulls", + variables: `{"input": {"a": {"a": {"a": "123"}}}}`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "ID", + TypeName: "SuperNestedInput", + 
Path: []string{"SuperNestedInput", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + { + NamedType: "SuperNestedInput", + TypeName: "NestedInput", + Path: []string{"NestedInput", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + { + NamedType: "NestedInput", + TypeName: "Input", + Path: []string{"Input", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + { + NamedType: "Input", + Path: []string{"Input"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + }, + description: "Full chain with actual value - no nulls in the chain", + }, + } + + operation := ` + query TestQuery($input: Input) { + a(input: $input) + } + ` + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "test-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"a"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "TestQuery", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(tt.variables) + require.NoError(t, err) + + inputUsageInfo, err := 
GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + assert.Len(t, inputUsageInfo, len(tt.expectedUsage), tt.description) + for i := range tt.expectedUsage { + assert.JSONEq(t, prettyJSON(t, &tt.expectedUsage[i]), prettyJSON(t, inputUsageInfo[i]), + "inputUsageInfo[%d] - %s", i, tt.description) + } + }) + } +} + +// TestArgumentUsageWithNullArgument verifies that null argument values are tracked with IsNull flag +func TestArgumentUsageWithNullArgument(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: SearchInput): [Employee!]! + } + + type Employee { + id: ID! + details: EmployeeDetails + } + + type EmployeeDetails { + forename: String + } + + input SearchInput { + department: String + minAge: Int + } + ` + + operation := ` + query FindEmployeesWithVariable($criteria: SearchInput) { + findEmployees(criteria: $criteria) { + id + details { + forename + } + } + } + ` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "employees-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Employee", FieldNames: []string{"id", "details"}}, + {TypeName: "EmployeeDetails", FieldNames: []string{"forename"}}, + }, + }, + nil, + ) + 
require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployeesWithVariable", report) + require.False(t, report.HasErrors()) + + vars := astjson.MustParse(`{"criteria": null}`) + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Should track the null argument with IsNull=true + expectedUsage := []*graphqlmetricsv1.ArgumentUsageInfo{ + { + TypeName: "Query", + NamedType: "SearchInput", + Path: []string{"findEmployees", "criteria"}, + SubgraphIDs: []string{"employees-subgraph"}, + IsNull: true, + }, + } + + assert.Len(t, argumentUsageInfo, len(expectedUsage), "Null argument should be tracked with IsNull=true") + for i := range expectedUsage { + assert.JSONEq(t, prettyJSON(t, expectedUsage[i]), prettyJSON(t, argumentUsageInfo[i]), "argumentUsageInfo[%d]", i) + } +} + +// TestVariableRemapping verifies that variable name remapping works correctly after normalization. +// This tests the real-world scenario where operations are normalized/minified and variable names +// change (e.g., $criteria → $a), requiring remapping to find variable values in the JSON. +func TestVariableRemapping(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: SearchInput, status: String): [Employee!]! + } + + type Employee { + id: ID! 
+ details: EmployeeDetails + } + + type EmployeeDetails { + forename: String + surname: String + } + + input SearchInput { + department: String + minAge: Int + active: Boolean + } + ` + + // Original operation with descriptive variable names + operation := ` + query FindEmployeesQuery($searchCriteria: SearchInput, $employeeStatus: String) { + findEmployees(criteria: $searchCriteria, status: $employeeStatus) { + id + details { + forename + surname + } + } + } + ` + + // Variables use original names + variables := `{ + "searchCriteria": { + "department": "Engineering", + "minAge": 25, + "active": true + }, + "employeeStatus": null + }` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + // Use the actual variables remapper to generate the remapping + // This simulates what happens in the router during operation processing + remapper := astnormalization.NewVariablesMapper() + op.Input.Variables = []byte(variables) + remapReport := &operationreport.Report{} + variablesMap := remapper.NormalizeOperation(&op, &def, remapReport) + require.False(t, remapReport.HasErrors()) + require.NotEmpty(t, variablesMap, "Variables should be remapped after normalization") + + // variablesMap maps normalized names (e.g., "a", "b") to original names (e.g., "searchCriteria", "employeeStatus") + t.Logf("Variable remapping: %+v", variablesMap) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "employees-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, 
+ &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Employee", FieldNames: []string{"id", "details"}}, + {TypeName: "EmployeeDetails", FieldNames: []string{"forename", "surname"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployeesQuery", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + // Test with remapping - should correctly find variables and track usage + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, variablesMap) + require.NoError(t, err) + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, variablesMap) + require.NoError(t, err) + + // Verify argument usage tracks both arguments + // One should be null (employeeStatus), one should be non-null (searchCriteria) + require.Len(t, argumentUsageInfo, 2, "Should track both arguments") + + var criteriaArg, statusArg *graphqlmetricsv1.ArgumentUsageInfo + for _, arg := range argumentUsageInfo { + switch arg.NamedType { + case "SearchInput": + criteriaArg = arg + case "String": + statusArg = arg + } + } + + require.NotNil(t, criteriaArg, "Should find criteria argument") + require.NotNil(t, statusArg, "Should find status argument") + + // Verify criteria argument (non-null input object) + assert.Equal(t, "Query", criteriaArg.TypeName) + assert.Equal(t, "SearchInput", criteriaArg.NamedType) + assert.Equal(t, []string{"findEmployees", "criteria"}, criteriaArg.Path) + assert.False(t, criteriaArg.IsNull, "searchCriteria should not be null") + + // Verify status argument (null string) + assert.Equal(t, "Query", statusArg.TypeName) + assert.Equal(t, "String", 
statusArg.NamedType) + assert.Equal(t, []string{"findEmployees", "status"}, statusArg.Path) + assert.True(t, statusArg.IsNull, "employeeStatus should be null - this is the critical test for remapping!") + + // Verify input usage tracks the input object and its fields + require.GreaterOrEqual(t, len(inputUsageInfo), 4, "Should track SearchInput and its fields") + + // Find the root SearchInput type + var searchInputRoot *graphqlmetricsv1.InputUsageInfo + for _, input := range inputUsageInfo { + if input.NamedType == "SearchInput" && len(input.Path) == 1 { + searchInputRoot = input + break + } + } + require.NotNil(t, searchInputRoot, "Should track root SearchInput type") + assert.False(t, searchInputRoot.IsNull, "SearchInput should not be null") + + // Verify individual fields were tracked + fieldMap := make(map[string]*graphqlmetricsv1.InputUsageInfo) + for _, input := range inputUsageInfo { + if input.TypeName == "SearchInput" && len(input.Path) == 2 { + fieldMap[input.Path[1]] = input + } + } + + // All fields should be present and non-null + assert.Contains(t, fieldMap, "department", "Should track department field") + assert.Contains(t, fieldMap, "minAge", "Should track minAge field") + assert.Contains(t, fieldMap, "active", "Should track active field") + + if departmentField, ok := fieldMap["department"]; ok { + assert.Equal(t, "String", departmentField.NamedType) + assert.False(t, departmentField.IsNull, "department has a value") + } + + if minAgeField, ok := fieldMap["minAge"]; ok { + assert.Equal(t, "Int", minAgeField.NamedType) + assert.False(t, minAgeField.IsNull, "minAge has a value") + } + + if activeField, ok := fieldMap["active"]; ok { + assert.Equal(t, "Boolean", activeField.NamedType) + assert.False(t, activeField.IsNull, "active has a value") + } + + // Test without remapping - should fail to find variables correctly + argumentUsageInfoNoRemap, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Without 
remapping, null detection for variable-based arguments won't work correctly + // because the AST uses normalized names but variables JSON uses original names + var statusArgNoRemap *graphqlmetricsv1.ArgumentUsageInfo + for _, arg := range argumentUsageInfoNoRemap { + if arg.NamedType == "String" { + statusArgNoRemap = arg + break + } + } + + // Without remapping, we can't correctly detect the null status because we can't find + // the variable value (AST has normalized name, JSON has original name) + // This demonstrates why remapping is critical + if statusArgNoRemap != nil { + // The behavior without remapping: can't find the variable, so defaults to false + assert.False(t, statusArgNoRemap.IsNull, "Without remapping, can't correctly detect null status") + } +} + +// TestImplicitNullArguments verifies that arguments are tracked even when not provided in the operation. +// This is critical for breaking change detection - we need to know if optional arguments are being used. +func TestImplicitNullArguments(t *testing.T) { + t.Run("no arguments provided", func(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: SearchInput, status: String, limit: Int): String + } + + input SearchInput { + department: String + } + ` + + // Operation WITHOUT any arguments - all should be tracked as implicitly null + operation := ` + query FindEmployees { + findEmployees + } + ` + + variables := `{}` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) 
+ require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "employees-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployees", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Should track ALL three arguments even though none were provided + require.Len(t, argumentUsageInfo, 3, "Should track all 3 arguments (criteria, status, limit) even though none were provided") + + // Verify all arguments are tracked as implicitly null + argumentMap := make(map[string]*graphqlmetricsv1.ArgumentUsageInfo) + for _, arg := range argumentUsageInfo { + if len(arg.Path) == 2 && arg.Path[0] == "findEmployees" { + argumentMap[arg.Path[1]] = arg + } + } + + // Verify criteria argument (SearchInput) + require.Contains(t, argumentMap, "criteria", "Should track criteria argument") + criteriaArg := argumentMap["criteria"] + assert.Equal(t, "Query", criteriaArg.TypeName) + assert.Equal(t, "SearchInput", criteriaArg.NamedType) + assert.Equal(t, []string{"findEmployees", "criteria"}, criteriaArg.Path) + assert.True(t, criteriaArg.IsNull, "criteria should be implicitly null (not provided)") + + // Verify status argument (String) + require.Contains(t, argumentMap, "status", "Should track status argument") + statusArg := argumentMap["status"] + assert.Equal(t, "Query", statusArg.TypeName) + assert.Equal(t, "String", statusArg.NamedType) + assert.Equal(t, []string{"findEmployees", "status"}, 
statusArg.Path) + assert.True(t, statusArg.IsNull, "status should be implicitly null (not provided)") + + // Verify limit argument (Int) + require.Contains(t, argumentMap, "limit", "Should track limit argument") + limitArg := argumentMap["limit"] + assert.Equal(t, "Query", limitArg.TypeName) + assert.Equal(t, "Int", limitArg.NamedType) + assert.Equal(t, []string{"findEmployees", "limit"}, limitArg.Path) + assert.True(t, limitArg.IsNull, "limit should be implicitly null (not provided)") + }) + + t.Run("mixed - some arguments provided, some not", func(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: SearchInput, status: String, limit: Int): String + } + + input SearchInput { + department: String + } + ` + + // Operation with only 'status' argument - criteria and limit should be tracked as implicit nulls + operation := ` + query FindEmployees { + findEmployees(status: "active") + } + ` + + variables := `{}` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "employees-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + 
require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployees", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Should track ALL three arguments: status (explicit), criteria & limit (implicit) + require.Len(t, argumentUsageInfo, 3, "Should track all 3 arguments") + + // Verify argument tracking + argumentMap := make(map[string]*graphqlmetricsv1.ArgumentUsageInfo) + for _, arg := range argumentUsageInfo { + if len(arg.Path) == 2 && arg.Path[0] == "findEmployees" { + argumentMap[arg.Path[1]] = arg + } + } + + // Verify status argument (provided explicitly with value) + require.Contains(t, argumentMap, "status") + statusArg := argumentMap["status"] + assert.Equal(t, "String", statusArg.NamedType) + assert.False(t, statusArg.IsNull, "status was provided with value") + + // Verify criteria argument (not provided - implicit null) + require.Contains(t, argumentMap, "criteria") + criteriaArg := argumentMap["criteria"] + assert.Equal(t, "SearchInput", criteriaArg.NamedType) + assert.True(t, criteriaArg.IsNull, "criteria should be implicitly null (not provided)") + + // Verify limit argument (not provided - implicit null) + require.Contains(t, argumentMap, "limit") + limitArg := argumentMap["limit"] + assert.Equal(t, "Int", limitArg.NamedType) + assert.True(t, limitArg.IsNull, "limit should be implicitly null (not provided)") + }) +} + +// TestImplicitInputTypeArgumentUsage verifies that when an input type argument is not provided, +// we track input usage for that type with IsNull: true for breaking change detection. +func TestImplicitInputTypeArgumentUsage(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: SearchInput, status: String, limit: Int): [Employee!]! + } + + type Employee { + id: ID! 
+ details: EmployeeDetails + } + + type EmployeeDetails { + forename: String + } + + input SearchInput { + department: String + title: String + } + ` + + // Operation without providing the SearchInput argument + operation := ` + query FindEmployees { + findEmployees { + id + details { + forename + } + } + } + ` + + variables := `{}` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "employees-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Employee", FieldNames: []string{"id", "details"}}, + {TypeName: "EmployeeDetails", FieldNames: []string{"forename"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployees", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + // Get argument usage - should include implicit nulls for criteria, status, limit + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Get input usage - should include SearchInput from the implicitly null 
criteria argument + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Verify argument usage includes all three arguments as implicitly null + require.Len(t, argumentUsageInfo, 3, "Should track all 3 arguments (criteria, status, limit)") + + var criteriaArg *graphqlmetricsv1.ArgumentUsageInfo + for _, arg := range argumentUsageInfo { + if len(arg.Path) == 2 && arg.Path[0] == "findEmployees" && arg.Path[1] == "criteria" { + criteriaArg = arg + break + } + } + require.NotNil(t, criteriaArg, "Should find criteria argument") + assert.Equal(t, "SearchInput", criteriaArg.NamedType) + assert.True(t, criteriaArg.IsNull, "criteria should be implicitly null") + + // CRITICAL: Verify input usage includes SearchInput from the implicitly null criteria argument + var searchInputUsage *graphqlmetricsv1.InputUsageInfo + for _, input := range inputUsageInfo { + if input.NamedType == "SearchInput" && len(input.Path) == 1 && input.Path[0] == "SearchInput" { + searchInputUsage = input + break + } + } + require.NotNil(t, searchInputUsage, "Should track input usage for SearchInput type even though argument wasn't provided") + assert.Equal(t, "SearchInput", searchInputUsage.NamedType) + assert.Equal(t, []string{"SearchInput"}, searchInputUsage.Path) + assert.True(t, searchInputUsage.IsNull, "SearchInput should be marked as null since argument wasn't provided") + assert.Equal(t, []string{"employees-subgraph"}, searchInputUsage.SubgraphIDs, "Should have correct subgraph ID") +} + +// TestSharedVariableAcrossSubgraphs verifies that when a variable is used by multiple fields +// from different subgraphs, the variable's input usage is attributed to all subgraphs (merged). +func TestSharedVariableAcrossSubgraphs(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + user(id: ID!): User + product(id: ID!): Product + order(id: ID!): Order + } + + type User { + id: ID! + name: String! 
+ } + + type Product { + id: ID! + title: String! + } + + type Order { + id: ID! + status: String! + } + ` + + // Single variable $sharedId is used by three fields from three different subgraphs + operation := ` + query GetData($sharedId: ID!) { + user(id: $sharedId) { + id + name + } + product(id: $sharedId) { + id + title + } + order(id: $sharedId) { + id + status + } + } + ` + + variables := `{"sharedId": "123"}` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + // Create three subgraphs - each serving one root field + usersSubgraph, err := plan.NewDataSourceConfiguration[any]( + "users-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"user"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "User", FieldNames: []string{"id", "name"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + productsSubgraph, err := plan.NewDataSourceConfiguration[any]( + "products-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"product"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Product", FieldNames: []string{"id", "title"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + ordersSubgraph, err := plan.NewDataSourceConfiguration[any]( + "orders-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + 
RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"order"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Order", FieldNames: []string{"id", "status"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{usersSubgraph, productsSubgraph, ordersSubgraph}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "GetData", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // The $sharedId variable is used by user(id:), product(id:), and order(id:) + // Each from a different subgraph, so we expect THREE argument entries + expectedArgumentUsageInfo := []*graphqlmetricsv1.ArgumentUsageInfo{ + { + TypeName: "Query", + NamedType: "ID", + Path: []string{"user", "id"}, + SubgraphIDs: []string{"users-subgraph"}, + IsNull: false, + }, + { + TypeName: "Query", + NamedType: "ID", + Path: []string{"product", "id"}, + SubgraphIDs: []string{"products-subgraph"}, + IsNull: false, + }, + { + TypeName: "Query", + NamedType: "ID", + Path: []string{"order", "id"}, + SubgraphIDs: []string{"orders-subgraph"}, + IsNull: false, + }, + } + + // The $sharedId variable's input usage should be attributed to ALL THREE subgraphs + // This is the critical test: mergeSubgraphIDs should combine all three + expectedInputUsageInfo := []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "ID", + Path: []string{"ID"}, + // MERGED: All three subgraphs that use this variable + SubgraphIDs: []string{"users-subgraph", "products-subgraph", "orders-subgraph"}, + IsNull: false, + }, + } + + // Verify argument usage + assert.Len(t, argumentUsageInfo, 
len(expectedArgumentUsageInfo)) + for i := range expectedArgumentUsageInfo { + assert.JSONEq(t, prettyJSON(t, expectedArgumentUsageInfo[i]), prettyJSON(t, argumentUsageInfo[i]), + "argumentUsageInfo[%d]", i) + } + + // Verify input usage - the critical assertion + assert.Len(t, inputUsageInfo, len(expectedInputUsageInfo), "Should have one input usage entry for the shared variable") + + // The input usage should have all three subgraph IDs merged + actualInput := inputUsageInfo[0] + assert.Equal(t, "ID", actualInput.NamedType, "Input type should be ID") + assert.Equal(t, []string{"ID"}, actualInput.Path, "Input path should be [ID]") + assert.False(t, actualInput.IsNull, "Input should not be null") + + // Critical assertion: verify all three subgraphs are present (order-independent) + assert.ElementsMatch(t, expectedInputUsageInfo[0].SubgraphIDs, actualInput.SubgraphIDs, + "Input usage should be attributed to all three subgraphs that use the variable") + + // Verify we have exactly 3 subgraphs (no duplicates) + assert.Len(t, actualInput.SubgraphIDs, 3, "Should have exactly 3 subgraph IDs (no duplicates)") +} + func prettyJSON(t *testing.T, v interface{}) string { b, err := json.MarshalIndent(v, "", " ") require.NoError(t, err) From 70dc6df05742466a381f81836f1201005522bc00 Mon Sep 17 00:00:00 2001 From: StarpTech Date: Sat, 22 Nov 2025 13:37:55 +0100 Subject: [PATCH 05/22] chore: add more tests --- .../graphqlschemausage/schemausage_test.go | 204 ++++++++++++++++++ 1 file changed, 204 insertions(+) diff --git a/router/pkg/graphqlschemausage/schemausage_test.go b/router/pkg/graphqlschemausage/schemausage_test.go index c37cfccbe7..128a43d779 100644 --- a/router/pkg/graphqlschemausage/schemausage_test.go +++ b/router/pkg/graphqlschemausage/schemausage_test.go @@ -4,6 +4,7 @@ import ( "bytes" "context" "encoding/json" + "strings" "testing" "github.com/jensneuse/abstractlogger" @@ -2289,6 +2290,209 @@ func TestImplicitInputTypeArgumentUsage(t *testing.T) { assert.Equal(t, 
[]string{"employees-subgraph"}, searchInputUsage.SubgraphIDs, "Should have correct subgraph ID") } +// TestSharedInputObjectAcrossSubgraphs verifies that when an input object variable is used by +// multiple fields from different subgraphs, the input usage (including nested fields) is +// attributed to all subgraphs that use it (merged). +func TestSharedInputObjectAcrossSubgraphs(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findUsers(criteria: SearchInput!): [User!]! + findProducts(criteria: SearchInput!): [Product!]! + findOrders(criteria: SearchInput!): [Order!]! + } + + type User { + id: ID! + name: String! + } + + type Product { + id: ID! + title: String! + } + + type Order { + id: ID! + status: String! + } + + input SearchInput { + keyword: String + category: String + limit: Int + } + ` + + // Single input object variable used by three fields from three different subgraphs + operation := ` + query Search($criteria: SearchInput!) { + findUsers(criteria: $criteria) { + id + name + } + findProducts(criteria: $criteria) { + id + title + } + findOrders(criteria: $criteria) { + id + status + } + } + ` + + variables := `{"criteria": {"keyword": "test", "category": "electronics"}}` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + // Create three subgraphs - each serving one root field + usersSubgraph, err := plan.NewDataSourceConfiguration[any]( + "users-subgraph", + &FakeFactory[any]{upstreamSchema: 
&def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findUsers"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "User", FieldNames: []string{"id", "name"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + productsSubgraph, err := plan.NewDataSourceConfiguration[any]( + "products-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findProducts"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Product", FieldNames: []string{"id", "title"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + ordersSubgraph, err := plan.NewDataSourceConfiguration[any]( + "orders-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findOrders"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Order", FieldNames: []string{"id", "status"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{usersSubgraph, productsSubgraph, ordersSubgraph}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "Search", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // The $criteria variable is used by findUsers, findProducts, and findOrders + // Each from a different subgraph, so we expect THREE argument entries + require.Len(t, argumentUsageInfo, 3, "Should have 3 argument usage entries") + + // Verify each argument has its own subgraph + argumentsByField := 
make(map[string]*graphqlmetricsv1.ArgumentUsageInfo) + for _, arg := range argumentUsageInfo { + if len(arg.Path) == 2 && arg.Path[1] == "criteria" { + argumentsByField[arg.Path[0]] = arg + } + } + + require.Contains(t, argumentsByField, "findUsers") + require.Contains(t, argumentsByField, "findProducts") + require.Contains(t, argumentsByField, "findOrders") + + assert.Equal(t, []string{"users-subgraph"}, argumentsByField["findUsers"].SubgraphIDs) + assert.Equal(t, []string{"products-subgraph"}, argumentsByField["findProducts"].SubgraphIDs) + assert.Equal(t, []string{"orders-subgraph"}, argumentsByField["findOrders"].SubgraphIDs) + + // CRITICAL: Input usage should merge all three subgraphs + // We should have entries for: + // 1. SearchInput (root) - merged subgraphs + // 2. SearchInput.keyword - merged subgraphs + // 3. SearchInput.category - merged subgraphs + // 4. SearchInput.limit (implicit null) - merged subgraphs + + inputsByPath := make(map[string]*graphqlmetricsv1.InputUsageInfo) + for _, input := range inputUsageInfo { + pathKey := strings.Join(input.Path, ".") + inputsByPath[pathKey] = input + } + + // Verify root SearchInput has all three subgraphs merged + require.Contains(t, inputsByPath, "SearchInput", "Should track root SearchInput") + searchInputRoot := inputsByPath["SearchInput"] + assert.Equal(t, "SearchInput", searchInputRoot.NamedType) + assert.False(t, searchInputRoot.IsNull) + assert.ElementsMatch(t, []string{"users-subgraph", "products-subgraph", "orders-subgraph"}, + searchInputRoot.SubgraphIDs, "Root SearchInput should have all three subgraphs merged") + assert.Len(t, searchInputRoot.SubgraphIDs, 3, "Should have exactly 3 subgraphs (no duplicates)") + + // Verify keyword field has all three subgraphs merged + require.Contains(t, inputsByPath, "SearchInput.keyword", "Should track SearchInput.keyword") + keywordField := inputsByPath["SearchInput.keyword"] + assert.Equal(t, "String", keywordField.NamedType) + assert.False(t, 
keywordField.IsNull) + assert.ElementsMatch(t, []string{"users-subgraph", "products-subgraph", "orders-subgraph"}, + keywordField.SubgraphIDs, "keyword field should have all three subgraphs merged") + + // Verify category field has all three subgraphs merged + require.Contains(t, inputsByPath, "SearchInput.category", "Should track SearchInput.category") + categoryField := inputsByPath["SearchInput.category"] + assert.Equal(t, "String", categoryField.NamedType) + assert.False(t, categoryField.IsNull) + assert.ElementsMatch(t, []string{"users-subgraph", "products-subgraph", "orders-subgraph"}, + categoryField.SubgraphIDs, "category field should have all three subgraphs merged") + + // Verify implicit null field (limit) has all three subgraphs merged + require.Contains(t, inputsByPath, "SearchInput.limit", "Should track implicitly null SearchInput.limit") + limitField := inputsByPath["SearchInput.limit"] + assert.Equal(t, "Int", limitField.NamedType) + assert.True(t, limitField.IsNull, "limit should be implicitly null (not provided)") + assert.ElementsMatch(t, []string{"users-subgraph", "products-subgraph", "orders-subgraph"}, + limitField.SubgraphIDs, "implicit null field should also have all three subgraphs merged") +} + // TestSharedVariableAcrossSubgraphs verifies that when a variable is used by multiple fields // from different subgraphs, the variable's input usage is attributed to all subgraphs (merged). 
func TestSharedVariableAcrossSubgraphs(t *testing.T) { From bbe19faa57c1534b1c261b08bd85ccb56e208ced Mon Sep 17 00:00:00 2001 From: StarpTech Date: Mon, 24 Nov 2025 15:46:03 +0100 Subject: [PATCH 06/22] chore: track input usage also when variables are empty --- router/pkg/graphqlschemausage/schemausage.go | 21 +++- .../graphqlschemausage/schemausage_test.go | 104 ++++++++++++++++++ 2 files changed, 123 insertions(+), 2 deletions(-) diff --git a/router/pkg/graphqlschemausage/schemausage.go b/router/pkg/graphqlschemausage/schemausage.go index 7468b2a9a9..8489b6e2c9 100644 --- a/router/pkg/graphqlschemausage/schemausage.go +++ b/router/pkg/graphqlschemausage/schemausage.go @@ -115,7 +115,7 @@ func GetInputUsageInfo(operation, definition *ast.Document, variables *astjson.V // Track input usage from variable definitions for i := range operation.VariableDefinitions { - processVariableDefinition(traverser, operation, variables, nullDetector, i) + processVariableDefinition(traverser, operation, definition, variables, nullDetector, subgraphMapper, i) } // Track input usage from implicitly null input type arguments @@ -877,7 +877,8 @@ func (t *inputTraverser) infoEquals(a, b *graphqlmetrics.InputUsageInfo) bool { // ============================================ // processVariableDefinition processes a variable definition and initiates input traversal. -func processVariableDefinition(traverser *inputTraverser, operation *ast.Document, variables *astjson.Value, nullDetector *nullValueDetector, ref int) { +// Tracks input usage even when the variable is not provided in the variables JSON (empty variables). 
+func processVariableDefinition(traverser *inputTraverser, operation, definition *ast.Document, variables *astjson.Value, nullDetector *nullValueDetector, subgraphMapper *subgraphMapper, ref int) { varDef := operation.VariableDefinitions[ref] varTypeRef := varDef.Type varTypeName := operation.ResolveTypeNameString(varTypeRef) @@ -891,6 +892,22 @@ func processVariableDefinition(traverser *inputTraverser, operation *ast.Documen // Look up the variable value jsonField := variables.Get(originalVarName) if jsonField == nil { + // Variable is not provided in variables JSON - still track input type usage if it's an input object type + // This is important for breaking change detection + defNode, ok := definition.NodeByNameStr(varTypeName) + if ok && defNode.Kind == ast.NodeKindInputObjectTypeDefinition { + // Use normalized name for subgraph lookup + traverser.currentVariableName = normalizedVarName + subgraphIDs := subgraphMapper.getVariableSubgraphs(normalizedVarName) + + // Track the input type as implicitly null (variable not provided) + traverser.appendUniqueUsage(&graphqlmetrics.InputUsageInfo{ + NamedType: varTypeName, + Path: []string{varTypeName}, + SubgraphIDs: subgraphIDs, + IsNull: true, // Variable not provided + }) + } return } diff --git a/router/pkg/graphqlschemausage/schemausage_test.go b/router/pkg/graphqlschemausage/schemausage_test.go index 128a43d779..29c652bdcd 100644 --- a/router/pkg/graphqlschemausage/schemausage_test.go +++ b/router/pkg/graphqlschemausage/schemausage_test.go @@ -2290,6 +2290,110 @@ func TestImplicitInputTypeArgumentUsage(t *testing.T) { assert.Equal(t, []string{"employees-subgraph"}, searchInputUsage.SubgraphIDs, "Should have correct subgraph ID") } +// TestInputUsageWithEmptyVariables verifies that when a variable is defined and used in an argument, +// but the variables JSON is empty, we still track the input type usage with IsNull: true. 
+func TestInputUsageWithEmptyVariables(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: SearchInput): [Employee!]! + } + + type Employee { + id: ID! + details: EmployeeDetails + } + + type EmployeeDetails { + forename: String + } + + input SearchInput { + department: String + title: String + } + ` + + // Operation with variable defined and used in argument, but variables JSON will be empty + operation := ` + query FindEmployeesWithVariable($criteria: SearchInput) { + findEmployees(criteria: $criteria) { + id + details { + forename + } + } + } + ` + + variables := `{}` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "employees-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Employee", FieldNames: []string{"id", "details"}}, + {TypeName: "EmployeeDetails", FieldNames: []string{"forename"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployeesWithVariable", report) + require.False(t, report.HasErrors()) + + vars, err := 
astjson.Parse(variables) + require.NoError(t, err) + + // Get input usage - should include SearchInput even though variable is not provided + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Verify input usage includes SearchInput from the variable definition + var searchInputUsage *graphqlmetricsv1.InputUsageInfo + for _, input := range inputUsageInfo { + if input.NamedType == "SearchInput" && len(input.Path) == 1 && input.Path[0] == "SearchInput" { + searchInputUsage = input + break + } + } + require.NotNil(t, searchInputUsage, "Should track input usage for SearchInput type even though variable is not provided in empty variables JSON") + assert.Equal(t, "SearchInput", searchInputUsage.NamedType) + assert.Equal(t, []string{"SearchInput"}, searchInputUsage.Path) + assert.True(t, searchInputUsage.IsNull, "SearchInput should be marked as null since variable is not provided") + assert.Equal(t, []string{"employees-subgraph"}, searchInputUsage.SubgraphIDs, "Should have correct subgraph ID") +} + // TestSharedInputObjectAcrossSubgraphs verifies that when an input object variable is used by // multiple fields from different subgraphs, the input usage (including nested fields) is // attributed to all subgraphs that use it (merged). 
From 4678124ee44cffc85735f9341d0469e8961a4bcc Mon Sep 17 00:00:00 2001 From: JivusAyrus Date: Fri, 28 Nov 2025 17:31:31 +0530 Subject: [PATCH 07/22] fix: operation checks for input and argument usage --- controlplane/src/core/composition/composer.ts | 4 +- .../repositories/SchemaCheckRepository.ts | 9 +- .../core/repositories/SubgraphRepository.ts | 11 +- .../SchemaUsageTrafficInspector.test.ts | 272 +++++++++- .../services/SchemaUsageTrafficInspector.ts | 470 ++++++++++++++++-- 5 files changed, 687 insertions(+), 79 deletions(-) diff --git a/controlplane/src/core/composition/composer.ts b/controlplane/src/core/composition/composer.ts index afc22b2237..ee07221b7e 100644 --- a/controlplane/src/core/composition/composer.ts +++ b/controlplane/src/core/composition/composer.ts @@ -44,7 +44,7 @@ import * as schema from '../../db/schema.js'; import { ClickHouseClient } from '../clickhouse/index.js'; import { CacheWarmerRepository } from '../repositories/CacheWarmerRepository.js'; import { NamespaceRepository } from '../repositories/NamespaceRepository.js'; -import { InspectorSchemaChange } from '../services/SchemaUsageTrafficInspector.js'; +import { InspectorSchemaChangeGroup } from '../services/SchemaUsageTrafficInspector.js'; import { SchemaCheckChangeAction } from '../../db/models.js'; import { composeFederatedGraphWithPotentialContracts, composeSubgraphs } from './composition.js'; import { getDiffBetweenGraphs, GetDiffBetweenGraphsResult, GetDiffBetweenGraphsSuccess } from './schemaCheck.js'; @@ -262,7 +262,7 @@ export type CheckSubgraph = { checkSubgraphId: string; newSchemaSDL: string; newGraphQLSchema?: GraphQLSchema; - inspectorChanges: InspectorSchemaChange[]; + inspectorChanges: InspectorSchemaChangeGroup[]; schemaChanges: GetDiffBetweenGraphsSuccess; storedBreakingChanges: SchemaCheckChangeAction[]; routerCompatibilityVersion: string; diff --git a/controlplane/src/core/repositories/SchemaCheckRepository.ts 
b/controlplane/src/core/repositories/SchemaCheckRepository.ts index 9da31f4c84..3bf3bbf700 100644 --- a/controlplane/src/core/repositories/SchemaCheckRepository.ts +++ b/controlplane/src/core/repositories/SchemaCheckRepository.ts @@ -40,7 +40,7 @@ import { getDiffBetweenGraphs, SchemaDiff } from '../composition/schemaCheck.js' import { collectOperationUsageStats, InspectorOperationResult, - InspectorSchemaChange, + InspectorSchemaChangeGroup, SchemaUsageTrafficInspector, } from '../services/SchemaUsageTrafficInspector.js'; import { @@ -902,7 +902,7 @@ export class SchemaCheckRepository { schemaCheckSubgraphId, }); - let inspectorChanges: InspectorSchemaChange[] = []; + let inspectorChanges: InspectorSchemaChangeGroup[] = []; // For operations checks we only consider breaking changes inspectorChanges = trafficInspector.schemaChangesToInspectorChanges( schemaChanges.breakingChanges, @@ -1040,7 +1040,10 @@ export class SchemaCheckRepository { That means any breaking change is really breaking */ for (const [subgraphName, checkSubgraph] of checkSubgraphs.entries()) { - if (composition.errors.length > 0 || checkSubgraph.inspectorChanges.length === 0) { + if ( + composition.errors.length > 0 || + checkSubgraph.inspectorChanges.every((group) => group.changes.length === 0) + ) { continue; } diff --git a/controlplane/src/core/repositories/SubgraphRepository.ts b/controlplane/src/core/repositories/SubgraphRepository.ts index 60ab3815a8..523de96f42 100644 --- a/controlplane/src/core/repositories/SubgraphRepository.ts +++ b/controlplane/src/core/repositories/SubgraphRepository.ts @@ -53,7 +53,7 @@ import { RBACEvaluator } from '../services/RBACEvaluator.js'; import { collectOperationUsageStats, InspectorOperationResult, - InspectorSchemaChange, + InspectorSchemaChangeGroup, SchemaUsageTrafficInspector, } from '../services/SchemaUsageTrafficInspector.js'; import { @@ -2003,7 +2003,7 @@ export class SubgraphRepository { const compositionErrors: PlainMessage[] = []; const 
compositionWarnings: PlainMessage[] = []; - let inspectorChanges: InspectorSchemaChange[] = []; + let inspectorChanges: InspectorSchemaChangeGroup[] = []; // For operations checks we only consider breaking changes inspectorChanges = trafficInspector.schemaChangesToInspectorChanges( @@ -2037,7 +2037,12 @@ export class SubgraphRepository { 3. When user wants to skip the traffic check altogether That means any breaking change is really breaking */ - if (composedGraph.errors.length > 0 || inspectorChanges.length === 0 || skipTrafficCheck || !subgraph) { + if ( + composedGraph.errors.length > 0 || + inspectorChanges.every((group) => group.changes.length === 0) || + skipTrafficCheck || + !subgraph + ) { continue; } diff --git a/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts b/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts index 2f110c3fdd..ca18430b24 100644 --- a/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts +++ b/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts @@ -1,7 +1,7 @@ import { describe, expect, test } from 'vitest'; import { buildSchema, GraphQLSchema } from 'graphql'; import { getSchemaDiff } from '../composition/schemaCheck.js'; -import { InspectorSchemaChange, toInspectorChange } from './SchemaUsageTrafficInspector.js'; +import { InspectorSchemaChange, InspectorSchemaChangeGroup, toInspectorChange } from './SchemaUsageTrafficInspector.js'; describe('Schema Change converter', (ctx) => { describe('Arguments', (ctx) => { @@ -21,9 +21,7 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { - isArgument: true, - path: ['a', 'b'], - schemaChangeId: '0', + path: ['a'], typeName: 'Query', }, ]); @@ -51,13 +49,155 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { - isArgument: true, - path: ['details', 'all'], - schemaChangeId: '0', + path: ['details'], typeName: 'Rocket', }, ]); }); + + test('Remove a required argument', async () => { + 
const a = buildSchema(/* GraphQL */ ` + type Query { + a(b: Boolean!): String + } + `); + const b = buildSchema(/* GraphQL */ ` + type Query { + a: String + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { + path: ['a'], + typeName: 'Query', + }, + ]); + }); + + test('Remove an optional argument', async () => { + const a = buildSchema(/* GraphQL */ ` + type Query { + a(b: Boolean): String + } + `); + const b = buildSchema(/* GraphQL */ ` + type Query { + a: String + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { + path: ['a', 'b'], + typeName: 'Query', + isArgument: true, + isNull: false, + }, + ]); + }); + + test('Change argument type from optional same to required same', async () => { + const a = buildSchema(/* GraphQL */ ` + type Query { + a(b: Boolean): String + } + `); + const b = buildSchema(/* GraphQL */ ` + type Query { + a(b: Boolean!): String + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { + path: ['a', 'b'], + typeName: 'Query', + fieldName: 'a', + isArgument: true, + isNull: true, + }, + ]); + }); + + test('Change argument type from optional different to required different', async () => { + const a = buildSchema(/* GraphQL */ ` + type Query { + a(b: Boolean): String + } + `); + const b = buildSchema(/* GraphQL */ ` + type Query { + a(b: String!): String + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { + path: ['a', 'b'], + typeName: 'Query', + fieldName: 'a', + isArgument: true, + }, + ]); + }); + + test('Change argument type from required different to required different', async () => { + const a = buildSchema(/* GraphQL */ ` + type Query { + a(b: Boolean!): String + } + `); + const b = buildSchema(/* GraphQL */ ` + type Query { + a(b: String!): String + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { + path: ['a', 'b'], + 
typeName: 'Query', + fieldName: 'a', + isArgument: true, + }, + ]); + }); + + test('Change argument type from optional different to optional different', async () => { + const a = buildSchema(/* GraphQL */ ` + type Query { + a(b: Boolean): String + } + `); + const b = buildSchema(/* GraphQL */ ` + type Query { + a(b: String): String + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { + path: ['a', 'b'], + typeName: 'Query', + fieldName: 'a', + isArgument: true, + isNull: false, + }, + ]); + }); }); describe('Input', (ctx) => { @@ -78,15 +218,36 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { - fieldName: 'b', + path: ['Foo'], + isInput: true, + }, + ]); + }); + + test('Remove an Input field', async () => { + const a = buildSchema(/* GraphQL */ ` + input Foo { + a: String! + b: String! + } + `); + const b = buildSchema(/* GraphQL */ ` + input Foo { + a: String! + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { + path: ['Foo'], isInput: true, - schemaChangeId: '0', - typeName: 'Foo', }, ]); }); - test('Change the type of an Input field', async () => { + test('Change input field type from required different to required different', async () => { const a = buildSchema(/* GraphQL */ ` input Foo { a: String! @@ -102,10 +263,83 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { + path: ['Foo'], + isInput: true, + }, + ]); + }); + + test('Change input field type from optional same to required same', async () => { + const a = buildSchema(/* GraphQL */ ` + input Foo { + a: String + } + `); + const b = buildSchema(/* GraphQL */ ` + input Foo { + a: String! 
+ } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { + typeName: 'Foo', fieldName: 'a', isInput: true, - schemaChangeId: '0', + isNull: true, + }, + { + path: ['Foo'], + isInput: true, + isNull: true, + }, + ]); + }); + + test('Change input field type from optional different to required different', async () => { + const a = buildSchema(/* GraphQL */ ` + input Foo { + a: String + } + `); + const b = buildSchema(/* GraphQL */ ` + input Foo { + a: Int! + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { + path: ['Foo'], + isInput: true, + }, + ]); + }); + + test('Change input field type from optional different to optional different', async () => { + const a = buildSchema(/* GraphQL */ ` + input Foo { + a: String + } + `); + const b = buildSchema(/* GraphQL */ ` + input Foo { + a: Int + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { typeName: 'Foo', + fieldName: 'a', + isInput: true, + isNull: false, }, ]); }); @@ -131,12 +365,10 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { - schemaChangeId: '0', typeName: 'Rocket', }, { fieldName: 'a', - schemaChangeId: '1', typeName: 'Query', }, ]); @@ -171,7 +403,6 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { namedType: 'enumA', - schemaChangeId: '0', }, ]); }); @@ -199,7 +430,7 @@ describe('Schema Change converter', (ctx) => { async function getBreakingChanges(a: GraphQLSchema, b: GraphQLSchema): Promise { const changes = await getSchemaDiff(a, b); - return changes + const groups = changes .map((c, i) => toInspectorChange( { @@ -211,5 +442,12 @@ async function getBreakingChanges(a: GraphQLSchema, b: GraphQLSchema): Promise c !== null) as InspectorSchemaChange[]; + .filter((c) => c !== null) as InspectorSchemaChangeGroup[]; + + // Flatten groups + const result: InspectorSchemaChange[] = []; + for (const group of groups) { + 
result.push(...group.changes); + } + return result; } diff --git a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts index 622f00572b..67e8aa1ac4 100644 --- a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts +++ b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts @@ -3,14 +3,179 @@ import { ClickHouseClient } from '../clickhouse/index.js'; import { SchemaDiff } from '../composition/schemaCheck.js'; import { SchemaCheckChangeAction } from '../../db/models.js'; +export enum FieldTypeChangeCategory { + /** + * Optional same type -> Required same type + * Example: "Boolean" -> "Boolean!" + */ + OPTIONAL_SAME_TO_REQUIRED_SAME = 'OPTIONAL_SAME_TO_REQUIRED_SAME', + /** + * Optional different type -> Required different type + * Example: "Boolean" -> "String!" + */ + OPTIONAL_DIFFERENT_TO_REQUIRED_DIFFERENT = 'OPTIONAL_DIFFERENT_TO_REQUIRED_DIFFERENT', + /** + * Required different type -> Required different type + * Example: "Boolean!" -> "String!" + */ + REQUIRED_DIFFERENT_TO_REQUIRED_DIFFERENT = 'REQUIRED_DIFFERENT_TO_REQUIRED_DIFFERENT', + /** + * Optional different type -> Optional different type + * Example: "Boolean" -> "String" + */ + OPTIONAL_DIFFERENT_TO_OPTIONAL_DIFFERENT = 'OPTIONAL_DIFFERENT_TO_OPTIONAL_DIFFERENT', +} + +/** + * Extract base types by removing: + * - Trailing "!" (required indicator) + * - Array brackets like "[Boolean!]" -> "Boolean!" + * - Inner "!" from arrays like "[Boolean!]" -> "Boolean" + */ +function extractBaseType(type: string): string { + let base = type.trim(); + + // Remove trailing "!" + if (base.endsWith('!')) { + base = base.slice(0, -1).trim(); + } + + // Handle array types like "[Boolean!]" or "[Boolean!]!" + // Remove outer brackets and inner "!" + if (base.startsWith('[') && base.endsWith(']')) { + base = base.slice(1, -1).trim(); + // Remove inner "!" 
if present + if (base.endsWith('!')) { + base = base.slice(0, -1).trim(); + } + } + + return base; +} + +/** + * Normalize type for structural comparison (remove required indicators but keep structure) + */ +function normalizeType(type: string): string { + let normalized = type.trim(); + // Remove trailing "!" but keep array structure + if (normalized.endsWith('!')) { + normalized = normalized.slice(0, -1).trim(); + } + return normalized; +} + +/** + * Parses an argument removal message and determines if the argument was required. + * + * @param message - String in format: "Argument 'name: Type' was removed from field 'TypeName.fieldName'" + * @returns true if the argument was required (type ends with '!'), false if optional + * + * @example + * parseArgumentRemoval("Argument 'criteria: SearchInput!' was removed from field 'Query.findEmployees'") + * // Returns true (required) + * + * @example + * parseArgumentRemoval("Argument 'criteria: SearchInput' was removed from field 'Query.findEmployees'") + * // Returns false (optional) + */ +export function parseArgumentRemoval(message: string): boolean { + // Extract the argument type from the message + // Format: "Argument 'name: Type' was removed from field '...'" + const match = message.match(/Argument '([^:]+):\s*([^']+)' was removed/); + + if (!match || match.length < 3) { + throw new Error(`Invalid argument removal message format: ${message}`); + } + + const argumentType = match[2].trim(); + + // Check if the type ends with "!" to determine if it was required + return argumentType.endsWith('!'); +} + +/** + * Parses a type change message (for both input fields and arguments) and categorizes it into one of the FieldTypeChangeCategory cases. 
+ * Supports two message formats: + * - "Input field 'TypeName.fieldName' changed type from 'FromType' to 'ToType'" + * - "Type for argument 'name' on field 'TypeName.fieldName' changed from 'FromType' to 'ToType'" + * + * @param message - String in either format above + * @returns The category of the type change + * + * @example + * parseTypeChange("Input field 'SearchInput.hasPets' changed type from 'Boolean!' to '[Boolean!]!'") + * // Returns FieldTypeChangeCategory.REQUIRED_DIFFERENT_TO_REQUIRED_DIFFERENT + * + * @example + * parseTypeChange("Type for argument 'criteria' on field 'Query.findEmployees' changed from 'SearchInput' to 'SearchInput!'") + * // Returns FieldTypeChangeCategory.OPTIONAL_SAME_TO_REQUIRED_SAME + */ +export function parseTypeChange(message: string): FieldTypeChangeCategory { + // Try both patterns: "changed type from" (input fields) and "changed from" (arguments) + const match = + message.match(/changed type from '([^']+)' to '([^']+)'/) || message.match(/changed from '([^']+)' to '([^']+)'/); + + if (!match || match.length < 3) { + throw new Error(`Invalid type change message format: ${message}`); + } + + const fromType = match[1]; + const toType = match[2]; + + // Determine if types are required (end with "!") + const fromRequired = fromType.endsWith('!'); + const toRequired = toType.endsWith('!'); + + const fromBaseType = extractBaseType(fromType); + const toBaseType = extractBaseType(toType); + const fromNormalized = normalizeType(fromType); + const toNormalized = normalizeType(toType); + + // Check if base types are the same AND structure is the same + const sameBaseType = fromBaseType === toBaseType; + const sameStructure = fromNormalized === toNormalized; + + // Types are considered "same" only if both base type and structure match + const sameType = sameBaseType && sameStructure; + + // Categorize based on the 4 cases + if (sameType && !fromRequired && toRequired) { + // Case 1: Optional same type -> Required same type + // 
Example: "Boolean" -> "Boolean!" + return FieldTypeChangeCategory.OPTIONAL_SAME_TO_REQUIRED_SAME; + } else if (!sameType && !fromRequired && toRequired) { + // Case 2: Optional different type -> Required different type + // Example: "Boolean" -> "String!" + return FieldTypeChangeCategory.OPTIONAL_DIFFERENT_TO_REQUIRED_DIFFERENT; + } else if (!sameType && fromRequired && toRequired) { + // Case 3: Required different type -> Required different type + // Example: "Boolean!" -> "String!" + return FieldTypeChangeCategory.REQUIRED_DIFFERENT_TO_REQUIRED_DIFFERENT; + } else if (!sameType && !fromRequired && !toRequired) { + // Case 4: Optional different type -> Optional different type + // Example: "Boolean" -> "String" + return FieldTypeChangeCategory.OPTIONAL_DIFFERENT_TO_OPTIONAL_DIFFERENT; + } else { + // Edge case: same type, from required, to optional (shouldn't happen in breaking changes) + // Fallback to same type becoming required + return FieldTypeChangeCategory.OPTIONAL_SAME_TO_REQUIRED_SAME; + } +} + export interface InspectorSchemaChange { - schemaChangeId: string; typeName?: string; namedType?: string; fieldName?: string; path?: string[]; isInput?: boolean; isArgument?: boolean; + isNull?: boolean; +} + +export interface InspectorSchemaChangeGroup { + schemaChangeId: string; + changes: InspectorSchemaChange[]; } export interface InspectorFilter { @@ -36,40 +201,54 @@ export class SchemaUsageTrafficInspector { /** * Inspect the usage of a schema change in the last X days on real traffic and return the * affected operations. We will consider all available compositions. 
+ * @param changes - Array of change groups, where each group contains changes that should be queried together with OR conditions */ public async inspect( - changes: InspectorSchemaChange[], + changes: InspectorSchemaChangeGroup[], filter: InspectorFilter, ): Promise> { const results: Map = new Map(); - for (const change of changes) { - const where: string[] = []; - // Used for arguments usage check - if (change.path) { - where.push( - `startsWith(Path, [${change.path.map((seg) => `'${seg}'`).join(',')}]) AND length(Path) = ${ - change.path.length - }`, - ); - } - if (change.namedType) { - where.push(`NamedType = '${change.namedType}'`); - } - if (change.typeName) { - where.push(`hasAny(TypeNames, ['${change.typeName}'])`); + for (const changeGroup of changes) { + if (changeGroup.changes.length === 0) { + continue; } - // fieldName can be empty if a type was removed - if (change.fieldName) { - where.push(`FieldName = '${change.fieldName}'`); - } - if (change.isInput) { - where.push(`IsInput = true`); - } else if (change.isArgument) { - where.push(`IsArgument = true`); + + // Build OR conditions for each change in the group + const orConditions: string[] = []; + + for (const change of changeGroup.changes) { + const where: string[] = []; + // Used for arguments usage check + if (change.path) { + where.push( + `startsWith(Path, [${change.path.map((seg) => `'${seg}'`).join(',')}]) AND length(Path) = ${ + change.path.length + }`, + ); + } + if (change.namedType) { + where.push(`NamedType = '${change.namedType}'`); + } + if (change.typeName) { + where.push(`hasAny(TypeNames, ['${change.typeName}'])`); + } + // fieldName can be empty if a type was removed + if (change.fieldName) { + where.push(`FieldName = '${change.fieldName}'`); + } + if (change.isInput) { + where.push(`IsInput = true`); + } else if (change.isArgument) { + where.push(`IsArgument = true`); + } + where.push(`IsIndirectFieldUsage = false`); + + // Combine all conditions for this change with AND, then wrap 
in parentheses for OR grouping + orConditions.push(`(${where.join(' AND ')})`); } - where.push(`IsIndirectFieldUsage = false`); + // Build the query with OR conditions for all changes in the group const query = ` SELECT OperationHash as operationHash, last_value(OperationType) as operationType, @@ -83,7 +262,7 @@ export class SchemaUsageTrafficInspector { FederatedGraphID = '${filter.federatedGraphId}' AND hasAny(SubgraphIDs, ['${filter.subgraphId}']) AND OrganizationID = '${filter.organizationId}' AND - ${where.join(' AND ')} + (${orConditions.join(' OR ')}) GROUP BY OperationHash `; @@ -97,7 +276,7 @@ export class SchemaUsageTrafficInspector { if (Array.isArray(res)) { const ops = res.map((r) => ({ - schemaChangeId: change.schemaChangeId, + schemaChangeId: changeGroup.schemaChangeId, hash: r.operationHash, name: r.operationName, type: r.operationType, @@ -107,7 +286,7 @@ export class SchemaUsageTrafficInspector { })); if (ops.length > 0) { - results.set(change.schemaChangeId, [...(results.get(change.schemaChangeId) || []), ...ops]); + results.set(changeGroup.schemaChangeId, [...(results.get(changeGroup.schemaChangeId) || []), ...ops]); } } } @@ -118,11 +297,12 @@ export class SchemaUsageTrafficInspector { /** * Convert schema changes to inspector changes. Will ignore a change if it is not inspectable. * Ultimately, will result in a breaking change because the change is not inspectable with the current implementation. + * Returns an array of change groups, where each group contains changes that should be queried together with OR conditions. 
*/ public schemaChangesToInspectorChanges( schemaChanges: SchemaDiff[], schemaCheckActions: SchemaCheckChangeAction[], - ): InspectorSchemaChange[] { + ): InspectorSchemaChangeGroup[] { const operations = schemaChanges .map((change) => { // find the schema check action that matches the change @@ -135,7 +315,7 @@ export class SchemaUsageTrafficInspector { } return toInspectorChange(change, schemaCheckAction.id); }) - .filter((change) => change !== null) as InspectorSchemaChange[]; + .filter((change) => change !== null) as InspectorSchemaChangeGroup[]; return operations; } @@ -192,8 +372,9 @@ export function collectOperationUsageStats(inspectorResult: InspectorOperationRe /** * Convert a schema change to an inspector change. Throws an error if the change is not supported. * Only breaking changes should be passed to this function because we only care about breaking changes. + * Returns a group of changes that should be queried together in a single query with OR conditions. */ -export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): InspectorSchemaChange | null { +export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): InspectorSchemaChangeGroup | null { const path = change.path.split('.'); switch (change.changeType) { @@ -275,7 +456,11 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In case ChangeType.ObjectTypeInterfaceRemoved: { return { schemaChangeId: schemaCheckId, - typeName: path[0], + changes: [ + { + typeName: path[0], + }, + ], }; } // 1. When a field is removed we know the exact type and field name e.g. 'Engineer.name' @@ -284,8 +469,12 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In case ChangeType.FieldTypeChanged: { return { schemaChangeId: schemaCheckId, - typeName: path[0], - fieldName: path[1], + changes: [ + { + typeName: path[0], + fieldName: path[1], + }, + ], }; } // 1. 
When an enum value is added or removed, we only know the affected type. This is fine because any change to an enum value is breaking. @@ -295,42 +484,215 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In case ChangeType.EnumValueRemoved: { return { schemaChangeId: schemaCheckId, - namedType: path[0], + changes: [ + { + namedType: path[0], + }, + ], }; } // 1. When the type of input field has changed, we know the exact type name and field name e.g. 'MyInput.name' - case ChangeType.InputFieldTypeChanged: + case ChangeType.InputFieldTypeChanged: { + const inputFieldTypeChangeCategory = parseTypeChange(change.message); + switch (inputFieldTypeChangeCategory) { + case FieldTypeChangeCategory.OPTIONAL_SAME_TO_REQUIRED_SAME: { + // Int -> Int! + return { + schemaChangeId: schemaCheckId, + changes: [ + // if the input is used and the field is not passed, + // but now that it is required, it's breaking + { + typeName: path[0], + fieldName: path[1], + isInput: true, + isNull: true, + }, + // this case handles when the entire input is passed as null or not passed at all + { + path: [path[0]], + isInput: true, + isNull: true, + }, + ], + }; + } + case FieldTypeChangeCategory.OPTIONAL_DIFFERENT_TO_REQUIRED_DIFFERENT: { + // Int -> Float! + // in this case, all the ops which have this input type are breaking + return { + schemaChangeId: schemaCheckId, + changes: [ + { + path: [path[0]], + isInput: true, + }, + ], + }; + } + case FieldTypeChangeCategory.REQUIRED_DIFFERENT_TO_REQUIRED_DIFFERENT: { + // Int! -> Float!
+ // in this case, all the ops which have this input type are breaking + return { + schemaChangeId: schemaCheckId, + changes: [ + { + path: [path[0]], + isInput: true, + }, + ], + }; + } + case FieldTypeChangeCategory.OPTIONAL_DIFFERENT_TO_OPTIONAL_DIFFERENT: { + // Int -> Float + // in this case, any ops which use the input field and are not null are breaking + return { + schemaChangeId: schemaCheckId, + changes: [ + { + typeName: path[0], + fieldName: path[1], + isInput: true, + isNull: false, + }, + ], + }; + } + default: { + throw new Error(`Unsupported input field type change category: ${FieldTypeChangeCategory}`); + } + } + } case ChangeType.InputFieldRemoved: case ChangeType.InputFieldAdded: { + // in these cases, all the ops which use this input type are breaking return { schemaChangeId: schemaCheckId, - // passing only the type name, as we want to return all the ops which use this input type. - typeName: path[0], - isInput: true, + changes: [ + { + path: [path[0]], + isInput: true, + }, + ], }; } // 1. When an argument has changed, we know the exact path to the argument e.g. 'Query.engineer.id' // and the type name e.g. 'Query' - case ChangeType.FieldArgumentRemoved: { - return { - schemaChangeId: schemaCheckId, - path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names - typeName: path[0], // Enclosing type e.g. 'Query' or 'Engineer' when the argument is on a field of type Engineer - isArgument: true, - }; + case ChangeType.FieldArgumentTypeChanged: { + const argumentTypeChangeCategory = parseTypeChange(change.message); + switch (argumentTypeChangeCategory) { + case FieldTypeChangeCategory.OPTIONAL_SAME_TO_REQUIRED_SAME: { + // SearchInput -> SearchInput! + return { + schemaChangeId: schemaCheckId, + changes: [ + // if the argument is used and not passed (null), + // but now that it is required, its breaking + { + path: path.slice(1), // The path to the updated argument e.g. 
'engineer.name' of the type names + typeName: path[0], + fieldName: path[2], + isArgument: true, + isNull: true, + }, + ], + }; + } + case FieldTypeChangeCategory.OPTIONAL_DIFFERENT_TO_REQUIRED_DIFFERENT: { + // SearchInput -> String! + // in this case, all the ops which have this argument are breaking + return { + schemaChangeId: schemaCheckId, + changes: [ + { + path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names + typeName: path[0], + fieldName: path[2], + isArgument: true, + }, + ], + }; + } + case FieldTypeChangeCategory.REQUIRED_DIFFERENT_TO_REQUIRED_DIFFERENT: { + // SearchInput! -> String! + // in this case, all the ops which have this argument are breaking + return { + schemaChangeId: schemaCheckId, + changes: [ + { + path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names + typeName: path[0], + fieldName: path[2], + isArgument: true, + }, + ], + }; + } + case FieldTypeChangeCategory.OPTIONAL_DIFFERENT_TO_OPTIONAL_DIFFERENT: { + // SearchInput -> String + // in this case, any ops which use the argument and are not null are breaking + return { + schemaChangeId: schemaCheckId, + changes: [ + { + path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names + typeName: path[0], + fieldName: path[2], + isArgument: true, + isNull: false, + }, + ], + }; + } + default: { + throw new Error(`Unsupported argument type change category: ${argumentTypeChangeCategory}`); + } + } } - // Only when a required argument is added or type of an argument has changed to a required type - case ChangeType.FieldArgumentAdded: - case ChangeType.FieldArgumentTypeChanged: { + // Only when a required argument is added + case ChangeType.FieldArgumentAdded: { + // in this case, all the ops which have this argument are breaking return { schemaChangeId: schemaCheckId, - // The path should be just the query/mutation/subscription name - // e.g. 
if 'Query.employee.a', the path should be ['employee'] as its new field or it has changed the type of the argument, we check the usage of the operation. - path: path.slice(1, 2), - typeName: path[0], // Enclosing type e.g. 'Query' or 'Engineer' when the argument is on a field of type Engineer + changes: [ + { + // e.g. if the path recieved is 'Query.employee.a', the path should be ['employee'] as its new field or it has changed the type of the argument, we check the usage of the operation. + path: path.slice(1, 2), + typeName: path[0], + }, + ], }; } + case ChangeType.FieldArgumentRemoved: { + const isRequired = parseArgumentRemoval(change.message); + if (isRequired) { + // in this case, all the ops which use this argument are breaking + return { + schemaChangeId: schemaCheckId, + changes: [ + { + // e.g. if the path recieved is 'Query.employee.a', the path should be ['employee'] as its new field or it has changed the type of the argument, we check the usage of the operation. + path: path.slice(1, 2), + typeName: path[0], + }, + ], + }; + } else { + // in this case, any ops which use the argument and are not null are breaking + return { + schemaChangeId: schemaCheckId, + changes: [ + { + path: path.slice(1), // The path to the updated argument e.g. 
'engineer.name' of the type names + typeName: path[0], + isArgument: true, + isNull: false, + }, + ], + }; + } + } } // no return to enforce that all cases are handled } From ae3c079185510060a2b702643e3ac6f46efeccea Mon Sep 17 00:00:00 2001 From: JivusAyrus Date: Fri, 28 Nov 2025 17:45:41 +0530 Subject: [PATCH 08/22] refactor: update field type change categories for clarity and consistency --- .../SchemaUsageTrafficInspector.test.ts | 8 ++--- .../services/SchemaUsageTrafficInspector.ts | 36 ++++++++++--------- 2 files changed, 23 insertions(+), 21 deletions(-) diff --git a/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts b/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts index ca18430b24..0716a49d27 100644 --- a/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts +++ b/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts @@ -119,7 +119,7 @@ describe('Schema Change converter', (ctx) => { { path: ['a', 'b'], typeName: 'Query', - fieldName: 'a', + fieldName: 'b', isArgument: true, isNull: true, }, @@ -144,7 +144,7 @@ describe('Schema Change converter', (ctx) => { { path: ['a', 'b'], typeName: 'Query', - fieldName: 'a', + fieldName: 'b', isArgument: true, }, ]); @@ -168,7 +168,7 @@ describe('Schema Change converter', (ctx) => { { path: ['a', 'b'], typeName: 'Query', - fieldName: 'a', + fieldName: 'b', isArgument: true, }, ]); @@ -192,7 +192,7 @@ describe('Schema Change converter', (ctx) => { { path: ['a', 'b'], typeName: 'Query', - fieldName: 'a', + fieldName: 'b', isArgument: true, isNull: false, }, diff --git a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts index 67e8aa1ac4..9568da8436 100644 --- a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts +++ b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts @@ -8,22 +8,22 @@ export enum FieldTypeChangeCategory { * Optional same type -> Required same type * 
Example: "Boolean" -> "Boolean!" */ - OPTIONAL_SAME_TO_REQUIRED_SAME = 'OPTIONAL_SAME_TO_REQUIRED_SAME', + OPTIONAL_TO_REQUIRED_SAME = 'OPTIONAL_TO_REQUIRED_SAME', /** * Optional different type -> Required different type * Example: "Boolean" -> "String!" */ - OPTIONAL_DIFFERENT_TO_REQUIRED_DIFFERENT = 'OPTIONAL_DIFFERENT_TO_REQUIRED_DIFFERENT', + OPTIONAL_TO_REQUIRED_DIFFERENT = 'OPTIONAL_TO_REQUIRED_DIFFERENT', /** * Required different type -> Required different type * Example: "Boolean!" -> "String!" */ - REQUIRED_DIFFERENT_TO_REQUIRED_DIFFERENT = 'REQUIRED_DIFFERENT_TO_REQUIRED_DIFFERENT', + REQUIRED_TO_REQUIRED_DIFFERENT = 'REQUIRED_TO_REQUIRED_DIFFERENT', /** * Optional different type -> Optional different type * Example: "Boolean" -> "String" */ - OPTIONAL_DIFFERENT_TO_OPTIONAL_DIFFERENT = 'OPTIONAL_DIFFERENT_TO_OPTIONAL_DIFFERENT', + OPTIONAL_TO_OPTIONAL_DIFFERENT = 'OPTIONAL_TO_OPTIONAL_DIFFERENT', } /** @@ -143,23 +143,23 @@ export function parseTypeChange(message: string): FieldTypeChangeCategory { if (sameType && !fromRequired && toRequired) { // Case 1: Optional same type -> Required same type // Example: "Boolean" -> "Boolean!" - return FieldTypeChangeCategory.OPTIONAL_SAME_TO_REQUIRED_SAME; + return FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_SAME; } else if (!sameType && !fromRequired && toRequired) { // Case 2: Optional different type -> Required different type // Example: "Boolean" -> "String!" - return FieldTypeChangeCategory.OPTIONAL_DIFFERENT_TO_REQUIRED_DIFFERENT; + return FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_DIFFERENT; } else if (!sameType && fromRequired && toRequired) { // Case 3: Required different type -> Required different type // Example: "Boolean!" -> "String!" 
- return FieldTypeChangeCategory.REQUIRED_DIFFERENT_TO_REQUIRED_DIFFERENT; + return FieldTypeChangeCategory.REQUIRED_TO_REQUIRED_DIFFERENT; } else if (!sameType && !fromRequired && !toRequired) { // Case 4: Optional different type -> Optional different type // Example: "Boolean" -> "String" - return FieldTypeChangeCategory.OPTIONAL_DIFFERENT_TO_OPTIONAL_DIFFERENT; + return FieldTypeChangeCategory.OPTIONAL_TO_OPTIONAL_DIFFERENT; } else { // Edge case: same type, from required, to optional (shouldn't happen in breaking changes) // Fallback to same type becoming required - return FieldTypeChangeCategory.OPTIONAL_SAME_TO_REQUIRED_SAME; + return FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_SAME; } } @@ -241,6 +241,8 @@ export class SchemaUsageTrafficInspector { where.push(`IsInput = true`); } else if (change.isArgument) { where.push(`IsArgument = true`); + } else if (change.isNull !== undefined) { + where.push(`IsNull = ${change.isNull}`); } where.push(`IsIndirectFieldUsage = false`); @@ -495,7 +497,7 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In case ChangeType.InputFieldTypeChanged: { const inputFieldTypeChangeCategory = parseTypeChange(change.message); switch (inputFieldTypeChangeCategory) { - case FieldTypeChangeCategory.OPTIONAL_SAME_TO_REQUIRED_SAME: { + case FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_SAME: { // Int -> Int! return { schemaChangeId: schemaCheckId, @@ -517,7 +519,7 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In ], }; } - case FieldTypeChangeCategory.OPTIONAL_DIFFERENT_TO_REQUIRED_DIFFERENT: { + case FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_DIFFERENT: { // Int -> Float! 
// in this case, all the ops which have this input type are breaking return { @@ -530,7 +532,7 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In ], }; } - case FieldTypeChangeCategory.REQUIRED_DIFFERENT_TO_REQUIRED_DIFFERENT: { + case FieldTypeChangeCategory.REQUIRED_TO_REQUIRED_DIFFERENT: { // Int! -> Float! // in this case, all the ops which have this input type are breaking return { @@ -543,7 +545,7 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In ], }; } - case FieldTypeChangeCategory.OPTIONAL_DIFFERENT_TO_OPTIONAL_DIFFERENT: { + case FieldTypeChangeCategory.OPTIONAL_TO_OPTIONAL_DIFFERENT: { // Int -> Float // in this case, any ops which use the input field and are not null are breaking return { @@ -581,7 +583,7 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In case ChangeType.FieldArgumentTypeChanged: { const argumentTypeChangeCategory = parseTypeChange(change.message); switch (argumentTypeChangeCategory) { - case FieldTypeChangeCategory.OPTIONAL_SAME_TO_REQUIRED_SAME: { + case FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_SAME: { // SearchInput -> SearchInput! return { schemaChangeId: schemaCheckId, @@ -598,7 +600,7 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In ], }; } - case FieldTypeChangeCategory.OPTIONAL_DIFFERENT_TO_REQUIRED_DIFFERENT: { + case FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_DIFFERENT: { // SearchInput -> String! // in this case, all the ops which have this argument are breaking return { @@ -613,7 +615,7 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In ], }; } - case FieldTypeChangeCategory.REQUIRED_DIFFERENT_TO_REQUIRED_DIFFERENT: { + case FieldTypeChangeCategory.REQUIRED_TO_REQUIRED_DIFFERENT: { // SearchInput! -> String! 
// in this case, all the ops which have this argument are breaking return { @@ -628,7 +630,7 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In ], }; } - case FieldTypeChangeCategory.OPTIONAL_DIFFERENT_TO_OPTIONAL_DIFFERENT: { + case FieldTypeChangeCategory.OPTIONAL_TO_OPTIONAL_DIFFERENT: { // SearchInput -> String // in this case, any ops which use the argument and are not null are breaking return { From 58531dcb97e4f8e56efc0ddf95dc2d2fa13fed6a Mon Sep 17 00:00:00 2001 From: JivusAyrus Date: Fri, 28 Nov 2025 19:20:45 +0530 Subject: [PATCH 09/22] test: enhance schema change tests for clarity and coverage --- .../SchemaUsageTrafficInspector.test.ts | 55 ++++++++++++++----- .../services/SchemaUsageTrafficInspector.ts | 9 +-- 2 files changed, 44 insertions(+), 20 deletions(-) diff --git a/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts b/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts index 0716a49d27..f4655a2b55 100644 --- a/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts +++ b/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts @@ -19,6 +19,8 @@ describe('Schema Change converter', (ctx) => { const changes = await getBreakingChanges(a, b); + // the below conditions are for what would constitute a breaking change + // if the condition exists, it would be breaking expect(changes).toEqual([ { path: ['a'], @@ -101,7 +103,7 @@ describe('Schema Change converter', (ctx) => { ]); }); - test('Change argument type from optional same to required same', async () => { + test('Change argument type from optional to required same', async () => { const a = buildSchema(/* GraphQL */ ` type Query { a(b: Boolean): String @@ -126,7 +128,7 @@ describe('Schema Change converter', (ctx) => { ]); }); - test('Change argument type from optional different to required different', async () => { + test('Change argument type from optional to required different', async () => { const a = 
buildSchema(/* GraphQL */ ` type Query { a(b: Boolean): String @@ -150,7 +152,7 @@ describe('Schema Change converter', (ctx) => { ]); }); - test('Change argument type from required different to required different', async () => { + test('Change argument type from required to required different', async () => { const a = buildSchema(/* GraphQL */ ` type Query { a(b: Boolean!): String @@ -174,7 +176,7 @@ describe('Schema Change converter', (ctx) => { ]); }); - test('Change argument type from optional different to optional different', async () => { + test('Change argument type from optional to optional different', async () => { const a = buildSchema(/* GraphQL */ ` type Query { a(b: Boolean): String @@ -220,11 +222,12 @@ describe('Schema Change converter', (ctx) => { { path: ['Foo'], isInput: true, + isNull: false, }, ]); }); - test('Remove an Input field', async () => { + test('Remove an required input field', async () => { const a = buildSchema(/* GraphQL */ ` input Foo { a: String! @@ -243,11 +246,38 @@ describe('Schema Change converter', (ctx) => { { path: ['Foo'], isInput: true, + isNull: false, }, ]); }); - test('Change input field type from required different to required different', async () => { + test('Remove an optional input field', async () => { + const a = buildSchema(/* GraphQL */ ` + input Foo { + a: String! + b: String + } + `); + const b = buildSchema(/* GraphQL */ ` + input Foo { + a: String! + } + `); + + const changes = await getBreakingChanges(a, b); + + // As we dont know whether the field is optional or required, we use the same condition as required fields + // We will not miss any breaking ops but will have some ops which might not be breaking + expect(changes).toEqual([ + { + path: ['Foo'], + isInput: true, + isNull: false, + }, + ]); + }); + + test('Change input field type from required to required different', async () => { const a = buildSchema(/* GraphQL */ ` input Foo { a: String! 
@@ -265,11 +295,12 @@ describe('Schema Change converter', (ctx) => { { path: ['Foo'], isInput: true, + isNull: false, }, ]); }); - test('Change input field type from optional same to required same', async () => { + test('Change input field type from optional to required same', async () => { const a = buildSchema(/* GraphQL */ ` input Foo { a: String @@ -290,15 +321,10 @@ describe('Schema Change converter', (ctx) => { isInput: true, isNull: true, }, - { - path: ['Foo'], - isInput: true, - isNull: true, - }, ]); }); - test('Change input field type from optional different to required different', async () => { + test('Change input field type from optional to required different', async () => { const a = buildSchema(/* GraphQL */ ` input Foo { a: String @@ -316,11 +342,12 @@ describe('Schema Change converter', (ctx) => { { path: ['Foo'], isInput: true, + isNull: false, }, ]); }); - test('Change input field type from optional different to optional different', async () => { + test('Change input field type from optional to optional different', async () => { const a = buildSchema(/* GraphQL */ ` input Foo { a: String diff --git a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts index 9568da8436..11c25a2461 100644 --- a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts +++ b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts @@ -510,12 +510,6 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In isInput: true, isNull: true, }, - // thic case will handle when the entire input is passed as null or not passed at all - { - path: [path[0]], - isInput: true, - isNull: true, - }, ], }; } @@ -528,6 +522,7 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In { path: [path[0]], isInput: true, + isNull: false, }, ], }; @@ -541,6 +536,7 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In { path: 
[path[0]], isInput: true, + isNull: false, }, ], }; @@ -574,6 +570,7 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In { path: [path[0]], isInput: true, + isNull: false, }, ], }; From cd95ed260ddb280d9489f7ff6b08e49ce8d4fc99 Mon Sep 17 00:00:00 2001 From: JivusAyrus Date: Fri, 28 Nov 2025 19:50:41 +0530 Subject: [PATCH 10/22] refactor: unify inspector schema change handling by removing group structure --- controlplane/src/core/composition/composer.ts | 4 +- .../repositories/SchemaCheckRepository.ts | 9 +- .../core/repositories/SubgraphRepository.ts | 11 +- .../SchemaUsageTrafficInspector.test.ts | 31 ++- .../services/SchemaUsageTrafficInspector.ts | 259 +++++++----------- 5 files changed, 122 insertions(+), 192 deletions(-) diff --git a/controlplane/src/core/composition/composer.ts b/controlplane/src/core/composition/composer.ts index ee07221b7e..afc22b2237 100644 --- a/controlplane/src/core/composition/composer.ts +++ b/controlplane/src/core/composition/composer.ts @@ -44,7 +44,7 @@ import * as schema from '../../db/schema.js'; import { ClickHouseClient } from '../clickhouse/index.js'; import { CacheWarmerRepository } from '../repositories/CacheWarmerRepository.js'; import { NamespaceRepository } from '../repositories/NamespaceRepository.js'; -import { InspectorSchemaChangeGroup } from '../services/SchemaUsageTrafficInspector.js'; +import { InspectorSchemaChange } from '../services/SchemaUsageTrafficInspector.js'; import { SchemaCheckChangeAction } from '../../db/models.js'; import { composeFederatedGraphWithPotentialContracts, composeSubgraphs } from './composition.js'; import { getDiffBetweenGraphs, GetDiffBetweenGraphsResult, GetDiffBetweenGraphsSuccess } from './schemaCheck.js'; @@ -262,7 +262,7 @@ export type CheckSubgraph = { checkSubgraphId: string; newSchemaSDL: string; newGraphQLSchema?: GraphQLSchema; - inspectorChanges: InspectorSchemaChangeGroup[]; + inspectorChanges: InspectorSchemaChange[]; schemaChanges: 
GetDiffBetweenGraphsSuccess; storedBreakingChanges: SchemaCheckChangeAction[]; routerCompatibilityVersion: string; diff --git a/controlplane/src/core/repositories/SchemaCheckRepository.ts b/controlplane/src/core/repositories/SchemaCheckRepository.ts index 3bf3bbf700..9da31f4c84 100644 --- a/controlplane/src/core/repositories/SchemaCheckRepository.ts +++ b/controlplane/src/core/repositories/SchemaCheckRepository.ts @@ -40,7 +40,7 @@ import { getDiffBetweenGraphs, SchemaDiff } from '../composition/schemaCheck.js' import { collectOperationUsageStats, InspectorOperationResult, - InspectorSchemaChangeGroup, + InspectorSchemaChange, SchemaUsageTrafficInspector, } from '../services/SchemaUsageTrafficInspector.js'; import { @@ -902,7 +902,7 @@ export class SchemaCheckRepository { schemaCheckSubgraphId, }); - let inspectorChanges: InspectorSchemaChangeGroup[] = []; + let inspectorChanges: InspectorSchemaChange[] = []; // For operations checks we only consider breaking changes inspectorChanges = trafficInspector.schemaChangesToInspectorChanges( schemaChanges.breakingChanges, @@ -1040,10 +1040,7 @@ export class SchemaCheckRepository { That means any breaking change is really breaking */ for (const [subgraphName, checkSubgraph] of checkSubgraphs.entries()) { - if ( - composition.errors.length > 0 || - checkSubgraph.inspectorChanges.every((group) => group.changes.length === 0) - ) { + if (composition.errors.length > 0 || checkSubgraph.inspectorChanges.length === 0) { continue; } diff --git a/controlplane/src/core/repositories/SubgraphRepository.ts b/controlplane/src/core/repositories/SubgraphRepository.ts index 523de96f42..60ab3815a8 100644 --- a/controlplane/src/core/repositories/SubgraphRepository.ts +++ b/controlplane/src/core/repositories/SubgraphRepository.ts @@ -53,7 +53,7 @@ import { RBACEvaluator } from '../services/RBACEvaluator.js'; import { collectOperationUsageStats, InspectorOperationResult, - InspectorSchemaChangeGroup, + InspectorSchemaChange, 
SchemaUsageTrafficInspector, } from '../services/SchemaUsageTrafficInspector.js'; import { @@ -2003,7 +2003,7 @@ export class SubgraphRepository { const compositionErrors: PlainMessage[] = []; const compositionWarnings: PlainMessage[] = []; - let inspectorChanges: InspectorSchemaChangeGroup[] = []; + let inspectorChanges: InspectorSchemaChange[] = []; // For operations checks we only consider breaking changes inspectorChanges = trafficInspector.schemaChangesToInspectorChanges( @@ -2037,12 +2037,7 @@ export class SubgraphRepository { 3. When user wants to skip the traffic check altogether That means any breaking change is really breaking */ - if ( - composedGraph.errors.length > 0 || - inspectorChanges.every((group) => group.changes.length === 0) || - skipTrafficCheck || - !subgraph - ) { + if (composedGraph.errors.length > 0 || inspectorChanges.length === 0 || skipTrafficCheck || !subgraph) { continue; } diff --git a/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts b/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts index f4655a2b55..6b7a7d9131 100644 --- a/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts +++ b/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts @@ -1,7 +1,7 @@ import { describe, expect, test } from 'vitest'; import { buildSchema, GraphQLSchema } from 'graphql'; import { getSchemaDiff } from '../composition/schemaCheck.js'; -import { InspectorSchemaChange, InspectorSchemaChangeGroup, toInspectorChange } from './SchemaUsageTrafficInspector.js'; +import { InspectorSchemaChange, toInspectorChange } from './SchemaUsageTrafficInspector.js'; describe('Schema Change converter', (ctx) => { describe('Arguments', (ctx) => { @@ -23,6 +23,7 @@ describe('Schema Change converter', (ctx) => { // if the condition exists, it would be breaking expect(changes).toEqual([ { + schemaChangeId: '0', path: ['a'], typeName: 'Query', }, @@ -51,6 +52,7 @@ describe('Schema Change converter', (ctx) => { 
expect(changes).toEqual([ { + schemaChangeId: '0', path: ['details'], typeName: 'Rocket', }, @@ -73,6 +75,7 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { + schemaChangeId: '0', path: ['a'], typeName: 'Query', }, @@ -95,6 +98,7 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { + schemaChangeId: '0', path: ['a', 'b'], typeName: 'Query', isArgument: true, @@ -119,6 +123,7 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { + schemaChangeId: '0', path: ['a', 'b'], typeName: 'Query', fieldName: 'b', @@ -144,6 +149,7 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { + schemaChangeId: '0', path: ['a', 'b'], typeName: 'Query', fieldName: 'b', @@ -168,6 +174,7 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { + schemaChangeId: '0', path: ['a', 'b'], typeName: 'Query', fieldName: 'b', @@ -192,6 +199,7 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { + schemaChangeId: '0', path: ['a', 'b'], typeName: 'Query', fieldName: 'b', @@ -220,6 +228,7 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { + schemaChangeId: '0', path: ['Foo'], isInput: true, isNull: false, @@ -244,6 +253,7 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { + schemaChangeId: '0', path: ['Foo'], isInput: true, isNull: false, @@ -270,6 +280,7 @@ describe('Schema Change converter', (ctx) => { // We will not miss any breaking ops but will have some ops which might not be breaking expect(changes).toEqual([ { + schemaChangeId: '0', path: ['Foo'], isInput: true, isNull: false, @@ -293,6 +304,7 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { + schemaChangeId: '0', path: ['Foo'], isInput: true, isNull: false, @@ -316,6 +328,7 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { + schemaChangeId: '0', typeName: 'Foo', fieldName: 'a', isInput: 
true, @@ -340,6 +353,7 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { + schemaChangeId: '0', path: ['Foo'], isInput: true, isNull: false, @@ -363,6 +377,7 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { + schemaChangeId: '0', typeName: 'Foo', fieldName: 'a', isInput: true, @@ -392,9 +407,11 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { + schemaChangeId: '0', typeName: 'Rocket', }, { + schemaChangeId: '1', fieldName: 'a', typeName: 'Query', }, @@ -429,6 +446,7 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { + schemaChangeId: '0', namedType: 'enumA', }, ]); @@ -469,12 +487,7 @@ async function getBreakingChanges(a: GraphQLSchema, b: GraphQLSchema): Promise c !== null) as InspectorSchemaChangeGroup[]; - - // Flatten groups - const result: InspectorSchemaChange[] = []; - for (const group of groups) { - result.push(...group.changes); - } - return result; + .filter((c) => c !== null) as InspectorSchemaChange[]; + + return groups; } diff --git a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts index 11c25a2461..606bcbbf6f 100644 --- a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts +++ b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts @@ -164,6 +164,7 @@ export function parseTypeChange(message: string): FieldTypeChangeCategory { } export interface InspectorSchemaChange { + schemaChangeId: string; typeName?: string; namedType?: string; fieldName?: string; @@ -173,11 +174,6 @@ export interface InspectorSchemaChange { isNull?: boolean; } -export interface InspectorSchemaChangeGroup { - schemaChangeId: string; - changes: InspectorSchemaChange[]; -} - export interface InspectorFilter { federatedGraphId: string; organizationId: string; @@ -201,56 +197,45 @@ export class SchemaUsageTrafficInspector { /** * Inspect the usage of a schema change in the last X 
days on real traffic and return the * affected operations. We will consider all available compositions. - * @param changes - Array of change groups, where each group contains changes that should be queried together with OR conditions + * @param changes - Array of inspector changes */ public async inspect( - changes: InspectorSchemaChangeGroup[], + changes: InspectorSchemaChange[], filter: InspectorFilter, ): Promise> { const results: Map = new Map(); - for (const changeGroup of changes) { - if (changeGroup.changes.length === 0) { - continue; - } - - // Build OR conditions for each change in the group - const orConditions: string[] = []; - - for (const change of changeGroup.changes) { - const where: string[] = []; - // Used for arguments usage check - if (change.path) { - where.push( - `startsWith(Path, [${change.path.map((seg) => `'${seg}'`).join(',')}]) AND length(Path) = ${ - change.path.length - }`, - ); - } - if (change.namedType) { - where.push(`NamedType = '${change.namedType}'`); - } - if (change.typeName) { - where.push(`hasAny(TypeNames, ['${change.typeName}'])`); - } - // fieldName can be empty if a type was removed - if (change.fieldName) { - where.push(`FieldName = '${change.fieldName}'`); - } - if (change.isInput) { - where.push(`IsInput = true`); - } else if (change.isArgument) { - where.push(`IsArgument = true`); - } else if (change.isNull !== undefined) { - where.push(`IsNull = ${change.isNull}`); - } - where.push(`IsIndirectFieldUsage = false`); + for (const change of changes) { + const where: string[] = []; - // Combine all conditions for this change with AND, then wrap in parentheses for OR grouping - orConditions.push(`(${where.join(' AND ')})`); + // Used for arguments usage check + if (change.path) { + where.push( + `startsWith(Path, [${change.path.map((seg) => `'${seg}'`).join(',')}]) AND length(Path) = ${ + change.path.length + }`, + ); + } + if (change.namedType) { + where.push(`NamedType = '${change.namedType}'`); + } + if 
(change.typeName) { + where.push(`hasAny(TypeNames, ['${change.typeName}'])`); + } + // fieldName can be empty if a type was removed + if (change.fieldName) { + where.push(`FieldName = '${change.fieldName}'`); + } + if (change.isInput) { + where.push(`IsInput = true`); + } else if (change.isArgument) { + where.push(`IsArgument = true`); + } else if (change.isNull !== undefined) { + where.push(`IsNull = ${change.isNull}`); } + where.push(`IsIndirectFieldUsage = false`); - // Build the query with OR conditions for all changes in the group + // Build the query const query = ` SELECT OperationHash as operationHash, last_value(OperationType) as operationType, @@ -264,7 +249,7 @@ export class SchemaUsageTrafficInspector { FederatedGraphID = '${filter.federatedGraphId}' AND hasAny(SubgraphIDs, ['${filter.subgraphId}']) AND OrganizationID = '${filter.organizationId}' AND - (${orConditions.join(' OR ')}) + (${where.join(' AND ')}) GROUP BY OperationHash `; @@ -278,7 +263,7 @@ export class SchemaUsageTrafficInspector { if (Array.isArray(res)) { const ops = res.map((r) => ({ - schemaChangeId: changeGroup.schemaChangeId, + schemaChangeId: change.schemaChangeId, hash: r.operationHash, name: r.operationName, type: r.operationType, @@ -288,7 +273,7 @@ export class SchemaUsageTrafficInspector { })); if (ops.length > 0) { - results.set(changeGroup.schemaChangeId, [...(results.get(changeGroup.schemaChangeId) || []), ...ops]); + results.set(change.schemaChangeId, [...(results.get(change.schemaChangeId) || []), ...ops]); } } } @@ -299,12 +284,12 @@ export class SchemaUsageTrafficInspector { /** * Convert schema changes to inspector changes. Will ignore a change if it is not inspectable. * Ultimately, will result in a breaking change because the change is not inspectable with the current implementation. - * Returns an array of change groups, where each group contains changes that should be queried together with OR conditions. + * Returns an array of inspector changes. 
*/ public schemaChangesToInspectorChanges( schemaChanges: SchemaDiff[], schemaCheckActions: SchemaCheckChangeAction[], - ): InspectorSchemaChangeGroup[] { + ): InspectorSchemaChange[] { const operations = schemaChanges .map((change) => { // find the schema check action that matches the change @@ -317,7 +302,7 @@ export class SchemaUsageTrafficInspector { } return toInspectorChange(change, schemaCheckAction.id); }) - .filter((change) => change !== null) as InspectorSchemaChangeGroup[]; + .filter((change) => change !== null) as InspectorSchemaChange[]; return operations; } @@ -374,9 +359,9 @@ export function collectOperationUsageStats(inspectorResult: InspectorOperationRe /** * Convert a schema change to an inspector change. Throws an error if the change is not supported. * Only breaking changes should be passed to this function because we only care about breaking changes. - * Returns a group of changes that should be queried together in a single query with OR conditions. + * Returns an inspector change with the schemaChangeId included. */ -export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): InspectorSchemaChangeGroup | null { +export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): InspectorSchemaChange | null { const path = change.path.split('.'); switch (change.changeType) { @@ -458,11 +443,7 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In case ChangeType.ObjectTypeInterfaceRemoved: { return { schemaChangeId: schemaCheckId, - changes: [ - { - typeName: path[0], - }, - ], + typeName: path[0], }; } // 1. When a field is removed we know the exact type and field name e.g. 'Engineer.name' @@ -471,12 +452,8 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In case ChangeType.FieldTypeChanged: { return { schemaChangeId: schemaCheckId, - changes: [ - { - typeName: path[0], - fieldName: path[1], - }, - ], + typeName: path[0], + fieldName: path[1], }; } // 1. 
When an enum value is added or removed, we only know the affected type. This is fine because any change to an enum value is breaking. @@ -486,11 +463,7 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In case ChangeType.EnumValueRemoved: { return { schemaChangeId: schemaCheckId, - changes: [ - { - namedType: path[0], - }, - ], + namedType: path[0], }; } // 1. When the type of input field has changed, we know the exact type name and field name e.g. 'MyInput.name' @@ -501,16 +474,12 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In // Int -> Int! return { schemaChangeId: schemaCheckId, - changes: [ - // if the input is used and the field is not passed, - // but now that it is required, its breaking - { - typeName: path[0], - fieldName: path[1], - isInput: true, - isNull: true, - }, - ], + // if the input is used and the field is not passed, + // but now that it is required, its breaking + typeName: path[0], + fieldName: path[1], + isInput: true, + isNull: true, }; } case FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_DIFFERENT: { @@ -518,13 +487,9 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In // in this case, all the ops which have this input type are breaking return { schemaChangeId: schemaCheckId, - changes: [ - { - path: [path[0]], - isInput: true, - isNull: false, - }, - ], + path: [path[0]], + isInput: true, + isNull: false, }; } case FieldTypeChangeCategory.REQUIRED_TO_REQUIRED_DIFFERENT: { @@ -532,13 +497,9 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In // in this case, all the ops which have this input type are breaking return { schemaChangeId: schemaCheckId, - changes: [ - { - path: [path[0]], - isInput: true, - isNull: false, - }, - ], + path: [path[0]], + isInput: true, + isNull: false, }; } case FieldTypeChangeCategory.OPTIONAL_TO_OPTIONAL_DIFFERENT: { @@ -546,14 +507,10 @@ export function toInspectorChange(change: 
SchemaDiff, schemaCheckId: string): In // in this case, any ops which use the input field and are not null are breaking return { schemaChangeId: schemaCheckId, - changes: [ - { - typeName: path[0], - fieldName: path[1], - isInput: true, - isNull: false, - }, - ], + typeName: path[0], + fieldName: path[1], + isInput: true, + isNull: false, }; } default: { @@ -566,13 +523,9 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In // in these cases, all the ops which use this input type are breaking return { schemaChangeId: schemaCheckId, - changes: [ - { - path: [path[0]], - isInput: true, - isNull: false, - }, - ], + path: [path[0]], + isInput: true, + isNull: false, }; } // 1. When an argument has changed, we know the exact path to the argument e.g. 'Query.engineer.id' @@ -584,17 +537,13 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In // SearchInput -> SearchInput! return { schemaChangeId: schemaCheckId, - changes: [ - // if the argument is used and not passed (null), - // but now that it is required, its breaking - { - path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names - typeName: path[0], - fieldName: path[2], - isArgument: true, - isNull: true, - }, - ], + // if the argument is used and not passed (null), + // but now that it is required, its breaking + path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names + typeName: path[0], + fieldName: path[2], + isArgument: true, + isNull: true, }; } case FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_DIFFERENT: { @@ -602,14 +551,10 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In // in this case, all the ops which have this argument are breaking return { schemaChangeId: schemaCheckId, - changes: [ - { - path: path.slice(1), // The path to the updated argument e.g. 
'engineer.name' of the type names - typeName: path[0], - fieldName: path[2], - isArgument: true, - }, - ], + path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names + typeName: path[0], + fieldName: path[2], + isArgument: true, }; } case FieldTypeChangeCategory.REQUIRED_TO_REQUIRED_DIFFERENT: { @@ -617,14 +562,10 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In // in this case, all the ops which have this argument are breaking return { schemaChangeId: schemaCheckId, - changes: [ - { - path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names - typeName: path[0], - fieldName: path[2], - isArgument: true, - }, - ], + path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names + typeName: path[0], + fieldName: path[2], + isArgument: true, }; } case FieldTypeChangeCategory.OPTIONAL_TO_OPTIONAL_DIFFERENT: { @@ -632,15 +573,11 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In // in this case, any ops which use the argument and are not null are breaking return { schemaChangeId: schemaCheckId, - changes: [ - { - path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names - typeName: path[0], - fieldName: path[2], - isArgument: true, - isNull: false, - }, - ], + path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names + typeName: path[0], + fieldName: path[2], + isArgument: true, + isNull: false, }; } default: { @@ -654,13 +591,9 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In // in this case, all the ops which have this argument are breaking return { schemaChangeId: schemaCheckId, - changes: [ - { - // e.g. if the path recieved is 'Query.employee.a', the path should be ['employee'] as its new field or it has changed the type of the argument, we check the usage of the operation. 
- path: path.slice(1, 2), - typeName: path[0], - }, - ], + // e.g. if the path recieved is 'Query.employee.a', the path should be ['employee'] as its new field or it has changed the type of the argument, we check the usage of the operation. + path: path.slice(1, 2), + typeName: path[0], }; } case ChangeType.FieldArgumentRemoved: { @@ -669,26 +602,18 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In // in this case, all the ops which use this argument are breaking return { schemaChangeId: schemaCheckId, - changes: [ - { - // e.g. if the path recieved is 'Query.employee.a', the path should be ['employee'] as its new field or it has changed the type of the argument, we check the usage of the operation. - path: path.slice(1, 2), - typeName: path[0], - }, - ], + // e.g. if the path recieved is 'Query.employee.a', the path should be ['employee'] as its new field or it has changed the type of the argument, we check the usage of the operation. + path: path.slice(1, 2), + typeName: path[0], }; } else { // in this case, any ops which use the argument and are not null are breaking return { schemaChangeId: schemaCheckId, - changes: [ - { - path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names - typeName: path[0], - isArgument: true, - isNull: false, - }, - ], + path: path.slice(1), // The path to the updated argument e.g. 
'engineer.name' of the type names + typeName: path[0], + isArgument: true, + isNull: false, }; } } From b42ec8eaf99beac546c3255b0820cd1211d467b8 Mon Sep 17 00:00:00 2001 From: JivusAyrus Date: Fri, 28 Nov 2025 19:52:48 +0530 Subject: [PATCH 11/22] fix: correct SQL query formatting in SchemaUsageTrafficInspector --- controlplane/src/core/services/SchemaUsageTrafficInspector.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts index 606bcbbf6f..d3acb01330 100644 --- a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts +++ b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts @@ -249,7 +249,7 @@ export class SchemaUsageTrafficInspector { FederatedGraphID = '${filter.federatedGraphId}' AND hasAny(SubgraphIDs, ['${filter.subgraphId}']) AND OrganizationID = '${filter.organizationId}' AND - (${where.join(' AND ')}) + ${where.join(' AND ')} GROUP BY OperationHash `; From 52609fe257c5685912d4d61e2eed0574bfef51bf Mon Sep 17 00:00:00 2001 From: JivusAyrus Date: Fri, 28 Nov 2025 19:56:29 +0530 Subject: [PATCH 12/22] refactor: remove redundant comment in SchemaUsageTrafficInspector --- controlplane/src/core/services/SchemaUsageTrafficInspector.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts index d3acb01330..1b7238adf9 100644 --- a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts +++ b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts @@ -235,7 +235,6 @@ export class SchemaUsageTrafficInspector { } where.push(`IsIndirectFieldUsage = false`); - // Build the query const query = ` SELECT OperationHash as operationHash, last_value(OperationType) as operationType, From 06d81a96fde09a605a76f543b69ea4698c33b26f Mon Sep 17 00:00:00 2001 From: JivusAyrus Date: Mon, 1 Dec 
2025 11:58:02 +0100 Subject: [PATCH 13/22] fix: pr suggestions --- .../src/core/services/SchemaUsageTrafficInspector.test.ts | 2 +- controlplane/src/core/services/SchemaUsageTrafficInspector.ts | 2 +- ...0223951_recreate_gql_schema_usage_5m_90d_mv_with_is_null.sql | 1 - 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts b/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts index 6b7a7d9131..4975439ac2 100644 --- a/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts +++ b/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts @@ -236,7 +236,7 @@ describe('Schema Change converter', (ctx) => { ]); }); - test('Remove an required input field', async () => { + test('Remove a required input field', async () => { const a = buildSchema(/* GraphQL */ ` input Foo { a: String! diff --git a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts index 1b7238adf9..c37f008511 100644 --- a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts +++ b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts @@ -513,7 +513,7 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In }; } default: { - throw new Error(`Unsupported input field type change category: ${FieldTypeChangeCategory}`); + throw new Error(`Unsupported input field type change category: ${inputFieldTypeChangeCategory}`); } } } diff --git a/graphqlmetrics/migrations/20251120223951_recreate_gql_schema_usage_5m_90d_mv_with_is_null.sql b/graphqlmetrics/migrations/20251120223951_recreate_gql_schema_usage_5m_90d_mv_with_is_null.sql index 84e34366d4..f28d6f9d6d 100644 --- a/graphqlmetrics/migrations/20251120223951_recreate_gql_schema_usage_5m_90d_mv_with_is_null.sql +++ b/graphqlmetrics/migrations/20251120223951_recreate_gql_schema_usage_5m_90d_mv_with_is_null.sql @@ -32,7 +32,6 @@ GROUP BY 
FederatedGraphID, RouterConfigVersion, OrganizationID, - OperationType, ClientName, ClientVersion, Path, From a1ebd48c7c4f840b645346be898e58fa38c0b032 Mon Sep 17 00:00:00 2001 From: StarpTech Date: Mon, 1 Dec 2025 12:58:46 +0100 Subject: [PATCH 14/22] fix: nil check for variables, handle null lists --- router/pkg/graphqlschemausage/schemausage.go | 33 +- .../graphqlschemausage/schemausage_test.go | 312 ++++++++++++++++++ 2 files changed, 342 insertions(+), 3 deletions(-) diff --git a/router/pkg/graphqlschemausage/schemausage.go b/router/pkg/graphqlschemausage/schemausage.go index b2b2319123..e7d438297f 100644 --- a/router/pkg/graphqlschemausage/schemausage.go +++ b/router/pkg/graphqlschemausage/schemausage.go @@ -30,6 +30,12 @@ // IsNull flag to indicate null propagation. When an input is null, the chain stops there—nested // fields are not traversed since the parent is null. // +// For list-typed fields: +// - Null list values (e.g., tags: null where tags: [String]) are tracked with IsNull=true +// - Empty lists (e.g., tags: []) do not produce element entries (nothing to iterate) +// - Null elements within lists (e.g., tags: ["a", null, "b"]) are NOT individually tracked +// (the field-level usage already indicates the list type is being used) +// // # Design Components // // The package uses dependency injection and separation of concerns: @@ -76,6 +82,9 @@ func GetTypeFieldUsageInfo(operationPlan plan.Plan) []*TypeFieldUsageInfo { // GetArgumentUsageInfo extracts argument usage by correlating AST arguments with execution plan // field paths. Includes null tracking for both inline and variable-based argument values. +// +// The variables parameter can be nil, which is treated as "no variables provided". When nil, +// null detection for variable-based arguments will default to false (cannot determine nullness). 
func GetArgumentUsageInfo(operation, definition *ast.Document, variables *astjson.Value, operationPlan plan.Plan, remapVariables map[string]string) ([]*graphqlmetrics.ArgumentUsageInfo, error) { subgraphMapper := newSubgraphMapper(operationPlan, operation, definition) nullDetector := newNullValueDetector(operation, variables, remapVariables) @@ -108,6 +117,9 @@ func GetArgumentUsageInfo(operation, definition *ast.Document, variables *astjso // GetInputUsageInfo extracts input usage by traversing variable values. Tracks both explicit // nulls ({"field": null}) and implicit nulls (missing fields) for breaking change detection. // Also tracks input usage for implicitly null input type arguments (arguments not provided). +// +// The variables parameter can be nil, which is treated as "no variables provided". When nil, +// input object types are still tracked with IsNull=true for breaking change detection. func GetInputUsageInfo(operation, definition *ast.Document, variables *astjson.Value, operationPlan plan.Plan, remapVariables map[string]string) ([]*graphqlmetrics.InputUsageInfo, error) { subgraphMapper := newSubgraphMapper(operationPlan, operation, definition) traverser := newInputTraverser(definition, subgraphMapper) @@ -759,6 +771,8 @@ func (t *inputTraverser) createUsageInfo(fieldName, typeName, parentTypeName str func (t *inputTraverser) traverseInputObject(jsonValue *astjson.Value, fieldName, typeName, parentTypeName string, defNode ast.Node, usageInfo *graphqlmetrics.InputUsageInfo) { switch jsonValue.Type() { case astjson.TypeArray: + // Note: arrays at this level mean list of input objects (e.g., [InputType]) + // If we reach here, the array itself is not null, so iterate normally for _, arrayValue := range jsonValue.GetArray() { t.traverse(arrayValue, fieldName, typeName, parentTypeName, false) } @@ -805,6 +819,14 @@ func (t *inputTraverser) processField(fieldName string, value *astjson.Value, pa fieldIsNull := value.Type() == astjson.TypeNull if 
t.definition.TypeIsList(fieldDef.Type) { + // If the list field itself is null, record a single null usage and stop. + // This is critical for breaking change detection (e.g., [String] -> [String]!). + if fieldIsNull { + t.traverse(value, fieldName, fieldTypeName, parentTypeName, true) + return + } + + // List is not null - iterate through elements for _, arrayValue := range value.GetArray() { t.traverse(arrayValue, fieldName, fieldTypeName, parentTypeName, false) } @@ -894,6 +916,7 @@ func (t *inputTraverser) infoEquals(a, b *graphqlmetrics.InputUsageInfo) bool { // processVariableDefinition processes a variable definition and initiates input traversal. // Tracks input usage even when the variable is not provided in the variables JSON (empty variables). +// Handles nil variables gracefully by treating them as "no variables provided". func processVariableDefinition(traverser *inputTraverser, operation, definition *ast.Document, variables *astjson.Value, nullDetector *nullValueDetector, subgraphMapper *subgraphMapper, ref int) { varDef := operation.VariableDefinitions[ref] varTypeRef := varDef.Type @@ -905,10 +928,14 @@ func processVariableDefinition(traverser *inputTraverser, operation, definition // Map back to original name for JSON lookup originalVarName := nullDetector.getOriginalVariableName(normalizedVarName) - // Look up the variable value - jsonField := variables.Get(originalVarName) + // Look up the variable value (treat nil variables as "no variables provided") + var jsonField *astjson.Value + if variables != nil { + jsonField = variables.Get(originalVarName) + } + if jsonField == nil { - // Variable is not provided in variables JSON - still track input type usage if it's an input object type + // Variable is not provided in variables JSON (or variables is nil) - still track input type usage if it's an input object type // This is important for breaking change detection defNode, ok := definition.NodeByNameStr(varTypeName) if ok && defNode.Kind == 
ast.NodeKindInputObjectTypeDefinition { diff --git a/router/pkg/graphqlschemausage/schemausage_test.go b/router/pkg/graphqlschemausage/schemausage_test.go index 29c652bdcd..0926c1de26 100644 --- a/router/pkg/graphqlschemausage/schemausage_test.go +++ b/router/pkg/graphqlschemausage/schemausage_test.go @@ -2788,6 +2788,318 @@ func TestSharedVariableAcrossSubgraphs(t *testing.T) { assert.Len(t, actualInput.SubgraphIDs, 3, "Should have exactly 3 subgraph IDs (no duplicates)") } +// TestNullListHandling verifies that null list values are properly tracked with IsNull flag. +// This is critical for breaking change detection when a nullable list type becomes non-nullable. +func TestNullListHandling(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + search(filter: SearchFilter!): [Result!]! + } + + type Result { + id: ID! + } + + input SearchFilter { + tags: [String] + categories: [String] + scores: [Int] + } + ` + + tests := []struct { + name string + variables string + expectedUsage []graphqlmetricsv1.InputUsageInfo + description string + }{ + { + name: "null list - tags is explicitly null", + variables: `{ + "filter": { + "tags": null, + "categories": ["cat1", "cat2"] + } + }`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "tags"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: true, // Null list should be marked as null + }, + { + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "categories"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, + }, + { + NamedType: "Int", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "scores"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: true, // Implicit null (missing) + }, + { + NamedType: "SearchFilter", + Path: []string{"SearchFilter"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, + }, + }, + description: "Explicit null 
list value should be tracked with IsNull=true, not skipped", + }, + { + name: "empty list - not null", + variables: `{ + "filter": { + "tags": [], + "categories": ["cat1"] + } + }`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "categories"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, + }, + { + NamedType: "Int", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "scores"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: true, // Implicit null (missing) + }, + { + NamedType: "SearchFilter", + Path: []string{"SearchFilter"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, + }, + }, + description: "Empty list should not produce any element entries (nothing to iterate), only implicit null for missing fields", + }, + { + name: "all lists null", + variables: `{ + "filter": { + "tags": null, + "categories": null, + "scores": null + } + }`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "tags"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: true, + }, + { + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "categories"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: true, + }, + { + NamedType: "Int", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "scores"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: true, + }, + { + NamedType: "SearchFilter", + Path: []string{"SearchFilter"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, + }, + }, + description: "All null lists should be tracked with IsNull=true", + }, + } + + operation := ` + query SearchQuery($filter: SearchFilter!) 
{ + search(filter: $filter) { + id + } + } + ` + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration( + "search-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"search"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Result", FieldNames: []string{"id"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "SearchQuery", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(tt.variables) + require.NoError(t, err) + + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + assert.Len(t, inputUsageInfo, len(tt.expectedUsage), tt.description) + for i := range tt.expectedUsage { + assert.JSONEq(t, prettyJSON(t, &tt.expectedUsage[i]), prettyJSON(t, inputUsageInfo[i]), + "inputUsageInfo[%d] - %s", i, tt.description) + } + }) + } +} + +// TestNilVariablesHandling verifies that nil variables are handled gracefully without panicking. +// This is a defensive test to ensure the API doesn't crash when callers pass nil for variables. 
+func TestNilVariablesHandling(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: SearchInput): [Employee!]! + } + + type Employee { + id: ID! + } + + input SearchInput { + department: String + minAge: Int + } + ` + + operation := ` + query FindEmployees($criteria: SearchInput) { + findEmployees(criteria: $criteria) { + id + } + } + ` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration( + "employees-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Employee", FieldNames: []string{"id"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployees", report) + require.False(t, report.HasErrors()) + + // Test with nil variables - should not panic + t.Run("nil variables for GetInputUsageInfo", func(t *testing.T) { + inputUsageInfo, err := GetInputUsageInfo(&op, &def, nil, generatedPlan, nil) + require.NoError(t, err) + + // Should track SearchInput as implicitly null since variable not provided + var searchInputUsage *graphqlmetricsv1.InputUsageInfo + for _, input 
:= range inputUsageInfo { + if input.NamedType == "SearchInput" && len(input.Path) == 1 { + searchInputUsage = input + break + } + } + + require.NotNil(t, searchInputUsage, "Should track SearchInput even with nil variables") + assert.Equal(t, "SearchInput", searchInputUsage.NamedType) + assert.True(t, searchInputUsage.IsNull, "SearchInput should be null when variables is nil") + assert.Equal(t, []string{"employees-subgraph"}, searchInputUsage.SubgraphIDs) + }) + + t.Run("nil variables for GetArgumentUsageInfo", func(t *testing.T) { + // Should not panic + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, nil, generatedPlan, nil) + require.NoError(t, err) + + // Should track the criteria argument + require.Len(t, argumentUsageInfo, 1) + assert.Equal(t, "SearchInput", argumentUsageInfo[0].NamedType) + assert.Equal(t, []string{"findEmployees", "criteria"}, argumentUsageInfo[0].Path) + // With nil variables, we can't determine if the variable value is null + // so IsNull will be false (default behavior when variable can't be resolved) + assert.False(t, argumentUsageInfo[0].IsNull) + }) +} + func prettyJSON(t *testing.T, v interface{}) string { b, err := json.MarshalIndent(v, "", " ") require.NoError(t, err) From 64e14cd61e33b7e5ad1dd7ad10a8b294dc5c4ac2 Mon Sep 17 00:00:00 2001 From: StarpTech Date: Mon, 1 Dec 2025 14:03:24 +0100 Subject: [PATCH 15/22] fix: variable usage for empty list, refactor to use just one walk over operation --- router/pkg/graphqlschemausage/schemausage.go | 812 ++++++++++-------- .../graphqlschemausage/schemausage_test.go | 9 +- 2 files changed, 447 insertions(+), 374 deletions(-) diff --git a/router/pkg/graphqlschemausage/schemausage.go b/router/pkg/graphqlschemausage/schemausage.go index e7d438297f..d114de0030 100644 --- a/router/pkg/graphqlschemausage/schemausage.go +++ b/router/pkg/graphqlschemausage/schemausage.go @@ -32,22 +32,27 @@ // // For list-typed fields: // - Null list values (e.g., tags: null where tags: [String]) are 
tracked with IsNull=true -// - Empty lists (e.g., tags: []) do not produce element entries (nothing to iterate) +// - Empty lists (e.g., tags: []) are tracked with IsNull=false (field is used, just no elements) // - Null elements within lists (e.g., tags: ["a", null, "b"]) are NOT individually tracked // (the field-level usage already indicates the list type is being used) // // # Design Components // -// The package uses dependency injection and separation of concerns: +// The package uses a unified AST walk with pluggable collectors: // -// - pathBuilder: Reusable path stack operations for field traversal -// - nullValueDetector: Centralized null detection for values and variables with remapping support -// - subgraphMapper: Unified interface for field and variable → subgraph ID resolution -// - inputTypeResolver: Type system queries for input object field definitions -// - inputTraverser: Input traversal with implicit null tracking +// - walkContext: Shared state for AST traversal (path, stacks, documents) +// - collector: Interface for components that collect data during the walk +// - unifiedVisitor: Single AST walker that delegates to multiple collectors // -// These components are composed by visitor types to provide clean, testable, and maintainable -// schema usage extraction. +// Individual collectors handle specific concerns: +// - variableSubgraphCollector: Maps variables to subgraph IDs +// - argumentUsageCollector: Collects argument usage metrics +// - implicitInputCollector: Tracks implicit null input type arguments +// +// This design enables: +// - Single O(n) AST walk instead of multiple passes +// - Independent testing of each collector +// - Easy addition of new collectors without changing walk infrastructure package graphqlschemausage import ( @@ -86,32 +91,23 @@ func GetTypeFieldUsageInfo(operationPlan plan.Plan) []*TypeFieldUsageInfo { // The variables parameter can be nil, which is treated as "no variables provided". 
When nil, // null detection for variable-based arguments will default to false (cannot determine nullness). func GetArgumentUsageInfo(operation, definition *ast.Document, variables *astjson.Value, operationPlan plan.Plan, remapVariables map[string]string) ([]*graphqlmetrics.ArgumentUsageInfo, error) { - subgraphMapper := newSubgraphMapper(operationPlan, operation, definition) + fieldSubgraphMap := buildFieldSubgraphIDMap(operationPlan) nullDetector := newNullValueDetector(operation, variables, remapVariables) - walker := astvisitor.NewWalker(48) - visitor := &argumentUsageInfoVisitor{ - definition: definition, - operation: operation, - walker: &walker, - subgraphMapper: subgraphMapper, - nullDetector: nullDetector, - pathBuilder: newPathBuilder(8), - usage: make([]*graphqlmetrics.ArgumentUsageInfo, 0, 16), - currentFieldRef: -1, - providedArgumentsStack: make([]map[string]struct{}, 0, 8), - fieldEnclosingNodeStack: make([]ast.Node, 0, 8), - } - walker.RegisterEnterArgumentVisitor(visitor) - walker.RegisterEnterFieldVisitor(visitor) - walker.RegisterLeaveFieldVisitor(visitor) - rep := &operationreport.Report{} - walker.Walk(operation, definition, rep) - if rep.HasErrors() { - return nil, rep + // Create argument collector (no variable mapping needed for argument usage) + argCollector := newArgumentUsageCollector(operation, definition, nullDetector) + + // Run unified walk + ctx := newWalkContext(operation, definition) + err := runUnifiedWalk(ctx, argCollector) + if err != nil { + return nil, err } - return visitor.usage, nil + // Finalize argument usage with subgraph IDs + argCollector.finalizeSubgraphIDs(fieldSubgraphMap) + + return argCollector.usage, nil } // GetInputUsageInfo extracts input usage by traversing variable values. Tracks both explicit @@ -121,17 +117,36 @@ func GetArgumentUsageInfo(operation, definition *ast.Document, variables *astjso // The variables parameter can be nil, which is treated as "no variables provided". 
When nil, // input object types are still tracked with IsNull=true for breaking change detection. func GetInputUsageInfo(operation, definition *ast.Document, variables *astjson.Value, operationPlan plan.Plan, remapVariables map[string]string) ([]*graphqlmetrics.InputUsageInfo, error) { - subgraphMapper := newSubgraphMapper(operationPlan, operation, definition) - traverser := newInputTraverser(definition, subgraphMapper) + fieldSubgraphMap := buildFieldSubgraphIDMap(operationPlan) nullDetector := newNullValueDetector(operation, variables, remapVariables) + // Create collectors + varCollector := newVariableSubgraphCollector(operation, fieldSubgraphMap) + inputCollector := newImplicitInputCollector(definition) + + // Run unified walk + ctx := newWalkContext(operation, definition) + err := runUnifiedWalk(ctx, varCollector, inputCollector) + if err != nil { + return nil, err + } + + // Build subgraph mapper from collected variable mappings + subgraphMapper := &subgraphMapper{ + fieldToSubgraphs: fieldSubgraphMap, + variableToSubgraphs: varCollector.variableMap, + } + + // Create input traverser and process variable definitions + traverser := newInputTraverser(definition, subgraphMapper) + // Track input usage from variable definitions for i := range operation.VariableDefinitions { processVariableDefinition(traverser, operation, definition, variables, nullDetector, subgraphMapper, i) } - // Track input usage from implicitly null input type arguments - collectImplicitArgumentInputUsage(operation, definition, subgraphMapper, traverser) + // Finalize implicit input usage with subgraph IDs + inputCollector.finalizeUsage(traverser, fieldSubgraphMap) return traverser.usage, nil } @@ -191,10 +206,12 @@ func (t *TypeFieldUsageInfo) IntoGraphQLMetrics() *graphqlmetrics.TypeFieldUsage } } +// typeFieldUsageInfoVisitor walks the execution plan to extract type and field usage. 
type typeFieldUsageInfoVisitor struct { typeFieldUsageInfo []*TypeFieldUsageInfo } +// visitNode recursively traverses the resolve tree to extract field usage info. func (p *typeFieldUsageInfoVisitor) visitNode(node resolve.Node, path []string) { switch t := node.(type) { case *resolve.Object: @@ -234,6 +251,149 @@ func (p *typeFieldUsageInfoVisitor) visitNode(node resolve.Node, path []string) } } +// ============================================ +// Unified AST Walk Infrastructure +// ============================================ + +// walkContext provides shared state for AST traversal. +// It manages common resources like path building and enclosing type tracking +// that multiple collectors need during the walk. +type walkContext struct { + walker *astvisitor.Walker + operation *ast.Document + definition *ast.Document + pathBuilder *pathBuilder + enclosingStack []ast.Node // Stack of enclosing type definitions + argumentsStack []map[string]struct{} // Stack tracking provided arguments per field +} + +func newWalkContext(operation, definition *ast.Document) *walkContext { + return &walkContext{ + operation: operation, + definition: definition, + pathBuilder: newPathBuilder(8), + enclosingStack: make([]ast.Node, 0, 8), + argumentsStack: make([]map[string]struct{}, 0, 8), + } +} + +// PathKey returns the current field path as a dot-separated string +func (c *walkContext) PathKey() string { + return c.pathBuilder.key() +} + +// CurrentEnclosingNode returns the enclosing type definition for the current field +func (c *walkContext) CurrentEnclosingNode() (ast.Node, bool) { + if len(c.enclosingStack) == 0 { + return ast.Node{}, false + } + return c.enclosingStack[len(c.enclosingStack)-1], true +} + +// CurrentProvidedArguments returns the set of provided arguments for the current field +func (c *walkContext) CurrentProvidedArguments() map[string]struct{} { + if len(c.argumentsStack) == 0 { + return nil + } + return c.argumentsStack[len(c.argumentsStack)-1] +} + +// 
TrackProvidedArgument records that an argument was provided for the current field +func (c *walkContext) TrackProvidedArgument(argName string) { + if len(c.argumentsStack) == 0 { + return + } + stackIdx := len(c.argumentsStack) - 1 + if c.argumentsStack[stackIdx] == nil { + c.argumentsStack[stackIdx] = make(map[string]struct{}, 4) + } + c.argumentsStack[stackIdx][argName] = struct{}{} +} + +// collector is the interface for components that collect data during AST traversal. +// Each collector handles a specific concern (variable mapping, argument usage, etc.) +// and can be tested independently. +type collector interface { + // EnterField is called when entering a field during AST traversal + EnterField(ctx *walkContext, ref int) + // LeaveField is called when leaving a field during AST traversal + LeaveField(ctx *walkContext, ref int) + // EnterArgument is called when entering an argument during AST traversal + EnterArgument(ctx *walkContext, ref int) +} + +// unifiedVisitor walks the AST once and delegates to multiple collectors. +// It manages the shared walk context and invokes collectors at each AST node. 
+type unifiedVisitor struct { + ctx *walkContext + collectors []collector +} + +func (v *unifiedVisitor) EnterField(ref int) { + // Update shared context + v.ctx.enclosingStack = append(v.ctx.enclosingStack, v.ctx.walker.EnclosingTypeDefinition) + v.ctx.argumentsStack = append(v.ctx.argumentsStack, nil) + fieldName := v.ctx.operation.FieldNameString(ref) + v.ctx.pathBuilder.push(fieldName) + + // Delegate to collectors + for _, c := range v.collectors { + c.EnterField(v.ctx, ref) + } +} + +func (v *unifiedVisitor) LeaveField(ref int) { + // Delegate to collectors first (they may need context state) + for _, c := range v.collectors { + c.LeaveField(v.ctx, ref) + } + + // Update shared context + v.ctx.pathBuilder.pop() + if len(v.ctx.enclosingStack) > 0 { + v.ctx.enclosingStack = v.ctx.enclosingStack[:len(v.ctx.enclosingStack)-1] + } + if len(v.ctx.argumentsStack) > 0 { + v.ctx.argumentsStack = v.ctx.argumentsStack[:len(v.ctx.argumentsStack)-1] + } +} + +func (v *unifiedVisitor) EnterArgument(ref int) { + // Track provided argument in shared context + argName := v.ctx.operation.ArgumentNameBytes(ref) + anc := v.ctx.walker.Ancestors[len(v.ctx.walker.Ancestors)-1] + if anc.Kind == ast.NodeKindField { + v.ctx.TrackProvidedArgument(string(argName)) + } + + // Delegate to collectors + for _, c := range v.collectors { + c.EnterArgument(v.ctx, ref) + } +} + +// runUnifiedWalk executes a single AST walk with the given collectors. 
+func runUnifiedWalk(ctx *walkContext, collectors ...collector) error { + walker := astvisitor.NewWalker(48) + ctx.walker = &walker + + visitor := &unifiedVisitor{ + ctx: ctx, + collectors: collectors, + } + + walker.RegisterEnterFieldVisitor(visitor) + walker.RegisterLeaveFieldVisitor(visitor) + walker.RegisterEnterArgumentVisitor(visitor) + + rep := &operationreport.Report{} + walker.Walk(ctx.operation, ctx.definition, rep) + if rep.HasErrors() { + return rep + } + return nil +} + // ============================================ // Path Builder (Shared Infrastructure) // ============================================ @@ -257,12 +417,6 @@ func (p *pathBuilder) pop() { } } -func (p *pathBuilder) copy() []string { - result := make([]string, len(p.stack)) - copy(result, p.stack) - return result -} - func (p *pathBuilder) key() string { return strings.Join(p.stack, ".") } @@ -327,14 +481,6 @@ type subgraphMapper struct { variableToSubgraphs map[string][]string } -func newSubgraphMapper(operationPlan plan.Plan, operation, definition *ast.Document) *subgraphMapper { - mapper := &subgraphMapper{ - fieldToSubgraphs: buildFieldSubgraphIDMap(operationPlan), - } - mapper.variableToSubgraphs = buildVariableSubgraphMap(operation, definition, mapper.fieldToSubgraphs) - return mapper -} - // getFieldSubgraphs returns subgraph IDs for a field path func (s *subgraphMapper) getFieldSubgraphs(pathKey string) []string { return s.fieldToSubgraphs[pathKey] @@ -360,11 +506,13 @@ func buildFieldSubgraphIDMap(operationPlan plan.Plan) map[string][]string { return collector.fieldMap } +// subgraphIDCollector walks the execution plan to extract field path → subgraph ID mappings. type subgraphIDCollector struct { fieldMap map[string][]string pathStack []string } +// collectFromNode recursively extracts field → subgraph ID mappings from the resolve tree. 
func (c *subgraphIDCollector) collectFromNode(node resolve.Node) { switch t := node.(type) { case *resolve.Object: @@ -383,45 +531,64 @@ func (c *subgraphIDCollector) collectFromNode(node resolve.Node) { } } -// buildVariableSubgraphMap maps variable names to subgraph IDs by analyzing which fields use them. -func buildVariableSubgraphMap(operation, definition *ast.Document, fieldSubgraphMap map[string][]string) map[string][]string { - variableMap := make(map[string][]string) - walker := astvisitor.NewWalker(48) - collector := &variableSubgraphCollector{ - walker: &walker, - operation: operation, - definition: definition, - fieldSubgraphMap: fieldSubgraphMap, - variableMap: variableMap, - pathBuilder: newPathBuilder(8), +// mergeSubgraphIDs combines two slices of subgraph IDs, removing duplicates. +func mergeSubgraphIDs(a, b []string) []string { + if len(a) == 0 { + return b } - walker.RegisterEnterFieldVisitor(collector) - walker.RegisterLeaveFieldVisitor(collector) - walker.RegisterEnterArgumentVisitor(collector) - rep := &operationreport.Report{} - walker.Walk(operation, definition, rep) - return variableMap + if len(b) == 0 { + return a + } + + seen := make(map[string]bool, len(a)+len(b)) + result := make([]string, 0, len(a)+len(b)) + + for _, id := range a { + if !seen[id] { + seen[id] = true + result = append(result, id) + } + } + + for _, id := range b { + if !seen[id] { + seen[id] = true + result = append(result, id) + } + } + + return result } +// ============================================ +// Variable Subgraph Collector +// ============================================ + +// variableSubgraphCollector maps variable names to subgraph IDs by tracking +// which fields use each variable. Implements the collector interface. 
type variableSubgraphCollector struct { - walker *astvisitor.Walker operation *ast.Document - definition *ast.Document fieldSubgraphMap map[string][]string variableMap map[string][]string - pathBuilder *pathBuilder } -func (v *variableSubgraphCollector) EnterField(ref int) { - fieldName := v.operation.FieldNameString(ref) - v.pathBuilder.push(fieldName) +func newVariableSubgraphCollector(operation *ast.Document, fieldSubgraphMap map[string][]string) *variableSubgraphCollector { + return &variableSubgraphCollector{ + operation: operation, + fieldSubgraphMap: fieldSubgraphMap, + variableMap: make(map[string][]string), + } +} + +func (v *variableSubgraphCollector) EnterField(_ *walkContext, _ int) { + // No action needed - context handles path building } -func (v *variableSubgraphCollector) LeaveField(_ int) { - v.pathBuilder.pop() +func (v *variableSubgraphCollector) LeaveField(_ *walkContext, _ int) { + // No action needed - context handles path building } -func (v *variableSubgraphCollector) EnterArgument(ref int) { +func (v *variableSubgraphCollector) EnterArgument(ctx *walkContext, ref int) { arg := v.operation.Arguments[ref] if arg.Value.Kind != ast.ValueKindVariable { @@ -433,163 +600,225 @@ func (v *variableSubgraphCollector) EnterArgument(ref int) { return } - pathKey := v.pathBuilder.key() + pathKey := ctx.PathKey() if subgraphIDs, exists := v.fieldSubgraphMap[pathKey]; exists { v.variableMap[varName] = mergeSubgraphIDs(v.variableMap[varName], subgraphIDs) } } -// mergeSubgraphIDs combines two slices of subgraph IDs, removing duplicates. 
-func mergeSubgraphIDs(a, b []string) []string { - if len(a) == 0 { - return b - } - if len(b) == 0 { - return a - } - - seen := make(map[string]bool, len(a)+len(b)) - result := make([]string, 0, len(a)+len(b)) +// ============================================ +// Argument Usage Collector +// ============================================ - for _, id := range a { - if !seen[id] { - seen[id] = true - result = append(result, id) - } - } +// argumentUsageCollector collects argument usage metrics during AST traversal. +// It tracks both provided arguments and implicit null arguments. +// Implements the collector interface. +type argumentUsageCollector struct { + operation *ast.Document + definition *ast.Document + nullDetector *nullValueDetector + usage []*graphqlmetrics.ArgumentUsageInfo + // Temporary storage for path keys, resolved after walk when subgraph map is complete + pathKeyPerUsage []string +} - for _, id := range b { - if !seen[id] { - seen[id] = true - result = append(result, id) - } +func newArgumentUsageCollector(operation, definition *ast.Document, nullDetector *nullValueDetector) *argumentUsageCollector { + return &argumentUsageCollector{ + operation: operation, + definition: definition, + nullDetector: nullDetector, + usage: make([]*graphqlmetrics.ArgumentUsageInfo, 0, 16), + pathKeyPerUsage: make([]string, 0, 16), } +} - return result +func (a *argumentUsageCollector) EnterField(_ *walkContext, _ int) { + // No action needed - context handles path and stack management } -// ============================================ -// Argument Usage Visitor -// ============================================ +func (a *argumentUsageCollector) LeaveField(ctx *walkContext, ref int) { + // Track implicit null arguments (defined in schema but not provided) + a.trackImplicitNullArguments(ctx, ref) +} -type argumentUsageInfoVisitor struct { - walker *astvisitor.Walker - definition *ast.Document - operation *ast.Document - fieldEnclosingNodeStack []ast.Node // Stack to track 
enclosing nodes for nested fields - subgraphMapper *subgraphMapper - nullDetector *nullValueDetector - pathBuilder *pathBuilder - usage []*graphqlmetrics.ArgumentUsageInfo - currentFieldRef int - providedArgumentsStack []map[string]struct{} // Stack of maps to track which arguments were provided at each level -} - -func (a *argumentUsageInfoVisitor) EnterField(ref int) { - // Push current enclosing node onto stack - a.fieldEnclosingNodeStack = append(a.fieldEnclosingNodeStack, a.walker.EnclosingTypeDefinition) - a.currentFieldRef = ref - // Push nil - will lazily allocate map only if field has arguments - a.providedArgumentsStack = append(a.providedArgumentsStack, nil) - fieldName := a.operation.FieldNameString(ref) - a.pathBuilder.push(fieldName) -} - -func (a *argumentUsageInfoVisitor) LeaveField(ref int) { - // Track implicit null arguments (arguments defined in schema but not provided in operation) - a.trackImplicitNullArguments(ref) - a.pathBuilder.pop() - a.currentFieldRef = -1 - // Pop the enclosing node from stack - if len(a.fieldEnclosingNodeStack) > 0 { - a.fieldEnclosingNodeStack = a.fieldEnclosingNodeStack[:len(a.fieldEnclosingNodeStack)-1] - } - // Pop the provided arguments map - if len(a.providedArgumentsStack) > 0 { - a.providedArgumentsStack = a.providedArgumentsStack[:len(a.providedArgumentsStack)-1] - } -} - -func (a *argumentUsageInfoVisitor) EnterArgument(ref int) { +func (a *argumentUsageCollector) EnterArgument(ctx *walkContext, ref int) { argName := a.operation.ArgumentNameBytes(ref) - anc := a.walker.Ancestors[len(a.walker.Ancestors)-1] + anc := ctx.walker.Ancestors[len(ctx.walker.Ancestors)-1] if anc.Kind != ast.NodeKindField { return } - // Track that this argument was provided in the current field's map - // Lazily allocate map only when first argument is encountered - if len(a.providedArgumentsStack) > 0 { - stackIdx := len(a.providedArgumentsStack) - 1 - if a.providedArgumentsStack[stackIdx] == nil { - 
a.providedArgumentsStack[stackIdx] = make(map[string]struct{}, 4) // Capacity hint: most fields have 1-4 args - } - a.providedArgumentsStack[stackIdx][string(argName)] = struct{}{} - } - - // Get enclosing node from top of stack - if len(a.fieldEnclosingNodeStack) == 0 { + enclosingNode, ok := ctx.CurrentEnclosingNode() + if !ok { return } - fieldEnclosingNode := a.fieldEnclosingNodeStack[len(a.fieldEnclosingNodeStack)-1] fieldName := a.operation.FieldNameBytes(anc.Ref) - enclosingTypeName := a.definition.NodeNameBytes(fieldEnclosingNode) - argDef := a.definition.NodeFieldDefinitionArgumentDefinitionByName(fieldEnclosingNode, fieldName, argName) + enclosingTypeName := a.definition.NodeNameBytes(enclosingNode) + argDef := a.definition.NodeFieldDefinitionArgumentDefinitionByName(enclosingNode, fieldName, argName) if argDef == -1 { return } argType := a.definition.InputValueDefinitionType(argDef) typeName := a.definition.ResolveTypeNameBytes(argType) - // Get subgraph IDs using the path builder - subgraphIDs := a.subgraphMapper.getFieldSubgraphs(a.pathBuilder.key()) - - // Check if argument is null using null detector + // Check if argument is null arg := a.operation.Arguments[ref] isNull := a.nullDetector.isValueNull(arg.Value) + // Store usage info (subgraph IDs will be resolved later) a.usage = append(a.usage, &graphqlmetrics.ArgumentUsageInfo{ - Path: []string{string(fieldName), string(argName)}, - TypeName: string(enclosingTypeName), - NamedType: string(typeName), - SubgraphIDs: subgraphIDs, - IsNull: isNull, + Path: []string{string(fieldName), string(argName)}, + TypeName: string(enclosingTypeName), + NamedType: string(typeName), + IsNull: isNull, }) + a.pathKeyPerUsage = append(a.pathKeyPerUsage, ctx.PathKey()) } -// trackImplicitNullArguments tracks arguments defined in the schema but not provided in the operation. -// This is critical for breaking change detection - we need to know if arguments are being used or not. 
-func (a *argumentUsageInfoVisitor) trackImplicitNullArguments(fieldRef int) { - // Get enclosing node from top of stack - if len(a.fieldEnclosingNodeStack) == 0 { +// trackImplicitNullArguments tracks arguments defined in schema but not provided in operation. +func (a *argumentUsageCollector) trackImplicitNullArguments(ctx *walkContext, fieldRef int) { + enclosingNode, ok := ctx.CurrentEnclosingNode() + if !ok || enclosingNode.Kind == ast.NodeKindUnknown { return } - fieldEnclosingNode := a.fieldEnclosingNodeStack[len(a.fieldEnclosingNodeStack)-1] - if fieldEnclosingNode.Kind == ast.NodeKindUnknown { + fieldName := a.operation.FieldNameBytes(fieldRef) + // Skip introspection fields + if len(fieldName) > 1 && fieldName[0] == '_' && fieldName[1] == '_' { return } + enclosingTypeName := a.definition.NodeNameBytes(enclosingNode) + + // Find all arguments defined for this field + argumentRefs := getFieldArgumentRefs(a.definition, enclosingNode, fieldName) + + // Get provided arguments from context + providedArguments := ctx.CurrentProvidedArguments() + + pathKey := ctx.PathKey() + + // Track arguments that are defined but not provided + for _, argRef := range argumentRefs { + argName := string(a.definition.InputValueDefinitionNameString(argRef)) + + if providedArguments != nil { + if _, provided := providedArguments[argName]; provided { + continue + } + } + + argType := a.definition.InputValueDefinitionType(argRef) + typeName := a.definition.ResolveTypeNameString(argType) + + a.usage = append(a.usage, &graphqlmetrics.ArgumentUsageInfo{ + Path: []string{string(fieldName), argName}, + TypeName: string(enclosingTypeName), + NamedType: typeName, + IsNull: true, + }) + a.pathKeyPerUsage = append(a.pathKeyPerUsage, pathKey) + } +} + +// finalizeSubgraphIDs resolves subgraph IDs for all collected usage after the walk completes. 
+func (a *argumentUsageCollector) finalizeSubgraphIDs(fieldSubgraphMap map[string][]string) { + for i, pathKey := range a.pathKeyPerUsage { + a.usage[i].SubgraphIDs = fieldSubgraphMap[pathKey] + } +} + +// ============================================ +// Implicit Input Collector +// ============================================ + +// implicitInputUsage stores data needed to finalize implicit input usage after the walk. +type implicitInputUsage struct { + typeName string + pathKey string +} + +// implicitInputCollector tracks implicit null input type arguments during AST traversal. +// Implements the collector interface. +type implicitInputCollector struct { + definition *ast.Document + implicitInputs []implicitInputUsage +} + +func newImplicitInputCollector(definition *ast.Document) *implicitInputCollector { + return &implicitInputCollector{ + definition: definition, + implicitInputs: make([]implicitInputUsage, 0, 8), + } +} + +func (c *implicitInputCollector) EnterField(_ *walkContext, _ int) { + // No action needed +} + +func (c *implicitInputCollector) LeaveField(ctx *walkContext, ref int) { + c.trackImplicitInputTypeArguments(ctx, ref) +} + +func (c *implicitInputCollector) EnterArgument(_ *walkContext, _ int) { + // Argument tracking is handled by walkContext +} + +func (c *implicitInputCollector) trackImplicitInputTypeArguments(ctx *walkContext, fieldRef int) { + enclosingNode, ok := ctx.CurrentEnclosingNode() + if !ok || enclosingNode.Kind == ast.NodeKindUnknown { + return + } + + fieldName := ctx.operation.FieldNameBytes(fieldRef) // Skip introspection fields - fieldName := a.operation.FieldNameBytes(fieldRef) if len(fieldName) > 1 && fieldName[0] == '_' && fieldName[1] == '_' { return } - enclosingTypeName := a.definition.NodeNameBytes(fieldEnclosingNode) + // Find all arguments defined for this field + argumentRefs := getFieldArgumentRefs(c.definition, enclosingNode, fieldName) + + providedArgs := ctx.CurrentProvidedArguments() + pathKey := ctx.PathKey() - 
// Get subgraph IDs for this field - subgraphIDs := a.subgraphMapper.getFieldSubgraphs(a.pathBuilder.key()) + // Track input types for implicitly null arguments + for _, argRef := range argumentRefs { + argName := string(c.definition.InputValueDefinitionNameString(argRef)) - // Find all arguments defined for this field in the schema + if providedArgs != nil { + if _, provided := providedArgs[argName]; provided { + continue + } + } + + argType := c.definition.InputValueDefinitionType(argRef) + typeName := c.definition.ResolveTypeNameString(argType) + + // Check if this is an input object type + defNode, ok := c.definition.NodeByNameStr(typeName) + if !ok || defNode.Kind != ast.NodeKindInputObjectTypeDefinition { + continue + } + + c.implicitInputs = append(c.implicitInputs, implicitInputUsage{ + typeName: typeName, + pathKey: pathKey, + }) + } +} + +// getFieldArgumentRefs returns argument definition refs for a field in the schema. +// Shared helper used by both argumentUsageCollector and implicitInputCollector. 
+func getFieldArgumentRefs(definition *ast.Document, enclosingNode ast.Node, fieldName []byte) []int { var argumentRefs []int - switch fieldEnclosingNode.Kind { + switch enclosingNode.Kind { case ast.NodeKindObjectTypeDefinition: - fieldDefs := a.definition.ObjectTypeDefinitions[fieldEnclosingNode.Ref].FieldsDefinition.Refs + fieldDefs := definition.ObjectTypeDefinitions[enclosingNode.Ref].FieldsDefinition.Refs for _, fieldDefRef := range fieldDefs { - fieldDef := a.definition.FieldDefinitions[fieldDefRef] - if bytes.Equal(a.definition.FieldDefinitionNameBytes(fieldDefRef), fieldName) { + fieldDef := definition.FieldDefinitions[fieldDefRef] + if bytes.Equal(definition.FieldDefinitionNameBytes(fieldDefRef), fieldName) { if fieldDef.HasArgumentsDefinitions { argumentRefs = fieldDef.ArgumentsDefinition.Refs } @@ -597,10 +826,10 @@ func (a *argumentUsageInfoVisitor) trackImplicitNullArguments(fieldRef int) { } } case ast.NodeKindInterfaceTypeDefinition: - fieldDefs := a.definition.InterfaceTypeDefinitions[fieldEnclosingNode.Ref].FieldsDefinition.Refs + fieldDefs := definition.InterfaceTypeDefinitions[enclosingNode.Ref].FieldsDefinition.Refs for _, fieldDefRef := range fieldDefs { - fieldDef := a.definition.FieldDefinitions[fieldDefRef] - if bytes.Equal(a.definition.FieldDefinitionNameBytes(fieldDefRef), fieldName) { + fieldDef := definition.FieldDefinitions[fieldDefRef] + if bytes.Equal(definition.FieldDefinitionNameBytes(fieldDefRef), fieldName) { if fieldDef.HasArgumentsDefinitions { argumentRefs = fieldDef.ArgumentsDefinition.Refs } @@ -608,34 +837,18 @@ func (a *argumentUsageInfoVisitor) trackImplicitNullArguments(fieldRef int) { } } } + return argumentRefs +} - // Get the provided arguments map for this field level - var providedArguments map[string]struct{} - if len(a.providedArgumentsStack) > 0 { - providedArguments = a.providedArgumentsStack[len(a.providedArgumentsStack)-1] - } - - // Track arguments that are defined but not provided (implicitly null) - for _, 
argRef := range argumentRefs { - argName := string(a.definition.InputValueDefinitionNameString(argRef)) - - // Skip if this argument was already provided - if providedArguments != nil { - if _, provided := providedArguments[argName]; provided { - continue - } - } - - argType := a.definition.InputValueDefinitionType(argRef) - typeName := a.definition.ResolveTypeNameString(argType) - - // Track argument as implicitly null - a.usage = append(a.usage, &graphqlmetrics.ArgumentUsageInfo{ - Path: []string{string(fieldName), argName}, - TypeName: string(enclosingTypeName), - NamedType: typeName, +// finalizeUsage adds implicit input usage to the traverser with resolved subgraph IDs. +func (c *implicitInputCollector) finalizeUsage(traverser *inputTraverser, fieldSubgraphMap map[string][]string) { + for _, input := range c.implicitInputs { + subgraphIDs := fieldSubgraphMap[input.pathKey] + traverser.appendUniqueUsage(&graphqlmetrics.InputUsageInfo{ + NamedType: input.typeName, + Path: []string{input.typeName}, SubgraphIDs: subgraphIDs, - IsNull: true, // Implicitly null (not provided) + IsNull: true, }) } } @@ -668,7 +881,6 @@ func (r *inputTypeResolver) resolveInputFields(typeName string) []inputFieldInfo fields = append(fields, inputFieldInfo{ name: string(r.definition.Input.ByteSlice(fieldDef.Name)), typeName: r.definition.ResolveTypeNameString(fieldDef.Type), - isList: r.definition.TypeIsList(fieldDef.Type), }) } @@ -683,11 +895,10 @@ func (r *inputTypeResolver) getNodeRef(typeName string) int { return -1 } -// inputFieldInfo represents an input object field's name, type, and list indicator. +// inputFieldInfo represents an input object field's name and type. 
type inputFieldInfo struct { name string typeName string - isList bool } // ============================================ @@ -734,7 +945,7 @@ func (t *inputTraverser) traverse(jsonValue *astjson.Value, fieldName, typeName, // Dispatch based on type kind switch defNode.Kind { case ast.NodeKindInputObjectTypeDefinition: - t.traverseInputObject(jsonValue, fieldName, typeName, parentTypeName, defNode, usageInfo) + t.traverseInputObject(jsonValue, fieldName, typeName, parentTypeName, usageInfo) case ast.NodeKindEnumTypeDefinition: t.traverseEnum(jsonValue, usageInfo) case ast.NodeKindScalarTypeDefinition: @@ -768,7 +979,7 @@ func (t *inputTraverser) createUsageInfo(fieldName, typeName, parentTypeName str } // traverseInputObject handles input object traversal with implicit null tracking -func (t *inputTraverser) traverseInputObject(jsonValue *astjson.Value, fieldName, typeName, parentTypeName string, defNode ast.Node, usageInfo *graphqlmetrics.InputUsageInfo) { +func (t *inputTraverser) traverseInputObject(jsonValue *astjson.Value, fieldName, typeName, parentTypeName string, usageInfo *graphqlmetrics.InputUsageInfo) { switch jsonValue.Type() { case astjson.TypeArray: // Note: arrays at this level mean list of input objects (e.g., [InputType]) @@ -827,7 +1038,14 @@ func (t *inputTraverser) processField(fieldName string, value *astjson.Value, pa } // List is not null - iterate through elements - for _, arrayValue := range value.GetArray() { + arr := value.GetArray() + if len(arr) == 0 { + // Empty list - still track the field usage for breaking change detection. + // The schema dependency exists even if no elements are provided. 
+ t.traverse(value, fieldName, fieldTypeName, parentTypeName, false) + return + } + for _, arrayValue := range arr { t.traverse(arrayValue, fieldName, fieldTypeName, parentTypeName, false) } } else { @@ -861,6 +1079,9 @@ func (t *inputTraverser) traverseEnum(jsonValue *astjson.Value, usageInfo *graph } } +// appendUniqueUsage adds usage info if not already present. +// Note: Uses O(n) linear scan for deduplication. For very large operations with thousands +// of input fields, consider using a map-based approach for O(1) lookups. func (t *inputTraverser) appendUniqueUsage(info *graphqlmetrics.InputUsageInfo) { for _, u := range t.usage { if t.infoEquals(u, info) { @@ -870,6 +1091,7 @@ func (t *inputTraverser) appendUniqueUsage(info *graphqlmetrics.InputUsageInfo) t.usage = append(t.usage, info) } +// infoEquals checks deep equality between two InputUsageInfo instances. func (t *inputTraverser) infoEquals(a, b *graphqlmetrics.InputUsageInfo) bool { if a.Count != b.Count { return false @@ -961,159 +1183,3 @@ func processVariableDefinition(traverser *inputTraverser, operation, definition isNull := jsonField.Type() == astjson.TypeNull traverser.traverse(jsonField, originalVarName, varTypeName, "", isNull) } - -// collectImplicitArgumentInputUsage walks the operation and tracks input usage for -// implicitly null input type arguments (arguments defined in schema but not provided in operation). 
-func collectImplicitArgumentInputUsage(operation, definition *ast.Document, subgraphMapper *subgraphMapper, traverser *inputTraverser) { - walker := astvisitor.NewWalker(48) - collector := &implicitArgumentInputCollector{ - walker: &walker, - definition: definition, - operation: operation, - subgraphMapper: subgraphMapper, - traverser: traverser, - pathBuilder: newPathBuilder(8), - argumentsStack: make([]map[string]struct{}, 0, 8), - enclosingStack: make([]ast.Node, 0, 8), - } - walker.RegisterEnterFieldVisitor(collector) - walker.RegisterLeaveFieldVisitor(collector) - walker.RegisterEnterArgumentVisitor(collector) - rep := &operationreport.Report{} - walker.Walk(operation, definition, rep) -} - -// implicitArgumentInputCollector collects input usage for implicitly null input type arguments -type implicitArgumentInputCollector struct { - walker *astvisitor.Walker - definition *ast.Document - operation *ast.Document - subgraphMapper *subgraphMapper - traverser *inputTraverser - pathBuilder *pathBuilder - argumentsStack []map[string]struct{} // Track provided arguments per field - enclosingStack []ast.Node -} - -func (c *implicitArgumentInputCollector) EnterField(ref int) { - c.enclosingStack = append(c.enclosingStack, c.walker.EnclosingTypeDefinition) - c.argumentsStack = append(c.argumentsStack, nil) - fieldName := c.operation.FieldNameString(ref) - c.pathBuilder.push(fieldName) -} - -func (c *implicitArgumentInputCollector) LeaveField(ref int) { - // Check for implicit null input type arguments - c.trackImplicitInputTypeArguments(ref) - - c.pathBuilder.pop() - if len(c.enclosingStack) > 0 { - c.enclosingStack = c.enclosingStack[:len(c.enclosingStack)-1] - } - if len(c.argumentsStack) > 0 { - c.argumentsStack = c.argumentsStack[:len(c.argumentsStack)-1] - } -} - -func (c *implicitArgumentInputCollector) EnterArgument(ref int) { - argName := c.operation.ArgumentNameBytes(ref) - anc := c.walker.Ancestors[len(c.walker.Ancestors)-1] - if anc.Kind != ast.NodeKindField 
{ - return - } - - // Lazily allocate map and track provided argument - if len(c.argumentsStack) > 0 { - stackIdx := len(c.argumentsStack) - 1 - if c.argumentsStack[stackIdx] == nil { - c.argumentsStack[stackIdx] = make(map[string]struct{}, 4) - } - c.argumentsStack[stackIdx][string(argName)] = struct{}{} - } -} - -func (c *implicitArgumentInputCollector) trackImplicitInputTypeArguments(fieldRef int) { - if len(c.enclosingStack) == 0 { - return - } - enclosingNode := c.enclosingStack[len(c.enclosingStack)-1] - if enclosingNode.Kind == ast.NodeKindUnknown { - return - } - - fieldName := c.operation.FieldNameBytes(fieldRef) - // Skip introspection fields - if len(fieldName) > 1 && fieldName[0] == '_' && fieldName[1] == '_' { - return - } - - // Get subgraph IDs for this field - subgraphIDs := c.subgraphMapper.getFieldSubgraphs(c.pathBuilder.key()) - - // Find all arguments defined for this field - var argumentRefs []int - switch enclosingNode.Kind { - case ast.NodeKindObjectTypeDefinition: - fieldDefs := c.definition.ObjectTypeDefinitions[enclosingNode.Ref].FieldsDefinition.Refs - for _, fieldDefRef := range fieldDefs { - fieldDef := c.definition.FieldDefinitions[fieldDefRef] - if bytes.Equal(c.definition.FieldDefinitionNameBytes(fieldDefRef), fieldName) { - if fieldDef.HasArgumentsDefinitions { - argumentRefs = fieldDef.ArgumentsDefinition.Refs - } - break - } - } - case ast.NodeKindInterfaceTypeDefinition: - fieldDefs := c.definition.InterfaceTypeDefinitions[enclosingNode.Ref].FieldsDefinition.Refs - for _, fieldDefRef := range fieldDefs { - fieldDef := c.definition.FieldDefinitions[fieldDefRef] - if bytes.Equal(c.definition.FieldDefinitionNameBytes(fieldDefRef), fieldName) { - if fieldDef.HasArgumentsDefinitions { - argumentRefs = fieldDef.ArgumentsDefinition.Refs - } - break - } - } - } - - // Get provided arguments for this field - var providedArgs map[string]struct{} - if len(c.argumentsStack) > 0 { - providedArgs = c.argumentsStack[len(c.argumentsStack)-1] - } 
- - // Track input usage for implicitly null input type arguments - for _, argRef := range argumentRefs { - argName := string(c.definition.InputValueDefinitionNameString(argRef)) - - // Skip if argument was provided - if providedArgs != nil { - if _, provided := providedArgs[argName]; provided { - continue - } - } - - argType := c.definition.InputValueDefinitionType(argRef) - typeName := c.definition.ResolveTypeNameString(argType) - - // Check if this is an input object type - defNode, ok := c.definition.NodeByNameStr(typeName) - if !ok { - continue - } - - // Only track input object types (not scalars or enums) - if defNode.Kind != ast.NodeKindInputObjectTypeDefinition { - continue - } - - // Add input usage for the implicitly null input type - c.traverser.appendUniqueUsage(&graphqlmetrics.InputUsageInfo{ - NamedType: typeName, - Path: []string{typeName}, - SubgraphIDs: subgraphIDs, - IsNull: true, // Implicitly null (not provided) - }) - } -} diff --git a/router/pkg/graphqlschemausage/schemausage_test.go b/router/pkg/graphqlschemausage/schemausage_test.go index 0926c1de26..c0d6469393 100644 --- a/router/pkg/graphqlschemausage/schemausage_test.go +++ b/router/pkg/graphqlschemausage/schemausage_test.go @@ -2865,6 +2865,13 @@ func TestNullListHandling(t *testing.T) { } }`, expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "tags"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, // Empty list is not null, field is still used + }, { NamedType: "String", TypeName: "SearchFilter", @@ -2886,7 +2893,7 @@ func TestNullListHandling(t *testing.T) { IsNull: false, }, }, - description: "Empty list should not produce any element entries (nothing to iterate), only implicit null for missing fields", + description: "Empty list should track field usage with IsNull=false (field is used, just no elements)", }, { name: "all lists null", From d2a5722e2beca3da225419178c97d8da9c74b967 Mon 
Sep 17 00:00:00 2001 From: JivusAyrus Date: Mon, 1 Dec 2025 14:31:49 +0100 Subject: [PATCH 16/22] fix: improve condition handling for SchemaUsageTrafficInspector --- .../src/core/services/SchemaUsageTrafficInspector.ts | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts index c37f008511..46189bee01 100644 --- a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts +++ b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts @@ -222,15 +222,19 @@ export class SchemaUsageTrafficInspector { if (change.typeName) { where.push(`hasAny(TypeNames, ['${change.typeName}'])`); } + // fieldName can be empty if a type was removed if (change.fieldName) { where.push(`FieldName = '${change.fieldName}'`); } + if (change.isInput) { where.push(`IsInput = true`); } else if (change.isArgument) { where.push(`IsArgument = true`); - } else if (change.isNull !== undefined) { + } + + if (change.isNull !== undefined) { where.push(`IsNull = ${change.isNull}`); } where.push(`IsIndirectFieldUsage = false`); From 9ec6f06f883560a4011a59ad877a53645ba2f11f Mon Sep 17 00:00:00 2001 From: StarpTech Date: Tue, 2 Dec 2025 16:03:16 +0100 Subject: [PATCH 17/22] chore: add test for nested arguments --- .../graphqlschemausage/schemausage_test.go | 825 ++++++++++++++++++ 1 file changed, 825 insertions(+) diff --git a/router/pkg/graphqlschemausage/schemausage_test.go b/router/pkg/graphqlschemausage/schemausage_test.go index c0d6469393..a51e1c0125 100644 --- a/router/pkg/graphqlschemausage/schemausage_test.go +++ b/router/pkg/graphqlschemausage/schemausage_test.go @@ -3002,6 +3002,831 @@ func TestNullListHandling(t *testing.T) { } } +// TestNestedFieldArguments verifies that arguments on nested fields (not just root Query fields) +// are tracked correctly with proper type names, paths, and subgraph IDs. 
+// This is critical for tracking schema usage on fields like User.friends(limit: Int) or +// Product.reviews(filter: ReviewFilter). +func TestNestedFieldArguments(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + user(id: ID!): User + product(id: ID!): Product + } + + type User { + id: ID! + name: String! + friends(limit: Int, offset: Int, filter: FriendFilter): [User!]! + posts(status: PostStatus, category: String): [Post!]! + } + + type Post { + id: ID! + title: String! + comments(first: Int!, after: String, includeReplies: Boolean): [Comment!]! + } + + type Comment { + id: ID! + text: String! + replies(maxDepth: Int): [Comment!]! + } + + type Product { + id: ID! + name: String! + reviews(filter: ReviewFilter!): [Review!]! + } + + type Review { + id: ID! + rating: Int! + author: User + } + + input FriendFilter { + minAge: Int + maxAge: Int + } + + input ReviewFilter { + minRating: Int + verified: Boolean + } + + enum PostStatus { + DRAFT + PUBLISHED + ARCHIVED + } + ` + + t.Run("nested arguments at multiple levels", func(t *testing.T) { + operation := ` + query GetUserContent($userId: ID!, $postStatus: PostStatus, $commentLimit: Int!, $includeReplies: Boolean) { + user(id: $userId) { + id + name + friends(limit: 10, offset: 0) { + id + name + } + posts(status: $postStatus, category: "tech") { + id + title + comments(first: $commentLimit, includeReplies: $includeReplies) { + id + text + replies(maxDepth: 3) { + id + text + } + } + } + } + } + ` + + variables := `{ + "userId": "123", + "postStatus": "PUBLISHED", + "commentLimit": 20, + "includeReplies": true + }` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + 
norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "main-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"user", "product"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "User", FieldNames: []string{"id", "name", "friends", "posts"}}, + {TypeName: "Post", FieldNames: []string{"id", "title", "comments"}}, + {TypeName: "Comment", FieldNames: []string{"id", "text", "replies"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "GetUserContent", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Build a map for easier assertion + argumentMap := make(map[string]*graphqlmetricsv1.ArgumentUsageInfo) + for _, arg := range argumentUsageInfo { + key := strings.Join(arg.Path, ".") + argumentMap[key] = arg + } + + // Verify root level argument (Query.user.id) + require.Contains(t, argumentMap, "user.id", "Should track root level argument") + assert.Equal(t, "Query", argumentMap["user.id"].TypeName) + assert.Equal(t, "ID", argumentMap["user.id"].NamedType) + assert.False(t, argumentMap["user.id"].IsNull) + + // Verify nested level 1 argument (User.friends.limit) + require.Contains(t, argumentMap, "friends.limit", "Should track nested field argument") + assert.Equal(t, "User", argumentMap["friends.limit"].TypeName) + assert.Equal(t, "Int", 
argumentMap["friends.limit"].NamedType) + assert.False(t, argumentMap["friends.limit"].IsNull) + + // Verify nested level 1 argument (User.friends.offset) + require.Contains(t, argumentMap, "friends.offset", "Should track nested field argument") + assert.Equal(t, "User", argumentMap["friends.offset"].TypeName) + assert.Equal(t, "Int", argumentMap["friends.offset"].NamedType) + assert.False(t, argumentMap["friends.offset"].IsNull) + + // Verify nested level 1 implicit null argument (User.friends.filter) + require.Contains(t, argumentMap, "friends.filter", "Should track implicit null nested field argument") + assert.Equal(t, "User", argumentMap["friends.filter"].TypeName) + assert.Equal(t, "FriendFilter", argumentMap["friends.filter"].NamedType) + assert.True(t, argumentMap["friends.filter"].IsNull, "filter was not provided, should be implicitly null") + + // Verify nested level 1 argument (User.posts.status) + require.Contains(t, argumentMap, "posts.status", "Should track nested field argument with variable") + assert.Equal(t, "User", argumentMap["posts.status"].TypeName) + assert.Equal(t, "PostStatus", argumentMap["posts.status"].NamedType) + assert.False(t, argumentMap["posts.status"].IsNull) + + // Verify nested level 1 argument (User.posts.category) + require.Contains(t, argumentMap, "posts.category", "Should track nested field argument with inline value") + assert.Equal(t, "User", argumentMap["posts.category"].TypeName) + assert.Equal(t, "String", argumentMap["posts.category"].NamedType) + assert.False(t, argumentMap["posts.category"].IsNull) + + // Verify nested level 2 argument (Post.comments.first) + require.Contains(t, argumentMap, "comments.first", "Should track doubly nested field argument") + assert.Equal(t, "Post", argumentMap["comments.first"].TypeName) + assert.Equal(t, "Int", argumentMap["comments.first"].NamedType) + assert.False(t, argumentMap["comments.first"].IsNull) + + // Verify nested level 2 argument (Post.comments.includeReplies) + 
require.Contains(t, argumentMap, "comments.includeReplies", "Should track doubly nested field argument") + assert.Equal(t, "Post", argumentMap["comments.includeReplies"].TypeName) + assert.Equal(t, "Boolean", argumentMap["comments.includeReplies"].NamedType) + assert.False(t, argumentMap["comments.includeReplies"].IsNull) + + // Verify nested level 2 implicit null argument (Post.comments.after) + require.Contains(t, argumentMap, "comments.after", "Should track implicit null doubly nested argument") + assert.Equal(t, "Post", argumentMap["comments.after"].TypeName) + assert.Equal(t, "String", argumentMap["comments.after"].NamedType) + assert.True(t, argumentMap["comments.after"].IsNull, "after was not provided, should be implicitly null") + + // Verify nested level 3 argument (Comment.replies.maxDepth) + require.Contains(t, argumentMap, "replies.maxDepth", "Should track triply nested field argument") + assert.Equal(t, "Comment", argumentMap["replies.maxDepth"].TypeName) + assert.Equal(t, "Int", argumentMap["replies.maxDepth"].NamedType) + assert.False(t, argumentMap["replies.maxDepth"].IsNull) + + // Verify all arguments have correct subgraph IDs + for key, arg := range argumentMap { + assert.Equal(t, []string{"main-subgraph"}, arg.SubgraphIDs, "Argument %s should have main-subgraph", key) + } + }) + + t.Run("nested arguments with input object types", func(t *testing.T) { + operation := ` + query GetUserFriends($userId: ID!, $friendFilter: FriendFilter) { + user(id: $userId) { + id + friends(filter: $friendFilter, limit: 5) { + id + name + } + } + } + ` + + variables := `{ + "userId": "123", + "friendFilter": { + "minAge": 18, + "maxAge": 65 + } + }` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + 
norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "main-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"user"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "User", FieldNames: []string{"id", "name", "friends"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "GetUserFriends", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Build maps for easier assertion + argumentMap := make(map[string]*graphqlmetricsv1.ArgumentUsageInfo) + for _, arg := range argumentUsageInfo { + key := strings.Join(arg.Path, ".") + argumentMap[key] = arg + } + + inputMap := make(map[string]*graphqlmetricsv1.InputUsageInfo) + for _, input := range inputUsageInfo { + key := strings.Join(input.Path, ".") + inputMap[key] = input + } + + // Verify nested argument with input object type + require.Contains(t, argumentMap, "friends.filter", "Should track nested argument with input type") + filterArg := argumentMap["friends.filter"] + assert.Equal(t, "User", filterArg.TypeName) + assert.Equal(t, "FriendFilter", filterArg.NamedType) + assert.False(t, filterArg.IsNull) + + // Verify nested argument with scalar type + 
require.Contains(t, argumentMap, "friends.limit", "Should track nested argument with scalar type") + limitArg := argumentMap["friends.limit"] + assert.Equal(t, "User", limitArg.TypeName) + assert.Equal(t, "Int", limitArg.NamedType) + assert.False(t, limitArg.IsNull) + + // Verify implicit null for missing offset argument + require.Contains(t, argumentMap, "friends.offset", "Should track implicit null for nested argument") + offsetArg := argumentMap["friends.offset"] + assert.Equal(t, "User", offsetArg.TypeName) + assert.Equal(t, "Int", offsetArg.NamedType) + assert.True(t, offsetArg.IsNull, "offset was not provided, should be implicitly null") + + // Verify input usage for the filter input object + require.Contains(t, inputMap, "FriendFilter", "Should track FriendFilter input type") + assert.Equal(t, "FriendFilter", inputMap["FriendFilter"].NamedType) + assert.False(t, inputMap["FriendFilter"].IsNull) + + // Verify input fields + require.Contains(t, inputMap, "FriendFilter.minAge", "Should track FriendFilter.minAge field") + assert.Equal(t, "Int", inputMap["FriendFilter.minAge"].NamedType) + assert.Equal(t, "FriendFilter", inputMap["FriendFilter.minAge"].TypeName) + assert.False(t, inputMap["FriendFilter.minAge"].IsNull) + + require.Contains(t, inputMap, "FriendFilter.maxAge", "Should track FriendFilter.maxAge field") + assert.Equal(t, "Int", inputMap["FriendFilter.maxAge"].NamedType) + assert.Equal(t, "FriendFilter", inputMap["FriendFilter.maxAge"].TypeName) + assert.False(t, inputMap["FriendFilter.maxAge"].IsNull) + }) + + t.Run("nested arguments with null input object", func(t *testing.T) { + operation := ` + query GetUserFriends($userId: ID!, $friendFilter: FriendFilter) { + user(id: $userId) { + id + friends(filter: $friendFilter) { + id + name + } + } + } + ` + + variables := `{ + "userId": "123", + "friendFilter": null + }` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := 
astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "main-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"user"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "User", FieldNames: []string{"id", "name", "friends"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "GetUserFriends", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Build map for argument assertion + argumentMap := make(map[string]*graphqlmetricsv1.ArgumentUsageInfo) + for _, arg := range argumentUsageInfo { + key := strings.Join(arg.Path, ".") + argumentMap[key] = arg + } + + // Verify nested argument with null input object + require.Contains(t, argumentMap, "friends.filter", "Should track nested argument even when null") + filterArg := argumentMap["friends.filter"] + assert.Equal(t, "User", filterArg.TypeName) + assert.Equal(t, "FriendFilter", filterArg.NamedType) + assert.True(t, filterArg.IsNull, "filter variable is 
explicitly null") + + // Verify input usage tracks the null FriendFilter + var friendFilterUsage *graphqlmetricsv1.InputUsageInfo + for _, input := range inputUsageInfo { + if input.NamedType == "FriendFilter" && len(input.Path) == 1 { + friendFilterUsage = input + break + } + } + require.NotNil(t, friendFilterUsage, "Should track FriendFilter input type even when null") + assert.Equal(t, "FriendFilter", friendFilterUsage.NamedType) + assert.True(t, friendFilterUsage.IsNull, "FriendFilter should be tracked as null") + }) + + t.Run("nested arguments across multiple subgraphs", func(t *testing.T) { + // Enhanced schema with more types that span multiple subgraphs + multiSubgraphSchema := ` + schema { + query: Query + } + + type Query { + user(id: ID!): User + product(id: ID!): Product + order(id: ID!): Order + } + + type User { + id: ID! + name: String! + friends(limit: Int, filter: UserFilter): [User!]! + orders(status: OrderStatus, limit: Int): [Order!]! + } + + type Product { + id: ID! + name: String! + reviews(filter: ReviewFilter!, limit: Int): [Review!]! + } + + type Review { + id: ID! + rating: Int! + author: User + comments(first: Int, sortBy: String): [ReviewComment!]! + } + + type ReviewComment { + id: ID! + text: String! + } + + type Order { + id: ID! + status: OrderStatus! + items(category: String): [OrderItem!]! + customer: User + } + + type OrderItem { + id: ID! + product: Product + quantity: Int! 
+ } + + input UserFilter { + minAge: Int + verified: Boolean + } + + input ReviewFilter { + minRating: Int + verified: Boolean + } + + enum OrderStatus { + PENDING + SHIPPED + DELIVERED + } + ` + + operation := ` + query GetUserDataAcrossSubgraphs($userId: ID!, $userFilter: UserFilter, $reviewFilter: ReviewFilter!, $orderStatus: OrderStatus) { + user(id: $userId) { + id + name + friends(limit: 10, filter: $userFilter) { + id + name + } + orders(status: $orderStatus, limit: 5) { + id + status + items(category: "electronics") { + id + quantity + product { + id + name + reviews(filter: $reviewFilter, limit: 3) { + id + rating + comments(first: 5, sortBy: "date") { + id + text + } + } + } + } + } + } + } + ` + + variables := `{ + "userId": "user-123", + "userFilter": { + "minAge": 18, + "verified": true + }, + "reviewFilter": { + "minRating": 4, + "verified": true + }, + "orderStatus": "SHIPPED" + }` + + def, rep := astparser.ParseGraphqlDocumentString(multiSubgraphSchema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + // Create THREE subgraphs - users, products, and orders come from different sources + usersSubgraph, err := plan.NewDataSourceConfiguration[any]( + "users-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"user"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "User", FieldNames: []string{"id", "name", "friends", "orders"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + 
productsSubgraph, err := plan.NewDataSourceConfiguration[any]( + "products-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"product"}}, + {TypeName: "Product", FieldNames: []string{"id", "name", "reviews"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Product", FieldNames: []string{"id", "name", "reviews"}}, + {TypeName: "Review", FieldNames: []string{"id", "rating", "author", "comments"}}, + {TypeName: "ReviewComment", FieldNames: []string{"id", "text"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + ordersSubgraph, err := plan.NewDataSourceConfiguration[any]( + "orders-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"order"}}, + {TypeName: "Order", FieldNames: []string{"id", "status", "items", "customer"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Order", FieldNames: []string{"id", "status", "items", "customer"}}, + {TypeName: "OrderItem", FieldNames: []string{"id", "product", "quantity"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{usersSubgraph, productsSubgraph, ordersSubgraph}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "GetUserDataAcrossSubgraphs", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Build map for argument assertion + argumentMap := make(map[string]*graphqlmetricsv1.ArgumentUsageInfo) + for _, arg := range argumentUsageInfo { + key := strings.Join(arg.Path, ".") + 
argumentMap[key] = arg + } + + // Build map for input assertion + inputMap := make(map[string]*graphqlmetricsv1.InputUsageInfo) + for _, input := range inputUsageInfo { + key := strings.Join(input.Path, ".") + inputMap[key] = input + } + + // ======================================== + // Verify USERS SUBGRAPH arguments + // ======================================== + + // Root level: Query.user(id:) -> users-subgraph + require.Contains(t, argumentMap, "user.id", "Should track Query.user(id:)") + assert.Equal(t, "Query", argumentMap["user.id"].TypeName) + assert.Equal(t, "ID", argumentMap["user.id"].NamedType) + assert.Equal(t, []string{"users-subgraph"}, argumentMap["user.id"].SubgraphIDs, + "Query.user argument should be attributed to users-subgraph") + assert.False(t, argumentMap["user.id"].IsNull) + + // Nested level 1: User.friends(limit:) -> users-subgraph + require.Contains(t, argumentMap, "friends.limit", "Should track User.friends(limit:)") + assert.Equal(t, "User", argumentMap["friends.limit"].TypeName) + assert.Equal(t, "Int", argumentMap["friends.limit"].NamedType) + assert.Equal(t, []string{"users-subgraph"}, argumentMap["friends.limit"].SubgraphIDs, + "User.friends.limit argument should be attributed to users-subgraph") + assert.False(t, argumentMap["friends.limit"].IsNull) + + // Nested level 1: User.friends(filter:) -> users-subgraph (input object type) + require.Contains(t, argumentMap, "friends.filter", "Should track User.friends(filter:)") + assert.Equal(t, "User", argumentMap["friends.filter"].TypeName) + assert.Equal(t, "UserFilter", argumentMap["friends.filter"].NamedType) + assert.Equal(t, []string{"users-subgraph"}, argumentMap["friends.filter"].SubgraphIDs, + "User.friends.filter argument should be attributed to users-subgraph") + assert.False(t, argumentMap["friends.filter"].IsNull) + + // Nested level 1: User.orders(status:) -> users-subgraph + require.Contains(t, argumentMap, "orders.status", "Should track User.orders(status:)") + 
assert.Equal(t, "User", argumentMap["orders.status"].TypeName) + assert.Equal(t, "OrderStatus", argumentMap["orders.status"].NamedType) + assert.Equal(t, []string{"users-subgraph"}, argumentMap["orders.status"].SubgraphIDs, + "User.orders.status argument should be attributed to users-subgraph") + assert.False(t, argumentMap["orders.status"].IsNull) + + // Nested level 1: User.orders(limit:) -> users-subgraph + require.Contains(t, argumentMap, "orders.limit", "Should track User.orders(limit:)") + assert.Equal(t, "User", argumentMap["orders.limit"].TypeName) + assert.Equal(t, "Int", argumentMap["orders.limit"].NamedType) + assert.Equal(t, []string{"users-subgraph"}, argumentMap["orders.limit"].SubgraphIDs, + "User.orders.limit argument should be attributed to users-subgraph") + assert.False(t, argumentMap["orders.limit"].IsNull) + + // ======================================== + // Verify ORDERS SUBGRAPH arguments + // ======================================== + + // Nested level 2: Order.items(category:) -> orders-subgraph + require.Contains(t, argumentMap, "items.category", "Should track Order.items(category:)") + assert.Equal(t, "Order", argumentMap["items.category"].TypeName) + assert.Equal(t, "String", argumentMap["items.category"].NamedType) + assert.Equal(t, []string{"orders-subgraph"}, argumentMap["items.category"].SubgraphIDs, + "Order.items.category argument should be attributed to orders-subgraph") + assert.False(t, argumentMap["items.category"].IsNull) + + // ======================================== + // Verify PRODUCTS SUBGRAPH arguments + // ======================================== + + // Nested level 4: Product.reviews(filter:) -> products-subgraph + require.Contains(t, argumentMap, "reviews.filter", "Should track Product.reviews(filter:)") + assert.Equal(t, "Product", argumentMap["reviews.filter"].TypeName) + assert.Equal(t, "ReviewFilter", argumentMap["reviews.filter"].NamedType) + assert.Equal(t, []string{"products-subgraph"}, 
argumentMap["reviews.filter"].SubgraphIDs, + "Product.reviews.filter argument should be attributed to products-subgraph") + assert.False(t, argumentMap["reviews.filter"].IsNull) + + // Nested level 4: Product.reviews(limit:) -> products-subgraph + require.Contains(t, argumentMap, "reviews.limit", "Should track Product.reviews(limit:)") + assert.Equal(t, "Product", argumentMap["reviews.limit"].TypeName) + assert.Equal(t, "Int", argumentMap["reviews.limit"].NamedType) + assert.Equal(t, []string{"products-subgraph"}, argumentMap["reviews.limit"].SubgraphIDs, + "Product.reviews.limit argument should be attributed to products-subgraph") + assert.False(t, argumentMap["reviews.limit"].IsNull) + + // Nested level 5: Review.comments(first:) -> products-subgraph + require.Contains(t, argumentMap, "comments.first", "Should track Review.comments(first:)") + assert.Equal(t, "Review", argumentMap["comments.first"].TypeName) + assert.Equal(t, "Int", argumentMap["comments.first"].NamedType) + assert.Equal(t, []string{"products-subgraph"}, argumentMap["comments.first"].SubgraphIDs, + "Review.comments.first argument should be attributed to products-subgraph") + assert.False(t, argumentMap["comments.first"].IsNull) + + // Nested level 5: Review.comments(sortBy:) -> products-subgraph + require.Contains(t, argumentMap, "comments.sortBy", "Should track Review.comments(sortBy:)") + assert.Equal(t, "Review", argumentMap["comments.sortBy"].TypeName) + assert.Equal(t, "String", argumentMap["comments.sortBy"].NamedType) + assert.Equal(t, []string{"products-subgraph"}, argumentMap["comments.sortBy"].SubgraphIDs, + "Review.comments.sortBy argument should be attributed to products-subgraph") + assert.False(t, argumentMap["comments.sortBy"].IsNull) + + // ======================================== + // Verify INPUT TYPE subgraph attribution + // ======================================== + + // UserFilter should be attributed to users-subgraph (used by User.friends) + require.Contains(t, inputMap, 
"UserFilter", "Should track UserFilter input type") + assert.Equal(t, "UserFilter", inputMap["UserFilter"].NamedType) + assert.Equal(t, []string{"users-subgraph"}, inputMap["UserFilter"].SubgraphIDs, + "UserFilter should be attributed to users-subgraph") + assert.False(t, inputMap["UserFilter"].IsNull) + + // UserFilter.minAge field + require.Contains(t, inputMap, "UserFilter.minAge", "Should track UserFilter.minAge field") + assert.Equal(t, "Int", inputMap["UserFilter.minAge"].NamedType) + assert.Equal(t, "UserFilter", inputMap["UserFilter.minAge"].TypeName) + assert.Equal(t, []string{"users-subgraph"}, inputMap["UserFilter.minAge"].SubgraphIDs, + "UserFilter.minAge should be attributed to users-subgraph") + + // UserFilter.verified field + require.Contains(t, inputMap, "UserFilter.verified", "Should track UserFilter.verified field") + assert.Equal(t, "Boolean", inputMap["UserFilter.verified"].NamedType) + assert.Equal(t, "UserFilter", inputMap["UserFilter.verified"].TypeName) + assert.Equal(t, []string{"users-subgraph"}, inputMap["UserFilter.verified"].SubgraphIDs, + "UserFilter.verified should be attributed to users-subgraph") + + // ReviewFilter should be attributed to products-subgraph (used by Product.reviews) + require.Contains(t, inputMap, "ReviewFilter", "Should track ReviewFilter input type") + assert.Equal(t, "ReviewFilter", inputMap["ReviewFilter"].NamedType) + assert.Equal(t, []string{"products-subgraph"}, inputMap["ReviewFilter"].SubgraphIDs, + "ReviewFilter should be attributed to products-subgraph") + assert.False(t, inputMap["ReviewFilter"].IsNull) + + // ReviewFilter.minRating field + require.Contains(t, inputMap, "ReviewFilter.minRating", "Should track ReviewFilter.minRating field") + assert.Equal(t, "Int", inputMap["ReviewFilter.minRating"].NamedType) + assert.Equal(t, "ReviewFilter", inputMap["ReviewFilter.minRating"].TypeName) + assert.Equal(t, []string{"products-subgraph"}, inputMap["ReviewFilter.minRating"].SubgraphIDs, + 
"ReviewFilter.minRating should be attributed to products-subgraph") + + // ReviewFilter.verified field + require.Contains(t, inputMap, "ReviewFilter.verified", "Should track ReviewFilter.verified field") + assert.Equal(t, "Boolean", inputMap["ReviewFilter.verified"].NamedType) + assert.Equal(t, "ReviewFilter", inputMap["ReviewFilter.verified"].TypeName) + assert.Equal(t, []string{"products-subgraph"}, inputMap["ReviewFilter.verified"].SubgraphIDs, + "ReviewFilter.verified should be attributed to products-subgraph") + + // ======================================== + // Verify ENUM usage subgraph attribution + // ======================================== + + // OrderStatus enum used by User.orders should be attributed to users-subgraph + var orderStatusUsage *graphqlmetricsv1.InputUsageInfo + for _, input := range inputUsageInfo { + if input.NamedType == "OrderStatus" && len(input.EnumValues) > 0 { + orderStatusUsage = input + break + } + } + require.NotNil(t, orderStatusUsage, "Should track OrderStatus enum usage") + assert.Equal(t, []string{"users-subgraph"}, orderStatusUsage.SubgraphIDs, + "OrderStatus enum should be attributed to users-subgraph (used by User.orders)") + assert.Contains(t, orderStatusUsage.EnumValues, "SHIPPED") + + // ======================================== + // Verify NO CROSS-CONTAMINATION + // ======================================== + + // Ensure users-subgraph arguments don't have products-subgraph or orders-subgraph + for key, arg := range argumentMap { + if arg.TypeName == "User" { + assert.NotContains(t, arg.SubgraphIDs, "products-subgraph", + "User field argument %s should not have products-subgraph", key) + assert.NotContains(t, arg.SubgraphIDs, "orders-subgraph", + "User field argument %s should not have orders-subgraph", key) + } + if arg.TypeName == "Product" || arg.TypeName == "Review" { + assert.NotContains(t, arg.SubgraphIDs, "users-subgraph", + "Product/Review field argument %s should not have users-subgraph", key) + 
assert.NotContains(t, arg.SubgraphIDs, "orders-subgraph", + "Product/Review field argument %s should not have orders-subgraph", key) + } + if arg.TypeName == "Order" || arg.TypeName == "OrderItem" { + assert.NotContains(t, arg.SubgraphIDs, "users-subgraph", + "Order/OrderItem field argument %s should not have users-subgraph", key) + assert.NotContains(t, arg.SubgraphIDs, "products-subgraph", + "Order/OrderItem field argument %s should not have products-subgraph", key) + } + } + }) +} + // TestNilVariablesHandling verifies that nil variables are handled gracefully without panicking. // This is a defensive test to ensure the API doesn't crash when callers pass nil for variables. func TestNilVariablesHandling(t *testing.T) { From 35b66b66c297d686044f222f78f20b0975aac758 Mon Sep 17 00:00:00 2001 From: JivusAyrus Date: Wed, 3 Dec 2025 12:34:41 +0100 Subject: [PATCH 18/22] feat: enhance schema change handling with structured metadata - Added `meta` field to `SchemaDiff` interface to include structured data from graphql-inspector. - Updated `getSchemaDiff` and `getDiffBetweenGraphs` functions to utilize the new `meta` field for breaking and non-breaking changes. - Refactored type change categorization to use structured metadata instead of parsing messages in `toInspectorChange` function. - Improved type handling in `SchemaUsageTrafficInspector` by incorporating new type change category logic. 
--- .../src/core/composition/schemaCheck.ts | 7 +- .../SchemaUsageTrafficInspector.test.ts | 1 + .../services/SchemaUsageTrafficInspector.ts | 177 ++++++++---------- 3 files changed, 86 insertions(+), 99 deletions(-) diff --git a/controlplane/src/core/composition/schemaCheck.ts b/controlplane/src/core/composition/schemaCheck.ts index ad1afe591c..f51570cbf8 100644 --- a/controlplane/src/core/composition/schemaCheck.ts +++ b/controlplane/src/core/composition/schemaCheck.ts @@ -1,4 +1,4 @@ -import { ChangeType, CriticalityLevel, diff, TypeOfChangeType } from '@graphql-inspector/core'; +import { ChangeType, CriticalityLevel, diff, TypeOfChangeType, SerializableChange } from '@graphql-inspector/core'; import { EnumStatusCode } from '@wundergraph/cosmo-connect/dist/common/common_pb'; import { GraphQLSchema } from 'graphql'; import { buildSchema } from './composition.js'; @@ -9,6 +9,8 @@ export interface SchemaDiff { // path is the path to the field or type that changed path: string; isBreaking: boolean; + // meta contains structured data about the change from graphql-inspector + meta: SerializableChange['meta']; } export interface GetDiffBetweenGraphsSuccess { @@ -34,6 +36,7 @@ export async function getSchemaDiff(oldSchemaSDL: GraphQLSchema, newSchemaSDL: G message: change.message, changeType: change.type, path: change.path ?? 
'', + meta: change.meta, isBreaking: change.criticality.level === CriticalityLevel.Breaking || // We consider enum value changes as breaking changes because it is common to use enums in switch statements @@ -82,6 +85,7 @@ export async function getDiffBetweenGraphs( message: breakingChange.message, changeType: breakingChange.changeType, path: breakingChange.path, + meta: breakingChange.meta, isBreaking: true, }; }); @@ -93,6 +97,7 @@ export async function getDiffBetweenGraphs( message: nonBreakingChange.message, changeType: nonBreakingChange.changeType, path: nonBreakingChange.path, + meta: nonBreakingChange.meta, isBreaking: false, }; }); diff --git a/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts b/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts index 4975439ac2..ae058b2588 100644 --- a/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts +++ b/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts @@ -483,6 +483,7 @@ async function getBreakingChanges(a: GraphQLSchema, b: GraphQLSchema): Promise "Boolean!" - * - Inner "!" from arrays like "[Boolean!]" -> "Boolean" + * Get the named type from a TypeNode AST */ -function extractBaseType(type: string): string { - let base = type.trim(); - - // Remove trailing "!" - if (base.endsWith('!')) { - base = base.slice(0, -1).trim(); +function getNamedType(typeNode: TypeNode): NamedTypeNode { + if (typeNode.kind === 'NamedType') { + return typeNode; } - - // Handle array types like "[Boolean!]" or "[Boolean!]!" - // Remove outer brackets and inner "!" - if (base.startsWith('[') && base.endsWith(']')) { - base = base.slice(1, -1).trim(); - // Remove inner "!" 
if present - if (base.endsWith('!')) { - base = base.slice(0, -1).trim(); - } + if (typeNode.kind === 'NonNullType' || typeNode.kind === 'ListType') { + return getNamedType(typeNode.type); } - - return base; + throw new Error('Unexpected type node'); } /** - * Normalize type for structural comparison (remove required indicators but keep structure) - */ -function normalizeType(type: string): string { - let normalized = type.trim(); - // Remove trailing "!" but keep array structure - if (normalized.endsWith('!')) { - normalized = normalized.slice(0, -1).trim(); - } - return normalized; -} - -/** - * Parses an argument removal message and determines if the argument was required. - * - * @param message - String in format: "Argument 'name: Type' was removed from field 'TypeName.fieldName'" - * @returns true if the argument was required (type ends with '!'), false if optional + * Determines the type change category from meta information using GraphQL's type parsing utilities. + * Works for both InputFieldTypeChanged and FieldArgumentTypeChanged. * - * @example - * parseArgumentRemoval("Argument 'criteria: SearchInput!' was removed from field 'Query.findEmployees'") - * // Returns true (required) - * - * @example - * parseArgumentRemoval("Argument 'criteria: SearchInput' was removed from field 'Query.findEmployees'") - * // Returns false (optional) - */ -export function parseArgumentRemoval(message: string): boolean { - // Extract the argument type from the message - // Format: "Argument 'name: Type' was removed from field '...'" - const match = message.match(/Argument '([^:]+):\s*([^']+)' was removed/); - - if (!match || match.length < 3) { - throw new Error(`Invalid argument removal message format: ${message}`); - } - - const argumentType = match[2].trim(); - - // Check if the type ends with "!" 
to determine if it was required - return argumentType.endsWith('!'); -} - -/** - * Parses a type change message (for both input fields and arguments) and categorizes it into one of the FieldTypeChangeCategory cases. - * Supports two message formats: - * - "Input field 'TypeName.fieldName' changed type from 'FromType' to 'ToType'" - * - "Type for argument 'name' on field 'TypeName.fieldName' changed from 'FromType' to 'ToType'" - * - * @param message - String in either format above + * @param oldType - The old type from meta (e.g., oldInputFieldType or oldArgumentType) + * @param newType - The new type from meta (e.g., newInputFieldType or newArgumentType) * @returns The category of the type change * * @example - * parseTypeChange("Input field 'SearchInput.hasPets' changed type from 'Boolean!' to '[Boolean!]!'") - * // Returns FieldTypeChangeCategory.REQUIRED_DIFFERENT_TO_REQUIRED_DIFFERENT + * getTypeChangeCategory("Boolean!", "[Boolean!]!") + * // Returns FieldTypeChangeCategory.REQUIRED_TO_REQUIRED_DIFFERENT * * @example - * parseTypeChange("Type for argument 'criteria' on field 'Query.findEmployees' changed from 'SearchInput' to 'SearchInput!'") - * // Returns FieldTypeChangeCategory.OPTIONAL_SAME_TO_REQUIRED_SAME + * getTypeChangeCategory("SearchInput", "SearchInput!") + * // Returns FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_SAME */ -export function parseTypeChange(message: string): FieldTypeChangeCategory { - // Try both patterns: "changed type from" (input fields) and "changed from" (arguments) - const match = - message.match(/changed type from '([^']+)' to '([^']+)'/) || message.match(/changed from '([^']+)' to '([^']+)'/); - - if (!match || match.length < 3) { - throw new Error(`Invalid type change message format: ${message}`); - } - - const fromType = match[1]; - const toType = match[2]; - - // Determine if types are required (end with "!") - const fromRequired = fromType.endsWith('!'); - const toRequired = toType.endsWith('!'); - - const fromBaseType = 
extractBaseType(fromType); - const toBaseType = extractBaseType(toType); - const fromNormalized = normalizeType(fromType); - const toNormalized = normalizeType(toType); +export function getTypeChangeCategory(oldType: string, newType: string): FieldTypeChangeCategory { + // Parse type strings into AST using GraphQL's parseType + // Example 1: "Boolean!" -> NonNullType { type: NamedType { name: "Boolean" } } + // Example 2: "[Boolean!]!" -> NonNullType { type: ListType { type: NonNullType { type: NamedType { name: "Boolean" } } } } + // Example 3: "SearchInput" -> NamedType { name: "SearchInput" } + const oldTypeNode = parseType(oldType); + const newTypeNode = parseType(newType); + + // Check if types are required (NonNull) by checking the outermost wrapper + // Example 1: "Boolean!" -> fromRequired = true + // Example 2: "[Boolean]" -> fromRequired = false + // Example 3: "SearchInput" -> fromRequired = false + const fromRequired = oldTypeNode.kind === 'NonNullType'; + const toRequired = newTypeNode.kind === 'NonNullType'; + + // Get the named types (unwraps all wrappers like NonNull and List) + // Example 1: "[Boolean!]!" -> NamedType { name: "Boolean" } + // Example 2: "SearchInput" -> NamedType { name: "SearchInput" } + // Example 3: "[String]" -> NamedType { name: "String" } + const oldNamedType = getNamedType(oldTypeNode); + const newNamedType = getNamedType(newTypeNode); + + // Get base type names from the named type nodes + // Example 1: "[Boolean!]!" -> "Boolean" + // Example 2: "SearchInput" -> "SearchInput" + // Example 3: "[String]" -> "String" + const oldTypeName = oldNamedType.name.value; + const newTypeName = newNamedType.name.value; + + // Get normalized structure (without NonNull on the outermost layer) + // This preserves inner structure like [Type] vs Type + // Example 1: "Boolean!" -> normalized: "Boolean" + // Example 2: "[Boolean!]!" 
-> normalized: "[Boolean!]" + // Example 3: "[Boolean]" -> normalized: "[Boolean]" + // Example 4: "SearchInput" -> normalized: "SearchInput" + const oldNormalized = print(fromRequired ? (oldTypeNode as NonNullTypeNode).type : oldTypeNode); + const newNormalized = print(toRequired ? (newTypeNode as NonNullTypeNode).type : newTypeNode); // Check if base types are the same AND structure is the same - const sameBaseType = fromBaseType === toBaseType; - const sameStructure = fromNormalized === toNormalized; + // Example 1: "Boolean" vs "Boolean!" -> sameBaseType: true, sameStructure: true + // Example 2: "Boolean" vs "[Boolean]" -> sameBaseType: true, sameStructure: false + // Example 3: "Boolean" vs "String" -> sameBaseType: false, sameStructure: false + // Example 4: "[Boolean!]" vs "[Boolean!]!" -> sameBaseType: true, sameStructure: true + const sameBaseType = oldTypeName === newTypeName; + const sameStructure = oldNormalized === newNormalized; // Types are considered "same" only if both base type and structure match + // Example 1: "Boolean" -> "Boolean!" -> sameType: true (same base + same structure) + // Example 2: "Boolean" -> "[Boolean]" -> sameType: false (same base but different structure) + // Example 3: "Boolean" -> "String" -> sameType: false (different base) const sameType = sameBaseType && sameStructure; // Categorize based on the 4 cases @@ -471,7 +446,9 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In } // 1. When the type of input field has changed, we know the exact type name and field name e.g. 
'MyInput.name' case ChangeType.InputFieldTypeChanged: { - const inputFieldTypeChangeCategory = parseTypeChange(change.message); + // Use structured meta instead of parsing message + const meta = change.meta as InputFieldTypeChangedChange['meta']; + const inputFieldTypeChangeCategory = getTypeChangeCategory(meta.oldInputFieldType, meta.newInputFieldType); switch (inputFieldTypeChangeCategory) { case FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_SAME: { // Int -> Int! @@ -534,7 +511,9 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In // 1. When an argument has changed, we know the exact path to the argument e.g. 'Query.engineer.id' // and the type name e.g. 'Query' case ChangeType.FieldArgumentTypeChanged: { - const argumentTypeChangeCategory = parseTypeChange(change.message); + // Use structured meta instead of parsing message + const meta = change.meta as FieldArgumentTypeChangedChange['meta']; + const argumentTypeChangeCategory = getTypeChangeCategory(meta.oldArgumentType, meta.newArgumentType); switch (argumentTypeChangeCategory) { case FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_SAME: { // SearchInput -> SearchInput! 
@@ -600,7 +579,9 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In }; } case ChangeType.FieldArgumentRemoved: { - const isRequired = parseArgumentRemoval(change.message); + // Use structured meta instead of parsing message + const meta = change.meta as FieldArgumentRemovedChange['meta']; + const isRequired = meta.removedFieldType.endsWith('!'); if (isRequired) { // in this case, all the ops which use this argument are breaking return { From 9733d9ab2bca8b0ce68f1bb5254335351baa986d Mon Sep 17 00:00:00 2001 From: JivusAyrus Date: Wed, 3 Dec 2025 13:00:19 +0100 Subject: [PATCH 19/22] fix: tests --- controlplane/test/breaking-changes.test.ts | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/controlplane/test/breaking-changes.test.ts b/controlplane/test/breaking-changes.test.ts index 0a0358e0de..fa61ac65ed 100644 --- a/controlplane/test/breaking-changes.test.ts +++ b/controlplane/test/breaking-changes.test.ts @@ -153,18 +153,28 @@ describe('BreakingChanges', () => { changeType: SchemaChangeType.TYPE_ADDED, path: 'openfed__FieldSet', isBreaking: false, + meta: { + addedTypeName: 'openfed__FieldSet', + }, }, { message: "Directive 'key' was added to object 'User'", changeType: SchemaChangeType.DIRECTIVE_USAGE_OBJECT_ADDED, path: 'User.key', isBreaking: false, + meta: { + addedDirectiveName: 'key', + objectName: 'User', + }, }, { message: "Directive 'key' was added", changeType: SchemaChangeType.DIRECTIVE_ADDED, path: '@key', isBreaking: false, + meta: { + addedDirectiveName: 'key', + }, }, ]); } From 251354f8f89f6ebe8c532adba92511a4bdec17e9 Mon Sep 17 00:00:00 2001 From: StarpTech Date: Wed, 3 Dec 2025 13:43:56 +0100 Subject: [PATCH 20/22] chore: revert to b.loop in benchs --- .../pkg/graphqlschemausage/schemausage_bench_test.go | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/router/pkg/graphqlschemausage/schemausage_bench_test.go b/router/pkg/graphqlschemausage/schemausage_bench_test.go index 
d2118cb18e..3f732a1d0e 100644 --- a/router/pkg/graphqlschemausage/schemausage_bench_test.go +++ b/router/pkg/graphqlschemausage/schemausage_bench_test.go @@ -111,7 +111,7 @@ func BenchmarkGetTypeFieldUsageInfo(b *testing.B) { b.ResetTimer() b.ReportAllocs() - for i := 0; i < b.N; i++ { + for b.Loop() { result := GetTypeFieldUsageInfo(generatedPlan) _ = result // Prevent compiler optimization } @@ -124,7 +124,7 @@ func BenchmarkGetArgumentUsageInfo(b *testing.B) { b.ResetTimer() b.ReportAllocs() - for i := 0; i < b.N; i++ { + for b.Loop() { result, err := GetArgumentUsageInfo(operation, definition, variables, generatedPlan, nil) if err != nil { b.Fatal(err) @@ -140,7 +140,7 @@ func BenchmarkGetInputUsageInfo(b *testing.B) { b.ResetTimer() b.ReportAllocs() - for i := 0; i < b.N; i++ { + for b.Loop() { result, err := GetInputUsageInfo(operation, definition, variables, generatedPlan, nil) if err != nil { b.Fatal(err) @@ -157,7 +157,7 @@ func BenchmarkIntoGraphQLMetrics(b *testing.B) { b.ResetTimer() b.ReportAllocs() - for i := 0; i < b.N; i++ { + for b.Loop() { result := typeFieldMetrics.IntoGraphQLMetrics() _ = result // Prevent compiler optimization } @@ -171,7 +171,7 @@ func BenchmarkSchemaUsageEndToEnd(b *testing.B) { b.ResetTimer() b.ReportAllocs() - for i := 0; i < b.N; i++ { + for b.Loop() { // Extract type field usage typeFieldUsage := GetTypeFieldUsageInfo(generatedPlan) @@ -311,7 +311,7 @@ func BenchmarkSchemaUsageWithManyFields(b *testing.B) { b.ResetTimer() b.ReportAllocs() - for i := 0; i < b.N; i++ { + for b.Loop() { // Extract type field usage typeFieldUsage := GetTypeFieldUsageInfo(generatedPlan) From 872ec6f7365f376cb888615b2a77955ab9ba077e Mon Sep 17 00:00:00 2001 From: StarpTech Date: Wed, 3 Dec 2025 13:45:23 +0100 Subject: [PATCH 21/22] chore: revert dev config --- router/debug.config.yaml | 4 ++++ router/demo.config.yaml | 4 ---- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/router/debug.config.yaml b/router/debug.config.yaml index 
0f241074aa..10b042397b 100644 --- a/router/debug.config.yaml +++ b/router/debug.config.yaml @@ -5,6 +5,10 @@ version: '1' +#graphql_metrics: +# enabled: true +# collector_endpoint: http://localhost:4005 + execution_config: file: path: './__schemas/config.json' diff --git a/router/demo.config.yaml b/router/demo.config.yaml index 8390baa298..ccea543c6d 100644 --- a/router/demo.config.yaml +++ b/router/demo.config.yaml @@ -5,10 +5,6 @@ version: "1" -graphql_metrics: - enabled: true - collector_endpoint: http://localhost:4005 - events: providers: nats: From 93d1f9ed46d012b8daec03af42a5b26e74c156aa Mon Sep 17 00:00:00 2001 From: StarpTech Date: Thu, 4 Dec 2025 23:20:56 +0100 Subject: [PATCH 22/22] chore: fix linter --- cli/src/core/client/client.ts | 1 - router/pkg/graphqlschemausage/schemausage.go | 5 ----- 2 files changed, 6 deletions(-) diff --git a/cli/src/core/client/client.ts b/cli/src/core/client/client.ts index 445f60ca01..05274f37cf 100644 --- a/cli/src/core/client/client.ts +++ b/cli/src/core/client/client.ts @@ -19,7 +19,6 @@ export const CreateClient = (opts: ClientOptions): Client => { const transport = createConnectTransport({ // Requests will be made to /./method baseUrl: opts.baseUrl, - // You have to tell the Node.js http API which HTTP version to use. httpVersion: '1.1', nodeOptions: { diff --git a/router/pkg/graphqlschemausage/schemausage.go b/router/pkg/graphqlschemausage/schemausage.go index d114de0030..f52bc7e6ec 100644 --- a/router/pkg/graphqlschemausage/schemausage.go +++ b/router/pkg/graphqlschemausage/schemausage.go @@ -481,11 +481,6 @@ type subgraphMapper struct { variableToSubgraphs map[string][]string } -// getFieldSubgraphs returns subgraph IDs for a field path -func (s *subgraphMapper) getFieldSubgraphs(pathKey string) []string { - return s.fieldToSubgraphs[pathKey] -} - // getVariableSubgraphs returns subgraph IDs for a variable func (s *subgraphMapper) getVariableSubgraphs(varName string) []string { return s.variableToSubgraphs[varName]