diff --git a/cli/src/core/client/client.ts b/cli/src/core/client/client.ts index 445f60ca01..05274f37cf 100644 --- a/cli/src/core/client/client.ts +++ b/cli/src/core/client/client.ts @@ -19,7 +19,6 @@ export const CreateClient = (opts: ClientOptions): Client => { const transport = createConnectTransport({ // Requests will be made to /./method baseUrl: opts.baseUrl, - // You have to tell the Node.js http API which HTTP version to use. httpVersion: '1.1', nodeOptions: { diff --git a/controlplane/src/core/composition/schemaCheck.ts b/controlplane/src/core/composition/schemaCheck.ts index ad1afe591c..f51570cbf8 100644 --- a/controlplane/src/core/composition/schemaCheck.ts +++ b/controlplane/src/core/composition/schemaCheck.ts @@ -1,4 +1,4 @@ -import { ChangeType, CriticalityLevel, diff, TypeOfChangeType } from '@graphql-inspector/core'; +import { ChangeType, CriticalityLevel, diff, TypeOfChangeType, SerializableChange } from '@graphql-inspector/core'; import { EnumStatusCode } from '@wundergraph/cosmo-connect/dist/common/common_pb'; import { GraphQLSchema } from 'graphql'; import { buildSchema } from './composition.js'; @@ -9,6 +9,8 @@ export interface SchemaDiff { // path is the path to the field or type that changed path: string; isBreaking: boolean; + // meta contains structured data about the change from graphql-inspector + meta: SerializableChange['meta']; } export interface GetDiffBetweenGraphsSuccess { @@ -34,6 +36,7 @@ export async function getSchemaDiff(oldSchemaSDL: GraphQLSchema, newSchemaSDL: G message: change.message, changeType: change.type, path: change.path ?? 
'', + meta: change.meta, isBreaking: change.criticality.level === CriticalityLevel.Breaking || // We consider enum value changes as breaking changes because it is common to use enums in switch statements @@ -82,6 +85,7 @@ export async function getDiffBetweenGraphs( message: breakingChange.message, changeType: breakingChange.changeType, path: breakingChange.path, + meta: breakingChange.meta, isBreaking: true, }; }); @@ -93,6 +97,7 @@ export async function getDiffBetweenGraphs( message: nonBreakingChange.message, changeType: nonBreakingChange.changeType, path: nonBreakingChange.path, + meta: nonBreakingChange.meta, isBreaking: false, }; }); diff --git a/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts b/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts index 2f110c3fdd..ae058b2588 100644 --- a/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts +++ b/controlplane/src/core/services/SchemaUsageTrafficInspector.test.ts @@ -19,11 +19,12 @@ describe('Schema Change converter', (ctx) => { const changes = await getBreakingChanges(a, b); + // the below conditions are for what would constitute a breaking change + // if the condition exists, it would be breaking expect(changes).toEqual([ { - isArgument: true, - path: ['a', 'b'], schemaChangeId: '0', + path: ['a'], typeName: 'Query', }, ]); @@ -51,13 +52,162 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { - isArgument: true, - path: ['details', 'all'], schemaChangeId: '0', + path: ['details'], typeName: 'Rocket', }, ]); }); + + test('Remove a required argument', async () => { + const a = buildSchema(/* GraphQL */ ` + type Query { + a(b: Boolean!): String + } + `); + const b = buildSchema(/* GraphQL */ ` + type Query { + a: String + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { + schemaChangeId: '0', + path: ['a'], + typeName: 'Query', + }, + ]); + }); + + test('Remove an optional argument', async () => 
{ + const a = buildSchema(/* GraphQL */ ` + type Query { + a(b: Boolean): String + } + `); + const b = buildSchema(/* GraphQL */ ` + type Query { + a: String + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { + schemaChangeId: '0', + path: ['a', 'b'], + typeName: 'Query', + isArgument: true, + isNull: false, + }, + ]); + }); + + test('Change argument type from optional to required same', async () => { + const a = buildSchema(/* GraphQL */ ` + type Query { + a(b: Boolean): String + } + `); + const b = buildSchema(/* GraphQL */ ` + type Query { + a(b: Boolean!): String + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { + schemaChangeId: '0', + path: ['a', 'b'], + typeName: 'Query', + fieldName: 'b', + isArgument: true, + isNull: true, + }, + ]); + }); + + test('Change argument type from optional to required different', async () => { + const a = buildSchema(/* GraphQL */ ` + type Query { + a(b: Boolean): String + } + `); + const b = buildSchema(/* GraphQL */ ` + type Query { + a(b: String!): String + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { + schemaChangeId: '0', + path: ['a', 'b'], + typeName: 'Query', + fieldName: 'b', + isArgument: true, + }, + ]); + }); + + test('Change argument type from required to required different', async () => { + const a = buildSchema(/* GraphQL */ ` + type Query { + a(b: Boolean!): String + } + `); + const b = buildSchema(/* GraphQL */ ` + type Query { + a(b: String!): String + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { + schemaChangeId: '0', + path: ['a', 'b'], + typeName: 'Query', + fieldName: 'b', + isArgument: true, + }, + ]); + }); + + test('Change argument type from optional to optional different', async () => { + const a = buildSchema(/* GraphQL */ ` + type Query { + a(b: Boolean): String + } + `); + const b = buildSchema(/* GraphQL */ ` + 
type Query { + a(b: String): String + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { + schemaChangeId: '0', + path: ['a', 'b'], + typeName: 'Query', + fieldName: 'b', + isArgument: true, + isNull: false, + }, + ]); + }); }); describe('Input', (ctx) => { @@ -78,15 +228,67 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { - fieldName: 'b', + schemaChangeId: '0', + path: ['Foo'], isInput: true, + isNull: false, + }, + ]); + }); + + test('Remove a required input field', async () => { + const a = buildSchema(/* GraphQL */ ` + input Foo { + a: String! + b: String! + } + `); + const b = buildSchema(/* GraphQL */ ` + input Foo { + a: String! + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { schemaChangeId: '0', - typeName: 'Foo', + path: ['Foo'], + isInput: true, + isNull: false, }, ]); }); - test('Change the type of an Input field', async () => { + test('Remove an optional input field', async () => { + const a = buildSchema(/* GraphQL */ ` + input Foo { + a: String! + b: String + } + `); + const b = buildSchema(/* GraphQL */ ` + input Foo { + a: String! + } + `); + + const changes = await getBreakingChanges(a, b); + + // As we dont know whether the field is optional or required, we use the same condition as required fields + // We will not miss any breaking ops but will have some ops which might not be breaking + expect(changes).toEqual([ + { + schemaChangeId: '0', + path: ['Foo'], + isInput: true, + isNull: false, + }, + ]); + }); + + test('Change input field type from required to required different', async () => { const a = buildSchema(/* GraphQL */ ` input Foo { a: String! 
@@ -102,10 +304,84 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { + schemaChangeId: '0', + path: ['Foo'], + isInput: true, + isNull: false, + }, + ]); + }); + + test('Change input field type from optional to required same', async () => { + const a = buildSchema(/* GraphQL */ ` + input Foo { + a: String + } + `); + const b = buildSchema(/* GraphQL */ ` + input Foo { + a: String! + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { + schemaChangeId: '0', + typeName: 'Foo', fieldName: 'a', isInput: true, + isNull: true, + }, + ]); + }); + + test('Change input field type from optional to required different', async () => { + const a = buildSchema(/* GraphQL */ ` + input Foo { + a: String + } + `); + const b = buildSchema(/* GraphQL */ ` + input Foo { + a: Int! + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { + schemaChangeId: '0', + path: ['Foo'], + isInput: true, + isNull: false, + }, + ]); + }); + + test('Change input field type from optional to optional different', async () => { + const a = buildSchema(/* GraphQL */ ` + input Foo { + a: String + } + `); + const b = buildSchema(/* GraphQL */ ` + input Foo { + a: Int + } + `); + + const changes = await getBreakingChanges(a, b); + + expect(changes).toEqual([ + { schemaChangeId: '0', typeName: 'Foo', + fieldName: 'a', + isInput: true, + isNull: false, }, ]); }); @@ -135,8 +411,8 @@ describe('Schema Change converter', (ctx) => { typeName: 'Rocket', }, { - fieldName: 'a', schemaChangeId: '1', + fieldName: 'a', typeName: 'Query', }, ]); @@ -170,8 +446,8 @@ describe('Schema Change converter', (ctx) => { expect(changes).toEqual([ { - namedType: 'enumA', schemaChangeId: '0', + namedType: 'enumA', }, ]); }); @@ -199,7 +475,7 @@ describe('Schema Change converter', (ctx) => { async function getBreakingChanges(a: GraphQLSchema, b: GraphQLSchema): Promise { const changes = await getSchemaDiff(a, b); - return 
changes + const groups = changes .map((c, i) => toInspectorChange( { @@ -207,9 +483,12 @@ async function getBreakingChanges(a: GraphQLSchema, b: GraphQLSchema): Promise c !== null) as InspectorSchemaChange[]; + + return groups; } diff --git a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts index d2a5e8cea1..9e5af4026c 100644 --- a/controlplane/src/core/services/SchemaUsageTrafficInspector.ts +++ b/controlplane/src/core/services/SchemaUsageTrafficInspector.ts @@ -1,7 +1,142 @@ -import { ChangeType } from '@graphql-inspector/core'; +import { + ChangeType, + FieldArgumentRemovedChange, + FieldArgumentTypeChangedChange, + InputFieldTypeChangedChange, +} from '@graphql-inspector/core'; +import type { NamedTypeNode, NonNullTypeNode, TypeNode } from 'graphql'; +import { parseType, print } from 'graphql'; +import { SchemaCheckChangeAction } from '../../db/models.js'; import { ClickHouseClient } from '../clickhouse/index.js'; import { SchemaDiff } from '../composition/schemaCheck.js'; -import { SchemaCheckChangeAction } from '../../db/models.js'; + +export enum FieldTypeChangeCategory { + /** + * Optional same type -> Required same type + * Example: "Boolean" -> "Boolean!" + */ + OPTIONAL_TO_REQUIRED_SAME = 'OPTIONAL_TO_REQUIRED_SAME', + /** + * Optional different type -> Required different type + * Example: "Boolean" -> "String!" + */ + OPTIONAL_TO_REQUIRED_DIFFERENT = 'OPTIONAL_TO_REQUIRED_DIFFERENT', + /** + * Required different type -> Required different type + * Example: "Boolean!" -> "String!" 
+ */ + REQUIRED_TO_REQUIRED_DIFFERENT = 'REQUIRED_TO_REQUIRED_DIFFERENT', + /** + * Optional different type -> Optional different type + * Example: "Boolean" -> "String" + */ + OPTIONAL_TO_OPTIONAL_DIFFERENT = 'OPTIONAL_TO_OPTIONAL_DIFFERENT', +} + +/** + * Get the named type from a TypeNode AST + */ +function getNamedType(typeNode: TypeNode): NamedTypeNode { + if (typeNode.kind === 'NamedType') { + return typeNode; + } + if (typeNode.kind === 'NonNullType' || typeNode.kind === 'ListType') { + return getNamedType(typeNode.type); + } + throw new Error('Unexpected type node'); +} + +/** + * Determines the type change category from meta information using GraphQL's type parsing utilities. + * Works for both InputFieldTypeChanged and FieldArgumentTypeChanged. + * + * @param oldType - The old type from meta (e.g., oldInputFieldType or oldArgumentType) + * @param newType - The new type from meta (e.g., newInputFieldType or newArgumentType) + * @returns The category of the type change + * + * @example + * getTypeChangeCategory("Boolean!", "[Boolean!]!") + * // Returns FieldTypeChangeCategory.REQUIRED_TO_REQUIRED_DIFFERENT + * + * @example + * getTypeChangeCategory("SearchInput", "SearchInput!") + * // Returns FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_SAME + */ +export function getTypeChangeCategory(oldType: string, newType: string): FieldTypeChangeCategory { + // Parse type strings into AST using GraphQL's parseType + // Example 1: "Boolean!" -> NonNullType { type: NamedType { name: "Boolean" } } + // Example 2: "[Boolean!]!" -> NonNullType { type: ListType { type: NonNullType { type: NamedType { name: "Boolean" } } } } + // Example 3: "SearchInput" -> NamedType { name: "SearchInput" } + const oldTypeNode = parseType(oldType); + const newTypeNode = parseType(newType); + + // Check if types are required (NonNull) by checking the outermost wrapper + // Example 1: "Boolean!" 
-> fromRequired = true + // Example 2: "[Boolean]" -> fromRequired = false + // Example 3: "SearchInput" -> fromRequired = false + const fromRequired = oldTypeNode.kind === 'NonNullType'; + const toRequired = newTypeNode.kind === 'NonNullType'; + + // Get the named types (unwraps all wrappers like NonNull and List) + // Example 1: "[Boolean!]!" -> NamedType { name: "Boolean" } + // Example 2: "SearchInput" -> NamedType { name: "SearchInput" } + // Example 3: "[String]" -> NamedType { name: "String" } + const oldNamedType = getNamedType(oldTypeNode); + const newNamedType = getNamedType(newTypeNode); + + // Get base type names from the named type nodes + // Example 1: "[Boolean!]!" -> "Boolean" + // Example 2: "SearchInput" -> "SearchInput" + // Example 3: "[String]" -> "String" + const oldTypeName = oldNamedType.name.value; + const newTypeName = newNamedType.name.value; + + // Get normalized structure (without NonNull on the outermost layer) + // This preserves inner structure like [Type] vs Type + // Example 1: "Boolean!" -> normalized: "Boolean" + // Example 2: "[Boolean!]!" -> normalized: "[Boolean!]" + // Example 3: "[Boolean]" -> normalized: "[Boolean]" + // Example 4: "SearchInput" -> normalized: "SearchInput" + const oldNormalized = print(fromRequired ? (oldTypeNode as NonNullTypeNode).type : oldTypeNode); + const newNormalized = print(toRequired ? (newTypeNode as NonNullTypeNode).type : newTypeNode); + + // Check if base types are the same AND structure is the same + // Example 1: "Boolean" vs "Boolean!" -> sameBaseType: true, sameStructure: true + // Example 2: "Boolean" vs "[Boolean]" -> sameBaseType: true, sameStructure: false + // Example 3: "Boolean" vs "String" -> sameBaseType: false, sameStructure: false + // Example 4: "[Boolean!]" vs "[Boolean!]!" 
-> sameBaseType: true, sameStructure: true + const sameBaseType = oldTypeName === newTypeName; + const sameStructure = oldNormalized === newNormalized; + + // Types are considered "same" only if both base type and structure match + // Example 1: "Boolean" -> "Boolean!" -> sameType: true (same base + same structure) + // Example 2: "Boolean" -> "[Boolean]" -> sameType: false (same base but different structure) + // Example 3: "Boolean" -> "String" -> sameType: false (different base) + const sameType = sameBaseType && sameStructure; + + // Categorize based on the 4 cases + if (sameType && !fromRequired && toRequired) { + // Case 1: Optional same type -> Required same type + // Example: "Boolean" -> "Boolean!" + return FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_SAME; + } else if (!sameType && !fromRequired && toRequired) { + // Case 2: Optional different type -> Required different type + // Example: "Boolean" -> "String!" + return FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_DIFFERENT; + } else if (!sameType && fromRequired && toRequired) { + // Case 3: Required different type -> Required different type + // Example: "Boolean!" -> "String!" 
+ return FieldTypeChangeCategory.REQUIRED_TO_REQUIRED_DIFFERENT; + } else if (!sameType && !fromRequired && !toRequired) { + // Case 4: Optional different type -> Optional different type + // Example: "Boolean" -> "String" + return FieldTypeChangeCategory.OPTIONAL_TO_OPTIONAL_DIFFERENT; + } else { + // Edge case: same type, from required, to optional (shouldn't happen in breaking changes) + // Fallback to same type becoming required + return FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_SAME; + } +} export interface InspectorSchemaChange { schemaChangeId: string; @@ -11,6 +146,7 @@ export interface InspectorSchemaChange { path?: string[]; isInput?: boolean; isArgument?: boolean; + isNull?: boolean; } export interface InspectorFilter { @@ -36,6 +172,7 @@ export class SchemaUsageTrafficInspector { /** * Inspect the usage of a schema change in the last X days on real traffic and return the * affected operations. We will consider all available compositions. + * @param changes - Array of inspector changes */ public async inspect( changes: InspectorSchemaChange[], @@ -71,16 +208,22 @@ export class SchemaUsageTrafficInspector { params.typeName = change.typeName; where.push(`hasAny(TypeNames, [{typeName:String}])`); } + // fieldName can be empty if a type was removed if (change.fieldName) { params.fieldName = change.fieldName; where.push(`FieldName = {fieldName:String}`); } + if (change.isInput) { where.push(`IsInput = true`); } else if (change.isArgument) { where.push(`IsArgument = true`); } + + if (change.isNull !== undefined) { + where.push(`IsNull = ${change.isNull}`); + } where.push(`IsIndirectFieldUsage = false`); const query = ` @@ -131,6 +274,7 @@ export class SchemaUsageTrafficInspector { /** * Convert schema changes to inspector changes. Will ignore a change if it is not inspectable. * Ultimately, will result in a breaking change because the change is not inspectable with the current implementation. + * Returns an array of inspector changes. 
*/ public schemaChangesToInspectorChanges( schemaChanges: SchemaDiff[], @@ -205,6 +349,7 @@ export function collectOperationUsageStats(inspectorResult: InspectorOperationRe /** * Convert a schema change to an inspector change. Throws an error if the change is not supported. * Only breaking changes should be passed to this function because we only care about breaking changes. + * Returns an inspector change with the schemaChangeId included. */ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): InspectorSchemaChange | null { const path = change.path.split('.'); @@ -312,28 +457,162 @@ export function toInspectorChange(change: SchemaDiff, schemaCheckId: string): In }; } // 1. When the type of input field has changed, we know the exact type name and field name e.g. 'MyInput.name' - case ChangeType.InputFieldTypeChanged: + case ChangeType.InputFieldTypeChanged: { + // Use structured meta instead of parsing message + const meta = change.meta as InputFieldTypeChangedChange['meta']; + const inputFieldTypeChangeCategory = getTypeChangeCategory(meta.oldInputFieldType, meta.newInputFieldType); + switch (inputFieldTypeChangeCategory) { + case FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_SAME: { + // Int -> Int! + return { + schemaChangeId: schemaCheckId, + // if the input is used and the field is not passed, + // but now that it is required, its breaking + typeName: path[0], + fieldName: path[1], + isInput: true, + isNull: true, + }; + } + case FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_DIFFERENT: { + // Int -> Float! + // in this case, all the ops which have this input type are breaking + return { + schemaChangeId: schemaCheckId, + path: [path[0]], + isInput: true, + isNull: false, + }; + } + case FieldTypeChangeCategory.REQUIRED_TO_REQUIRED_DIFFERENT: { + // Int! -> Float! 
+ // in this case, all the ops which have this input type are breaking + return { + schemaChangeId: schemaCheckId, + path: [path[0]], + isInput: true, + isNull: false, + }; + } + case FieldTypeChangeCategory.OPTIONAL_TO_OPTIONAL_DIFFERENT: { + // Int -> Float + // in this case, any ops which use the input field and are not null are breaking + return { + schemaChangeId: schemaCheckId, + typeName: path[0], + fieldName: path[1], + isInput: true, + isNull: false, + }; + } + default: { + throw new Error(`Unsupported input field type change category: ${inputFieldTypeChangeCategory}`); + } + } + } case ChangeType.InputFieldRemoved: case ChangeType.InputFieldAdded: { + // in these cases, all the ops which use this input type are breaking return { schemaChangeId: schemaCheckId, - fieldName: path[1], - typeName: path[0], + path: [path[0]], isInput: true, + isNull: false, }; } // 1. When an argument has changed, we know the exact path to the argument e.g. 'Query.engineer.id' // and the type name e.g. 'Query' - case ChangeType.FieldArgumentRemoved: - case ChangeType.FieldArgumentAdded: // Only when a required argument is added case ChangeType.FieldArgumentTypeChanged: { + // Use structured meta instead of parsing message + const meta = change.meta as FieldArgumentTypeChangedChange['meta']; + const argumentTypeChangeCategory = getTypeChangeCategory(meta.oldArgumentType, meta.newArgumentType); + switch (argumentTypeChangeCategory) { + case FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_SAME: { + // SearchInput -> SearchInput! + return { + schemaChangeId: schemaCheckId, + // if the argument is used and not passed (null), + // but now that it is required, its breaking + path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names + typeName: path[0], + fieldName: path[2], + isArgument: true, + isNull: true, + }; + } + case FieldTypeChangeCategory.OPTIONAL_TO_REQUIRED_DIFFERENT: { + // SearchInput -> String! 
+ // in this case, all the ops which have this argument are breaking + return { + schemaChangeId: schemaCheckId, + path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names + typeName: path[0], + fieldName: path[2], + isArgument: true, + }; + } + case FieldTypeChangeCategory.REQUIRED_TO_REQUIRED_DIFFERENT: { + // SearchInput! -> String! + // in this case, all the ops which have this argument are breaking + return { + schemaChangeId: schemaCheckId, + path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names + typeName: path[0], + fieldName: path[2], + isArgument: true, + }; + } + case FieldTypeChangeCategory.OPTIONAL_TO_OPTIONAL_DIFFERENT: { + // SearchInput -> String + // in this case, any ops which use the argument and are not null are breaking + return { + schemaChangeId: schemaCheckId, + path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names + typeName: path[0], + fieldName: path[2], + isArgument: true, + isNull: false, + }; + } + default: { + throw new Error(`Unsupported argument type change category: ${argumentTypeChangeCategory}`); + } + } + } + + // Only when a required argument is added + case ChangeType.FieldArgumentAdded: { + // in this case, all the ops which have this argument are breaking return { schemaChangeId: schemaCheckId, - path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names - typeName: path[0], // Enclosing type e.g. 'Query' or 'Engineer' when the argument is on a field of type Engineer - isArgument: true, + // e.g. if the path recieved is 'Query.employee.a', the path should be ['employee'] as its new field or it has changed the type of the argument, we check the usage of the operation. 
+ path: path.slice(1, 2), + typeName: path[0], }; } + case ChangeType.FieldArgumentRemoved: { + // Use structured meta instead of parsing message + const meta = change.meta as FieldArgumentRemovedChange['meta']; + const isRequired = meta.removedFieldType.endsWith('!'); + if (isRequired) { + // in this case, all the ops which use this argument are breaking + return { + schemaChangeId: schemaCheckId, + // e.g. if the path recieved is 'Query.employee.a', the path should be ['employee'] as its new field or it has changed the type of the argument, we check the usage of the operation. + path: path.slice(1, 2), + typeName: path[0], + }; + } else { + // in this case, any ops which use the argument and are not null are breaking + return { + schemaChangeId: schemaCheckId, + path: path.slice(1), // The path to the updated argument e.g. 'engineer.name' of the type names + typeName: path[0], + isArgument: true, + isNull: false, + }; + } + } } // no return to enforce that all cases are handled } diff --git a/controlplane/test/breaking-changes.test.ts b/controlplane/test/breaking-changes.test.ts index 0a0358e0de..fa61ac65ed 100644 --- a/controlplane/test/breaking-changes.test.ts +++ b/controlplane/test/breaking-changes.test.ts @@ -153,18 +153,28 @@ describe('BreakingChanges', () => { changeType: SchemaChangeType.TYPE_ADDED, path: 'openfed__FieldSet', isBreaking: false, + meta: { + addedTypeName: 'openfed__FieldSet', + }, }, { message: "Directive 'key' was added to object 'User'", changeType: SchemaChangeType.DIRECTIVE_USAGE_OBJECT_ADDED, path: 'User.key', isBreaking: false, + meta: { + addedDirectiveName: 'key', + objectName: 'User', + }, }, { message: "Directive 'key' was added", changeType: SchemaChangeType.DIRECTIVE_ADDED, path: '@key', isBreaking: false, + meta: { + addedDirectiveName: 'key', + }, }, ]); } diff --git a/graphqlmetrics/core/metrics_service.go b/graphqlmetrics/core/metrics_service.go index 514dbbd44f..e7c97ae81b 100644 --- 
a/graphqlmetrics/core/metrics_service.go +++ b/graphqlmetrics/core/metrics_service.go @@ -338,10 +338,11 @@ func (s *MetricsService) appendUsageMetrics( strconv.FormatInt(int64(schemaUsage.RequestInfo.StatusCode), 10), schemaUsage.RequestInfo.Error, fieldUsage.SubgraphIDs, - false, - false, + false, // IsArgument + false, // IsInput schemaUsage.Attributes, fieldUsage.IndirectInterfaceField, + false, // IsNull - not applicable for field metrics ) if err != nil { return fmt.Errorf("failed to append field metric to batch: %w", err) @@ -350,6 +351,13 @@ func (s *MetricsService) appendUsageMetrics( for _, argumentUsage := range schemaUsage.ArgumentMetrics { + // Sort stable for fields where the order doesn't matter + // This reduce cardinality and improves compression + + sort.SliceStable(argumentUsage.SubgraphIDs, func(i, j int) bool { + return argumentUsage.SubgraphIDs[i] < argumentUsage.SubgraphIDs[j] + }) + err := metricBatch.Append( insertTime, claims.OrganizationID, @@ -366,11 +374,12 @@ func (s *MetricsService) appendUsageMetrics( schemaUsage.ClientInfo.Version, strconv.FormatInt(int64(schemaUsage.RequestInfo.StatusCode), 10), schemaUsage.RequestInfo.Error, - []string{}, - true, - false, + argumentUsage.SubgraphIDs, + true, // IsArgument + false, // IsInput schemaUsage.Attributes, - false, + false, // IsIndirectFieldUsage + argumentUsage.IsNull, ) if err != nil { return fmt.Errorf("failed to append argument metric to batch: %w", err) @@ -379,6 +388,13 @@ func (s *MetricsService) appendUsageMetrics( for _, inputUsage := range schemaUsage.InputMetrics { + // Sort stable for fields where the order doesn't matter + // This reduce cardinality and improves compression + + sort.SliceStable(inputUsage.SubgraphIDs, func(i, j int) bool { + return inputUsage.SubgraphIDs[i] < inputUsage.SubgraphIDs[j] + }) + err := metricBatch.Append( insertTime, claims.OrganizationID, @@ -395,11 +411,12 @@ func (s *MetricsService) appendUsageMetrics( schemaUsage.ClientInfo.Version, 
strconv.FormatInt(int64(schemaUsage.RequestInfo.StatusCode), 10), schemaUsage.RequestInfo.Error, - []string{}, + inputUsage.SubgraphIDs, false, true, schemaUsage.Attributes, false, + inputUsage.IsNull, ) if err != nil { return fmt.Errorf("failed to append input metric to batch: %w", err) diff --git a/graphqlmetrics/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go b/graphqlmetrics/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go index 107671e28f..18cae56548 100644 --- a/graphqlmetrics/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go +++ b/graphqlmetrics/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go @@ -579,6 +579,11 @@ type ArgumentUsageInfo struct { Count uint64 `protobuf:"varint,3,opt,name=Count,proto3" json:"Count,omitempty"` // NamedType is the underlying type of the argument NamedType string `protobuf:"bytes,4,opt,name=NamedType,proto3" json:"NamedType,omitempty"` + // SubgraphIDs is the list of datasource IDs (e.g subgraph ID) that the argument is used from + SubgraphIDs []string `protobuf:"bytes,5,rep,name=SubgraphIDs,proto3" json:"SubgraphIDs,omitempty"` + // IsNull indicates whether this argument was explicitly set to null + // This is critical for detecting breaking changes when optional arguments become required + IsNull bool `protobuf:"varint,6,opt,name=IsNull,proto3" json:"IsNull,omitempty"` } func (x *ArgumentUsageInfo) Reset() { @@ -641,6 +646,20 @@ func (x *ArgumentUsageInfo) GetNamedType() string { return "" } +func (x *ArgumentUsageInfo) GetSubgraphIDs() []string { + if x != nil { + return x.SubgraphIDs + } + return nil +} + +func (x *ArgumentUsageInfo) GetIsNull() bool { + if x != nil { + return x.IsNull + } + return false +} + type InputUsageInfo struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -656,6 +675,11 @@ type InputUsageInfo struct { NamedType string `protobuf:"bytes,4,opt,name=NamedType,proto3" json:"NamedType,omitempty"` // EnumValues is an empty list if the input field is not 
an enum, otherwise it contains the list of used enum values EnumValues []string `protobuf:"bytes,5,rep,name=EnumValues,proto3" json:"EnumValues,omitempty"` + // SubgraphIDs is the list of datasource IDs (e.g subgraph ID) that the input is used from + SubgraphIDs []string `protobuf:"bytes,6,rep,name=SubgraphIDs,proto3" json:"SubgraphIDs,omitempty"` + // IsNull indicates whether this input was explicitly or implicitly null + // This is critical for detecting breaking changes when optional fields become required + IsNull bool `protobuf:"varint,7,opt,name=IsNull,proto3" json:"IsNull,omitempty"` } func (x *InputUsageInfo) Reset() { @@ -725,6 +749,20 @@ func (x *InputUsageInfo) GetEnumValues() []string { return nil } +func (x *InputUsageInfo) GetSubgraphIDs() []string { + if x != nil { + return x.SubgraphIDs + } + return nil +} + +func (x *InputUsageInfo) GetIsNull() bool { + if x != nil { + return x.IsNull + } + return false +} + type PublishGraphQLRequestMetricsRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -994,91 +1032,99 @@ var file_wg_cosmo_graphqlmetrics_v1_graphqlmetrics_proto_rawDesc = []byte{ 0x72, 0x65, 0x63, 0x74, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x16, 0x49, 0x6e, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, - 0x22, 0x77, 0x0a, 0x11, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x55, 0x73, 0x61, 0x67, - 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x54, 0x79, 0x70, - 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x54, 0x79, 0x70, - 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 
0x1c, 0x0a, 0x09, 0x4e, - 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, - 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x22, 0x94, 0x01, 0x0a, 0x0e, 0x49, 0x6e, - 0x70, 0x75, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, - 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, 0x61, 0x74, 0x68, - 0x12, 0x1a, 0x0a, 0x08, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x08, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, - 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x43, 0x6f, 0x75, - 0x6e, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, - 0x12, 0x1e, 0x0a, 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, - 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, - 0x22, 0x74, 0x0a, 0x23, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x47, 0x72, 0x61, 0x70, 0x68, + 0x22, 0xb1, 0x01, 0x0a, 0x11, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x55, 0x73, 0x61, + 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x54, 0x79, + 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x54, 0x79, + 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1c, 0x0a, 0x09, + 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x53, 0x75, + 0x62, 0x67, 
0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x12, 0x16, 0x0a, 0x06, + 0x49, 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x49, 0x73, + 0x4e, 0x75, 0x6c, 0x6c, 0x22, 0xce, 0x01, 0x0a, 0x0e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x55, 0x73, + 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x18, + 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x54, + 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x54, + 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1c, 0x0a, + 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x45, + 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x53, + 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x09, + 0x52, 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x12, 0x16, 0x0a, + 0x06, 0x49, 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x49, + 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x22, 0x74, 0x0a, 0x23, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, + 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, + 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x4d, 0x0a, 0x0b, + 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x2b, 0x2e, 0x77, 0x67, 0x2e, 0x63, 
0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, + 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x53, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x0b, + 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x22, 0x28, 0x0a, 0x26, 0x50, + 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, + 0x6f, 0x76, 0x65, 0x72, 0x61, 0x67, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x89, 0x01, 0x0a, 0x2d, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, + 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x4d, 0x0a, 0x0b, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x77, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x58, 0x0a, 0x0b, 0x41, 0x67, 0x67, 0x72, 0x65, + 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x0b, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x22, 0x28, 0x0a, 0x26, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, - 0x68, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x76, 0x65, 0x72, 0x61, - 0x67, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x89, 0x01, 0x0a, 0x2d, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, + 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 
0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0b, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x22, 0x30, 0x0a, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x58, 0x0a, 0x0b, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, - 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, - 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, - 0x49, 0x6e, 0x66, 0x6f, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, - 0x0b, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x30, 0x0a, 0x2e, - 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, - 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, - 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2a, 0x3a, - 0x0a, 0x0d, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, - 0x09, 0x0a, 0x05, 0x51, 0x55, 0x45, 0x52, 0x59, 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x4d, 0x55, - 0x54, 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x01, 0x12, 0x10, 0x0a, 0x0c, 0x53, 0x55, 0x42, 0x53, - 0x43, 0x52, 0x49, 0x50, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x02, 0x32, 0xf5, 0x02, 0x0a, 0x15, 0x47, - 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x53, 0x65, 0x72, - 0x76, 0x69, 0x63, 0x65, 0x12, 0x9e, 0x01, 0x0a, 0x15, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, - 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x3f, + 0x75, 0x65, 0x73, 0x74, 0x4d, 
0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x2a, 0x3a, 0x0a, 0x0d, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x54, 0x79, 0x70, 0x65, 0x12, 0x09, 0x0a, 0x05, 0x51, 0x55, 0x45, 0x52, 0x59, 0x10, 0x00, 0x12, + 0x0c, 0x0a, 0x08, 0x4d, 0x55, 0x54, 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x01, 0x12, 0x10, 0x0a, + 0x0c, 0x53, 0x55, 0x42, 0x53, 0x43, 0x52, 0x49, 0x50, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x02, 0x32, + 0xf5, 0x02, 0x0a, 0x15, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, + 0x63, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x9e, 0x01, 0x0a, 0x15, 0x50, 0x75, + 0x62, 0x6c, 0x69, 0x73, 0x68, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, + 0x69, 0x63, 0x73, 0x12, 0x3f, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, + 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, + 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x42, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, + 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, + 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x43, 0x6f, 0x76, 0x65, 0x72, 0x61, 0x67, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0xba, 0x01, 0x0a, 0x1f, 0x50, + 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, + 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x49, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 
0x50, 0x75, 0x62, 0x6c, - 0x69, 0x73, 0x68, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x42, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, - 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x62, - 0x6c, 0x69, 0x73, 0x68, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x76, - 0x65, 0x72, 0x61, 0x67, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0xba, 0x01, 0x0a, 0x1f, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, - 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, - 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x49, 0x2e, 0x77, 0x67, 0x2e, 0x63, + 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, + 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, + 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x4a, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x4a, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, - 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, - 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, - 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x4d, 0x65, 
0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x00, 0x42, 0xa3, 0x02, 0x0a, 0x1e, 0x63, 0x6f, 0x6d, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, - 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, - 0x63, 0x73, 0x2e, 0x76, 0x31, 0x42, 0x13, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, - 0x74, 0x72, 0x69, 0x63, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x61, 0x67, 0x69, - 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x77, 0x75, 0x6e, 0x64, 0x65, 0x72, 0x67, - 0x72, 0x61, 0x70, 0x68, 0x2f, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2f, 0x67, 0x72, 0x61, 0x70, 0x68, - 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x2f, 0x77, 0x67, 0x2f, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2f, 0x67, 0x72, 0x61, - 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2f, 0x76, 0x31, 0x3b, 0x67, - 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x76, 0x31, 0xa2, - 0x02, 0x03, 0x57, 0x43, 0x47, 0xaa, 0x02, 0x1a, 0x57, 0x67, 0x2e, 0x43, 0x6f, 0x73, 0x6d, 0x6f, - 0x2e, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, - 0x56, 0x31, 0xca, 0x02, 0x1a, 0x57, 0x67, 0x5c, 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x5c, 0x47, 0x72, - 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x5c, 0x56, 0x31, 0xe2, - 0x02, 0x26, 0x57, 0x67, 0x5c, 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x5c, 0x47, 0x72, 0x61, 0x70, 0x68, - 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x5c, 0x56, 0x31, 0x5c, 0x47, 0x50, 0x42, - 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x1d, 0x57, 0x67, 0x3a, 0x3a, 0x43, - 0x6f, 0x73, 0x6d, 0x6f, 0x3a, 0x3a, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, - 0x72, 0x69, 0x63, 0x73, 0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 
0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0xa3, 0x02, 0x0a, 0x1e, 0x63, 0x6f, 0x6d, 0x2e, + 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, + 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x42, 0x13, 0x47, 0x72, 0x61, 0x70, + 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, + 0x01, 0x5a, 0x61, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x77, 0x75, + 0x6e, 0x64, 0x65, 0x72, 0x67, 0x72, 0x61, 0x70, 0x68, 0x2f, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2f, + 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2f, 0x67, + 0x65, 0x6e, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x77, 0x67, 0x2f, 0x63, 0x6f, 0x73, 0x6d, + 0x6f, 0x2f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, + 0x2f, 0x76, 0x31, 0x3b, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, + 0x63, 0x73, 0x76, 0x31, 0xa2, 0x02, 0x03, 0x57, 0x43, 0x47, 0xaa, 0x02, 0x1a, 0x57, 0x67, 0x2e, + 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, + 0x72, 0x69, 0x63, 0x73, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x1a, 0x57, 0x67, 0x5c, 0x43, 0x6f, 0x73, + 0x6d, 0x6f, 0x5c, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, + 0x73, 0x5c, 0x56, 0x31, 0xe2, 0x02, 0x26, 0x57, 0x67, 0x5c, 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x5c, + 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x5c, 0x56, + 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x1d, + 0x57, 0x67, 0x3a, 0x3a, 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x3a, 0x3a, 0x47, 0x72, 0x61, 0x70, 0x68, + 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git 
a/graphqlmetrics/migrations/20251120223520_add_is_null_to_schema_usage.sql b/graphqlmetrics/migrations/20251120223520_add_is_null_to_schema_usage.sql new file mode 100644 index 0000000000..36885152aa --- /dev/null +++ b/graphqlmetrics/migrations/20251120223520_add_is_null_to_schema_usage.sql @@ -0,0 +1,7 @@ +-- migrate:up + +ALTER TABLE gql_metrics_schema_usage ADD COLUMN IF NOT EXISTS IsNull bool DEFAULT false CODEC(ZSTD(3)); + +-- migrate:down + +ALTER TABLE gql_metrics_schema_usage DROP COLUMN IF EXISTS IsNull; diff --git a/graphqlmetrics/migrations/20251120223529_add_is_null_to_schema_usage_5m_90d.sql b/graphqlmetrics/migrations/20251120223529_add_is_null_to_schema_usage_5m_90d.sql new file mode 100644 index 0000000000..c77604f333 --- /dev/null +++ b/graphqlmetrics/migrations/20251120223529_add_is_null_to_schema_usage_5m_90d.sql @@ -0,0 +1,7 @@ +-- migrate:up + +ALTER TABLE gql_metrics_schema_usage_5m_90d ADD COLUMN IF NOT EXISTS IsNull bool DEFAULT false CODEC(ZSTD(3)); + +-- migrate:down + +ALTER TABLE gql_metrics_schema_usage_5m_90d DROP COLUMN IF EXISTS IsNull; diff --git a/graphqlmetrics/migrations/20251120223537_add_is_null_to_schema_usage_lite_1d_90d.sql b/graphqlmetrics/migrations/20251120223537_add_is_null_to_schema_usage_lite_1d_90d.sql new file mode 100644 index 0000000000..b5f4445867 --- /dev/null +++ b/graphqlmetrics/migrations/20251120223537_add_is_null_to_schema_usage_lite_1d_90d.sql @@ -0,0 +1,7 @@ +-- migrate:up + +ALTER TABLE gql_metrics_schema_usage_lite_1d_90d ADD COLUMN IF NOT EXISTS IsNull bool DEFAULT false CODEC(ZSTD(3)); + +-- migrate:down + +ALTER TABLE gql_metrics_schema_usage_lite_1d_90d DROP COLUMN IF EXISTS IsNull; diff --git a/graphqlmetrics/migrations/20251120223910_drop_gql_schema_usage_5m_90d_mv_for_is_null.sql b/graphqlmetrics/migrations/20251120223910_drop_gql_schema_usage_5m_90d_mv_for_is_null.sql new file mode 100644 index 0000000000..b8842304be --- /dev/null +++ 
b/graphqlmetrics/migrations/20251120223910_drop_gql_schema_usage_5m_90d_mv_for_is_null.sql @@ -0,0 +1,50 @@ +-- migrate:up + +DROP VIEW IF EXISTS gql_metrics_schema_usage_5m_90d_mv; + +-- migrate:down + +CREATE MATERIALIZED VIEW IF NOT EXISTS gql_metrics_schema_usage_5m_90d_mv TO gql_metrics_schema_usage_5m_90d AS +SELECT + toStartOfFiveMinute(Timestamp) as Timestamp, + toLowCardinality(OrganizationID) as OrganizationID, + toLowCardinality(FederatedGraphID) as FederatedGraphID, + toLowCardinality(RouterConfigVersion) as RouterConfigVersion, + toLowCardinality(OperationHash) as OperationHash, + toLowCardinality(OperationName) as OperationName, + toLowCardinality(OperationType) as OperationType, + Path as Path, + toLowCardinality(arrayElement(Path, -1)) as FieldName, + TypeNames as TypeNames, + toLowCardinality(NamedType) as NamedType, + toLowCardinality(ClientName) as ClientName, + toLowCardinality(ClientVersion) as ClientVersion, + SubgraphIDs as SubgraphIDs, + IsArgument as IsArgument, + IsInput as IsInput, + sum(Count) as TotalUsages, + sumIf(Count, HasError OR position(HttpStatusCode,'5') = 1 OR position(HttpStatusCode,'4') = 1) as TotalErrors, + sumIf(Count, position(HttpStatusCode,'4') = 1) AS TotalClientErrors, + IsIndirectFieldUsage as IsIndirectFieldUsage +FROM gql_metrics_schema_usage +GROUP BY + Timestamp, + OperationHash, + OperationName, + OperationType, + FederatedGraphID, + RouterConfigVersion, + OrganizationID, + OperationType, + ClientName, + ClientVersion, + Path, + FieldName, + NamedType, + TypeNames, + SubgraphIDs, + IsArgument, + IsInput, + IsIndirectFieldUsage +ORDER BY + Timestamp; diff --git a/graphqlmetrics/migrations/20251120223951_recreate_gql_schema_usage_5m_90d_mv_with_is_null.sql b/graphqlmetrics/migrations/20251120223951_recreate_gql_schema_usage_5m_90d_mv_with_is_null.sql new file mode 100644 index 0000000000..f28d6f9d6d --- /dev/null +++ b/graphqlmetrics/migrations/20251120223951_recreate_gql_schema_usage_5m_90d_mv_with_is_null.sql 
@@ -0,0 +1,51 @@ +-- migrate:up + +CREATE MATERIALIZED VIEW IF NOT EXISTS gql_metrics_schema_usage_5m_90d_mv TO gql_metrics_schema_usage_5m_90d AS +SELECT + toStartOfFiveMinute(Timestamp) as Timestamp, + toLowCardinality(OrganizationID) as OrganizationID, + toLowCardinality(FederatedGraphID) as FederatedGraphID, + toLowCardinality(RouterConfigVersion) as RouterConfigVersion, + toLowCardinality(OperationHash) as OperationHash, + toLowCardinality(OperationName) as OperationName, + toLowCardinality(OperationType) as OperationType, + Path as Path, + toLowCardinality(arrayElement(Path, -1)) as FieldName, + TypeNames as TypeNames, + toLowCardinality(NamedType) as NamedType, + toLowCardinality(ClientName) as ClientName, + toLowCardinality(ClientVersion) as ClientVersion, + SubgraphIDs as SubgraphIDs, + IsArgument as IsArgument, + IsInput as IsInput, + sum(Count) as TotalUsages, + sumIf(Count, HasError OR position(HttpStatusCode,'5') = 1 OR position(HttpStatusCode,'4') = 1) as TotalErrors, + sumIf(Count, position(HttpStatusCode,'4') = 1) AS TotalClientErrors, + IsIndirectFieldUsage as IsIndirectFieldUsage, + IsNull as IsNull +FROM gql_metrics_schema_usage +GROUP BY + Timestamp, + OperationHash, + OperationName, + OperationType, + FederatedGraphID, + RouterConfigVersion, + OrganizationID, + ClientName, + ClientVersion, + Path, + FieldName, + NamedType, + TypeNames, + SubgraphIDs, + IsArgument, + IsInput, + IsIndirectFieldUsage, + IsNull +ORDER BY + Timestamp; + +-- migrate:down + +DROP VIEW IF EXISTS gql_metrics_schema_usage_5m_90d_mv; diff --git a/graphqlmetrics/migrations/20251120224005_drop_gql_schema_usage_1d_90d_mv_for_is_null.sql b/graphqlmetrics/migrations/20251120224005_drop_gql_schema_usage_1d_90d_mv_for_is_null.sql new file mode 100644 index 0000000000..adf13ebf25 --- /dev/null +++ b/graphqlmetrics/migrations/20251120224005_drop_gql_schema_usage_1d_90d_mv_for_is_null.sql @@ -0,0 +1,26 @@ +-- migrate:up + +DROP VIEW IF EXISTS 
gql_metrics_schema_usage_lite_1d_90d_mv; + +-- migrate:down + +CREATE MATERIALIZED VIEW IF NOT EXISTS gql_metrics_schema_usage_lite_1d_90d_mv TO gql_metrics_schema_usage_lite_1d_90d AS +SELECT + toStartOfDay(Timestamp) as Timestamp, + toLowCardinality(OrganizationID) as OrganizationID, + toLowCardinality(FederatedGraphID) as FederatedGraphID, + toLowCardinality(RouterConfigVersion) as RouterConfigVersion, + toLowCardinality(OperationHash) as OperationHash, + toLowCardinality(OperationName) as OperationName, + toLowCardinality(OperationType) as OperationType, + Path as Path, + toLowCardinality(arrayElement(Path, -1)) as FieldName, + TypeNames as TypeNames, + toLowCardinality(NamedType) as NamedType, + toLowCardinality(ClientName) as ClientName, + toLowCardinality(ClientVersion) as ClientVersion, + SubgraphIDs as SubgraphIDs, + IsArgument as IsArgument, + IsInput as IsInput, + IsIndirectFieldUsage as IsIndirectFieldUsage +FROM gql_metrics_schema_usage; diff --git a/graphqlmetrics/migrations/20251120224017_recreate_gql_schema_usage_1d_90d_mv_with_is_null.sql b/graphqlmetrics/migrations/20251120224017_recreate_gql_schema_usage_1d_90d_mv_with_is_null.sql new file mode 100644 index 0000000000..3f44a09646 --- /dev/null +++ b/graphqlmetrics/migrations/20251120224017_recreate_gql_schema_usage_1d_90d_mv_with_is_null.sql @@ -0,0 +1,27 @@ +-- migrate:up + +CREATE MATERIALIZED VIEW IF NOT EXISTS gql_metrics_schema_usage_lite_1d_90d_mv TO gql_metrics_schema_usage_lite_1d_90d AS +SELECT + toStartOfDay(Timestamp) as Timestamp, + toLowCardinality(OrganizationID) as OrganizationID, + toLowCardinality(FederatedGraphID) as FederatedGraphID, + toLowCardinality(RouterConfigVersion) as RouterConfigVersion, + toLowCardinality(OperationHash) as OperationHash, + toLowCardinality(OperationName) as OperationName, + toLowCardinality(OperationType) as OperationType, + Path as Path, + toLowCardinality(arrayElement(Path, -1)) as FieldName, + TypeNames as TypeNames, + toLowCardinality(NamedType) 
as NamedType, + toLowCardinality(ClientName) as ClientName, + toLowCardinality(ClientVersion) as ClientVersion, + SubgraphIDs as SubgraphIDs, + IsArgument as IsArgument, + IsInput as IsInput, + IsIndirectFieldUsage as IsIndirectFieldUsage, + IsNull as IsNull +FROM gql_metrics_schema_usage; + +-- migrate:down + +DROP VIEW IF EXISTS gql_metrics_schema_usage_lite_1d_90d_mv; diff --git a/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.proto b/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.proto index 29800621ac..5567ce6d1a 100644 --- a/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.proto +++ b/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.proto @@ -89,6 +89,11 @@ message ArgumentUsageInfo { uint64 Count = 3; // NamedType is the underlying type of the argument string NamedType = 4; + // SubgraphIDs is the list of datasource IDs (e.g subgraph ID) that the argument is used from + repeated string SubgraphIDs = 5; + // IsNull indicates whether this argument was explicitly set to null + // This is critical for detecting breaking changes when optional arguments become required + bool IsNull = 6; } message InputUsageInfo { @@ -102,6 +107,11 @@ message InputUsageInfo { string NamedType = 4; // EnumValues is an empty list if the input field is not an enum, otherwise it contains the list of used enum values repeated string EnumValues = 5; + // SubgraphIDs is the list of datasource IDs (e.g subgraph ID) that the input is used from + repeated string SubgraphIDs = 6; + // IsNull indicates whether this input was explicitly or implicitly null + // This is critical for detecting breaking changes when optional fields become required + bool IsNull = 7; } message PublishGraphQLRequestMetricsRequest { diff --git a/router/core/operation_planner.go b/router/core/operation_planner.go index 40c8d6701e..38c3b6aac5 100644 --- a/router/core/operation_planner.go +++ b/router/core/operation_planner.go @@ -83,7 +83,7 @@ func (p *OperationPlanner) preparePlan(ctx *operationContext) 
(*planWithMetaData if p.trackUsageInfo { out.typeFieldUsageInfo = graphqlschemausage.GetTypeFieldUsageInfo(preparedPlan) - out.argumentUsageInfo, err = graphqlschemausage.GetArgumentUsageInfo(&doc, p.executor.RouterSchema) + out.argumentUsageInfo, err = graphqlschemausage.GetArgumentUsageInfo(&doc, p.executor.RouterSchema, ctx.variables, preparedPlan, ctx.remapVariables) if err != nil { return nil, err } @@ -114,7 +114,7 @@ func (p *OperationPlanner) plan(opContext *operationContext, options PlanOptions if options.TrackSchemaUsageInfo { opContext.typeFieldUsageInfo = prepared.typeFieldUsageInfo opContext.argumentUsageInfo = prepared.argumentUsageInfo - opContext.inputUsageInfo, err = graphqlschemausage.GetInputUsageInfo(prepared.operationDocument, p.executor.RouterSchema, opContext.variables) + opContext.inputUsageInfo, err = graphqlschemausage.GetInputUsageInfo(prepared.operationDocument, p.executor.RouterSchema, opContext.variables, prepared.preparedPlan, opContext.remapVariables) if err != nil { return err } @@ -152,7 +152,7 @@ func (p *OperationPlanner) plan(opContext *operationContext, options PlanOptions if options.TrackSchemaUsageInfo { opContext.typeFieldUsageInfo = opContext.preparedPlan.typeFieldUsageInfo opContext.argumentUsageInfo = opContext.preparedPlan.argumentUsageInfo - opContext.inputUsageInfo, err = graphqlschemausage.GetInputUsageInfo(opContext.preparedPlan.operationDocument, p.executor.RouterSchema, opContext.variables) + opContext.inputUsageInfo, err = graphqlschemausage.GetInputUsageInfo(opContext.preparedPlan.operationDocument, p.executor.RouterSchema, opContext.variables, opContext.preparedPlan.preparedPlan, opContext.remapVariables) if err != nil { return err } diff --git a/router/debug.config.yaml b/router/debug.config.yaml index 0f241074aa..10b042397b 100644 --- a/router/debug.config.yaml +++ b/router/debug.config.yaml @@ -5,6 +5,10 @@ version: '1' +#graphql_metrics: +# enabled: true +# collector_endpoint: http://localhost:4005 + 
execution_config: file: path: './__schemas/config.json' diff --git a/router/demo.config.yaml b/router/demo.config.yaml index 9a72e31de2..ccea543c6d 100644 --- a/router/demo.config.yaml +++ b/router/demo.config.yaml @@ -19,4 +19,4 @@ events: redis: - id: my-redis urls: - - "redis://localhost:6379/2" \ No newline at end of file + - "redis://localhost:6379/2" \ No newline at end of file diff --git a/router/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go b/router/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go index 7127c79946..7ff1ce41e2 100644 --- a/router/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go +++ b/router/gen/proto/wg/cosmo/graphqlmetrics/v1/graphqlmetrics.pb.go @@ -579,6 +579,11 @@ type ArgumentUsageInfo struct { Count uint64 `protobuf:"varint,3,opt,name=Count,proto3" json:"Count,omitempty"` // NamedType is the underlying type of the argument NamedType string `protobuf:"bytes,4,opt,name=NamedType,proto3" json:"NamedType,omitempty"` + // SubgraphIDs is the list of datasource IDs (e.g subgraph ID) that the argument is used from + SubgraphIDs []string `protobuf:"bytes,5,rep,name=SubgraphIDs,proto3" json:"SubgraphIDs,omitempty"` + // IsNull indicates whether this argument was explicitly set to null + // This is critical for detecting breaking changes when optional arguments become required + IsNull bool `protobuf:"varint,6,opt,name=IsNull,proto3" json:"IsNull,omitempty"` } func (x *ArgumentUsageInfo) Reset() { @@ -641,6 +646,20 @@ func (x *ArgumentUsageInfo) GetNamedType() string { return "" } +func (x *ArgumentUsageInfo) GetSubgraphIDs() []string { + if x != nil { + return x.SubgraphIDs + } + return nil +} + +func (x *ArgumentUsageInfo) GetIsNull() bool { + if x != nil { + return x.IsNull + } + return false +} + type InputUsageInfo struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -656,6 +675,11 @@ type InputUsageInfo struct { NamedType string `protobuf:"bytes,4,opt,name=NamedType,proto3" 
json:"NamedType,omitempty"` // EnumValues is an empty list if the input field is not an enum, otherwise it contains the list of used enum values EnumValues []string `protobuf:"bytes,5,rep,name=EnumValues,proto3" json:"EnumValues,omitempty"` + // SubgraphIDs is the list of datasource IDs (e.g subgraph ID) that the input is used from + SubgraphIDs []string `protobuf:"bytes,6,rep,name=SubgraphIDs,proto3" json:"SubgraphIDs,omitempty"` + // IsNull indicates whether this input was explicitly or implicitly null + // This is critical for detecting breaking changes when optional fields become required + IsNull bool `protobuf:"varint,7,opt,name=IsNull,proto3" json:"IsNull,omitempty"` } func (x *InputUsageInfo) Reset() { @@ -725,6 +749,20 @@ func (x *InputUsageInfo) GetEnumValues() []string { return nil } +func (x *InputUsageInfo) GetSubgraphIDs() []string { + if x != nil { + return x.SubgraphIDs + } + return nil +} + +func (x *InputUsageInfo) GetIsNull() bool { + if x != nil { + return x.IsNull + } + return false +} + type PublishGraphQLRequestMetricsRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -994,91 +1032,98 @@ var file_wg_cosmo_graphqlmetrics_v1_graphqlmetrics_proto_rawDesc = []byte{ 0x72, 0x65, 0x63, 0x74, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x16, 0x49, 0x6e, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, - 0x22, 0x77, 0x0a, 0x11, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x55, 0x73, 0x61, 0x67, - 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x54, 0x79, 0x70, - 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x54, 0x79, 0x70, - 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 
0x18, 0x03, - 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x4e, - 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, - 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x22, 0x94, 0x01, 0x0a, 0x0e, 0x49, 0x6e, - 0x70, 0x75, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, - 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, 0x61, 0x74, 0x68, - 0x12, 0x1a, 0x0a, 0x08, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x08, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, - 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x43, 0x6f, 0x75, - 0x6e, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, - 0x12, 0x1e, 0x0a, 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, - 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, - 0x22, 0x74, 0x0a, 0x23, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x47, 0x72, 0x61, 0x70, 0x68, + 0x22, 0xb1, 0x01, 0x0a, 0x11, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x55, 0x73, 0x61, + 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x54, 0x79, + 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x54, 0x79, + 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1c, 0x0a, 0x09, + 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x09, 0x4e, 0x61, 0x6d, 
0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x53, 0x75, + 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x12, 0x16, 0x0a, 0x06, + 0x49, 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x49, 0x73, + 0x4e, 0x75, 0x6c, 0x6c, 0x22, 0xce, 0x01, 0x0a, 0x0e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x55, 0x73, + 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x18, + 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x54, + 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x54, + 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1c, 0x0a, + 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x45, + 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x0a, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x53, + 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x09, + 0x52, 0x0b, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x49, 0x44, 0x73, 0x12, 0x16, 0x0a, + 0x06, 0x49, 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x49, + 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x22, 0x74, 0x0a, 0x23, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, + 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, + 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x4d, 0x0a, 0x0b, + 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x55, 0x73, 0x61, 0x67, 
0x65, 0x18, 0x01, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x2b, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, + 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x53, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x0b, + 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x22, 0x28, 0x0a, 0x26, 0x50, + 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, + 0x6f, 0x76, 0x65, 0x72, 0x61, 0x67, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x89, 0x01, 0x0a, 0x2d, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, + 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x4d, 0x0a, 0x0b, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x77, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x58, 0x0a, 0x0b, 0x41, 0x67, 0x67, 0x72, 0x65, + 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x55, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x0b, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, 0x22, 0x28, 0x0a, 0x26, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, - 0x68, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x76, 0x65, 0x72, 0x61, - 0x67, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x89, 0x01, 0x0a, 0x2d, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, + 0x55, 
0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0b, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x22, 0x30, 0x0a, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x58, 0x0a, 0x0b, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, - 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, - 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x55, 0x73, 0x61, 0x67, 0x65, - 0x49, 0x6e, 0x66, 0x6f, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, - 0x0b, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x30, 0x0a, 0x2e, - 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, - 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, - 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2a, 0x3a, - 0x0a, 0x0d, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, - 0x09, 0x0a, 0x05, 0x51, 0x55, 0x45, 0x52, 0x59, 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x4d, 0x55, - 0x54, 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x01, 0x12, 0x10, 0x0a, 0x0c, 0x53, 0x55, 0x42, 0x53, - 0x43, 0x52, 0x49, 0x50, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x02, 0x32, 0xf5, 0x02, 0x0a, 0x15, 0x47, - 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x53, 0x65, 0x72, - 0x76, 0x69, 0x63, 0x65, 0x12, 0x9e, 0x01, 0x0a, 0x15, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, - 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 
0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x3f, + 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x2a, 0x3a, 0x0a, 0x0d, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x54, 0x79, 0x70, 0x65, 0x12, 0x09, 0x0a, 0x05, 0x51, 0x55, 0x45, 0x52, 0x59, 0x10, 0x00, 0x12, + 0x0c, 0x0a, 0x08, 0x4d, 0x55, 0x54, 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x01, 0x12, 0x10, 0x0a, + 0x0c, 0x53, 0x55, 0x42, 0x53, 0x43, 0x52, 0x49, 0x50, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x02, 0x32, + 0xf5, 0x02, 0x0a, 0x15, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, + 0x63, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x9e, 0x01, 0x0a, 0x15, 0x50, 0x75, + 0x62, 0x6c, 0x69, 0x73, 0x68, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, + 0x69, 0x63, 0x73, 0x12, 0x3f, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, + 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, + 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x42, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, + 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, + 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x43, 0x6f, 0x76, 0x65, 0x72, 0x61, 0x67, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0xba, 0x01, 0x0a, 0x1f, 0x50, + 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, + 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x49, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 
0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, - 0x69, 0x73, 0x68, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x42, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, - 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x62, - 0x6c, 0x69, 0x73, 0x68, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x76, - 0x65, 0x72, 0x61, 0x67, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0xba, 0x01, 0x0a, 0x1f, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, - 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, - 0x51, 0x4c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x49, 0x2e, 0x77, 0x67, 0x2e, 0x63, + 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, + 0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, + 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x4a, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 0x70, 0x68, 0x51, 0x4c, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x4a, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, - 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, - 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, - 0x74, 0x65, 0x64, 0x47, 0x72, 0x61, 
0x70, 0x68, 0x51, 0x4c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x00, 0x42, 0x9b, 0x02, 0x0a, 0x1e, 0x63, 0x6f, 0x6d, 0x2e, 0x77, 0x67, 0x2e, 0x63, 0x6f, - 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, - 0x63, 0x73, 0x2e, 0x76, 0x31, 0x42, 0x13, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, - 0x74, 0x72, 0x69, 0x63, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x59, 0x67, 0x69, - 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x77, 0x75, 0x6e, 0x64, 0x65, 0x72, 0x67, - 0x72, 0x61, 0x70, 0x68, 0x2f, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2f, 0x72, 0x6f, 0x75, 0x74, 0x65, - 0x72, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x77, 0x67, 0x2f, 0x63, - 0x6f, 0x73, 0x6d, 0x6f, 0x2f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, - 0x69, 0x63, 0x73, 0x2f, 0x76, 0x31, 0x3b, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, - 0x74, 0x72, 0x69, 0x63, 0x73, 0x76, 0x31, 0xa2, 0x02, 0x03, 0x57, 0x43, 0x47, 0xaa, 0x02, 0x1a, - 0x57, 0x67, 0x2e, 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, - 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x1a, 0x57, 0x67, 0x5c, - 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x5c, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, - 0x72, 0x69, 0x63, 0x73, 0x5c, 0x56, 0x31, 0xe2, 0x02, 0x26, 0x57, 0x67, 0x5c, 0x43, 0x6f, 0x73, - 0x6d, 0x6f, 0x5c, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, - 0x73, 0x5c, 0x56, 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0xea, 0x02, 0x1d, 0x57, 0x67, 0x3a, 0x3a, 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x3a, 0x3a, 0x47, 0x72, - 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x3a, 0x3a, 0x56, 0x31, - 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x65, 0x71, 0x75, 0x65, 
0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x9b, 0x02, 0x0a, 0x1e, 0x63, 0x6f, 0x6d, 0x2e, + 0x77, 0x67, 0x2e, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, + 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x76, 0x31, 0x42, 0x13, 0x47, 0x72, 0x61, 0x70, + 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, + 0x01, 0x5a, 0x59, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x77, 0x75, + 0x6e, 0x64, 0x65, 0x72, 0x67, 0x72, 0x61, 0x70, 0x68, 0x2f, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2f, + 0x72, 0x6f, 0x75, 0x74, 0x65, 0x72, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x2f, 0x77, 0x67, 0x2f, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x2f, 0x67, 0x72, 0x61, 0x70, 0x68, 0x71, + 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2f, 0x76, 0x31, 0x3b, 0x67, 0x72, 0x61, 0x70, + 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x76, 0x31, 0xa2, 0x02, 0x03, 0x57, + 0x43, 0x47, 0xaa, 0x02, 0x1a, 0x57, 0x67, 0x2e, 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x2e, 0x47, 0x72, + 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x56, 0x31, 0xca, + 0x02, 0x1a, 0x57, 0x67, 0x5c, 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x5c, 0x47, 0x72, 0x61, 0x70, 0x68, + 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x5c, 0x56, 0x31, 0xe2, 0x02, 0x26, 0x57, + 0x67, 0x5c, 0x43, 0x6f, 0x73, 0x6d, 0x6f, 0x5c, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, + 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x5c, 0x56, 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x1d, 0x57, 0x67, 0x3a, 0x3a, 0x43, 0x6f, 0x73, 0x6d, + 0x6f, 0x3a, 0x3a, 0x47, 0x72, 0x61, 0x70, 0x68, 0x71, 0x6c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, + 0x73, 0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/router/pkg/graphqlschemausage/schemausage.go 
b/router/pkg/graphqlschemausage/schemausage.go index 75800cc89d..f52bc7e6ec 100644 --- a/router/pkg/graphqlschemausage/schemausage.go +++ b/router/pkg/graphqlschemausage/schemausage.go @@ -1,6 +1,64 @@ +// Package graphqlschemausage extracts schema usage metrics from GraphQL operations, +// associating each type, field, argument, and input with the SubgraphIDs that provide them. +// +// # Architecture +// +// The challenge: Execution plans optimize for execution, not analysis. Variables are resolved +// away, and only final field selections remain. To track usage, we must correlate three sources: +// +// 1. Execution Plan - contains field → subgraph mappings (via Source.IDs) +// 2. Operation AST - contains argument and variable usage +// 3. Variable Values - contains actual input data (nested objects, scalars, etc.) +// +// We extract subgraph IDs by building intermediate mappings: +// +// plan → field paths → variables → input fields +// +// This enables accurate federated schema usage tracking, showing which subgraphs serve which +// parts of queries, even through variables and deeply nested input objects. +// +// # Usage Tracking Types +// +// 1. TYPE & FIELD: Direct extraction from execution plan (has Source.IDs) +// 2. ARGUMENT: Correlate AST arguments with plan field paths +// 3. INPUT: Build field→subgraph and variable→subgraph maps, then traverse variable values +// +// # Input Null Tracking +// +// Input fields are ALWAYS tracked, even when null (explicit or implicit). This is critical for +// detecting breaking changes when optional fields become required. Each input usage includes an +// IsNull flag to indicate null propagation. When an input is null, the chain stops there—nested +// fields are not traversed since the parent is null. 
+// +// For list-typed fields: +// - Null list values (e.g., tags: null where tags: [String]) are tracked with IsNull=true +// - Empty lists (e.g., tags: []) are tracked with IsNull=false (field is used, just no elements) +// - Null elements within lists (e.g., tags: ["a", null, "b"]) are NOT individually tracked +// (the field-level usage already indicates the list type is being used) +// +// # Design Components +// +// The package uses a unified AST walk with pluggable collectors: +// +// - walkContext: Shared state for AST traversal (path, stacks, documents) +// - collector: Interface for components that collect data during the walk +// - unifiedVisitor: Single AST walker that delegates to multiple collectors +// +// Individual collectors handle specific concerns: +// - variableSubgraphCollector: Maps variables to subgraph IDs +// - argumentUsageCollector: Collects argument usage metrics +// - implicitInputCollector: Tracks implicit null input type arguments +// +// This design enables: +// - Single O(n) AST walk instead of multiple passes +// - Independent testing of each collector +// - Easy addition of new collectors without changing walk infrastructure package graphqlschemausage import ( + "bytes" + "strings" + "github.com/wundergraph/astjson" "github.com/wundergraph/graphql-go-tools/v2/pkg/ast" "github.com/wundergraph/graphql-go-tools/v2/pkg/astvisitor" @@ -11,6 +69,11 @@ import ( graphqlmetrics "github.com/wundergraph/cosmo/router/gen/proto/wg/cosmo/graphqlmetrics/v1" ) +// ============================================ +// Public API +// ============================================ + +// GetTypeFieldUsageInfo extracts type and field usage from the execution plan. 
func GetTypeFieldUsageInfo(operationPlan plan.Plan) []*TypeFieldUsageInfo { visitor := typeFieldUsageInfoVisitor{} switch p := operationPlan.(type) { @@ -22,6 +85,76 @@ func GetTypeFieldUsageInfo(operationPlan plan.Plan) []*TypeFieldUsageInfo { return visitor.typeFieldUsageInfo } +// GetArgumentUsageInfo extracts argument usage by correlating AST arguments with execution plan +// field paths. Includes null tracking for both inline and variable-based argument values. +// +// The variables parameter can be nil, which is treated as "no variables provided". When nil, +// null detection for variable-based arguments will default to false (cannot determine nullness). +func GetArgumentUsageInfo(operation, definition *ast.Document, variables *astjson.Value, operationPlan plan.Plan, remapVariables map[string]string) ([]*graphqlmetrics.ArgumentUsageInfo, error) { + fieldSubgraphMap := buildFieldSubgraphIDMap(operationPlan) + nullDetector := newNullValueDetector(operation, variables, remapVariables) + + // Create argument collector (no variable mapping needed for argument usage) + argCollector := newArgumentUsageCollector(operation, definition, nullDetector) + + // Run unified walk + ctx := newWalkContext(operation, definition) + err := runUnifiedWalk(ctx, argCollector) + if err != nil { + return nil, err + } + + // Finalize argument usage with subgraph IDs + argCollector.finalizeSubgraphIDs(fieldSubgraphMap) + + return argCollector.usage, nil +} + +// GetInputUsageInfo extracts input usage by traversing variable values. Tracks both explicit +// nulls ({"field": null}) and implicit nulls (missing fields) for breaking change detection. +// Also tracks input usage for implicitly null input type arguments (arguments not provided). +// +// The variables parameter can be nil, which is treated as "no variables provided". When nil, +// input object types are still tracked with IsNull=true for breaking change detection. 
+func GetInputUsageInfo(operation, definition *ast.Document, variables *astjson.Value, operationPlan plan.Plan, remapVariables map[string]string) ([]*graphqlmetrics.InputUsageInfo, error) { + fieldSubgraphMap := buildFieldSubgraphIDMap(operationPlan) + nullDetector := newNullValueDetector(operation, variables, remapVariables) + + // Create collectors + varCollector := newVariableSubgraphCollector(operation, fieldSubgraphMap) + inputCollector := newImplicitInputCollector(definition) + + // Run unified walk + ctx := newWalkContext(operation, definition) + err := runUnifiedWalk(ctx, varCollector, inputCollector) + if err != nil { + return nil, err + } + + // Build subgraph mapper from collected variable mappings + subgraphMapper := &subgraphMapper{ + fieldToSubgraphs: fieldSubgraphMap, + variableToSubgraphs: varCollector.variableMap, + } + + // Create input traverser and process variable definitions + traverser := newInputTraverser(definition, subgraphMapper) + + // Track input usage from variable definitions + for i := range operation.VariableDefinitions { + processVariableDefinition(traverser, operation, definition, variables, nullDetector, subgraphMapper, i) + } + + // Finalize implicit input usage with subgraph IDs + inputCollector.finalizeUsage(traverser, fieldSubgraphMap) + + return traverser.usage, nil +} + +// ============================================ +// Type Field Usage +// ============================================ + // An array of TypeFieldUsageInfo, with a method to convert it into a []*graphqlmetrics.TypeFieldUsageInfo type TypeFieldMetrics []*TypeFieldUsageInfo @@ -73,17 +206,16 @@ func (t *TypeFieldUsageInfo) IntoGraphQLMetrics() *graphqlmetrics.TypeFieldUsage } } +// typeFieldUsageInfoVisitor walks the execution plan to extract type and field usage. type typeFieldUsageInfoVisitor struct { typeFieldUsageInfo []*TypeFieldUsageInfo } +// visitNode recursively traverses the resolve tree to extract field usage info. 
func (p *typeFieldUsageInfoVisitor) visitNode(node resolve.Node, path []string) { switch t := node.(type) { case *resolve.Object: - // Pre-allocate the typeFieldUsageInfo slice with a reasonable capacity - // to reduce allocations during traversal if p.typeFieldUsageInfo == nil { - // Estimate: average query has ~20-50 fields p.typeFieldUsageInfo = make([]*TypeFieldUsageInfo, 0, 32) } @@ -92,7 +224,6 @@ func (p *typeFieldUsageInfoVisitor) visitNode(node resolve.Node, path []string) continue } - // create a new slice with exact capacity and copy elements pathCopy := make([]string, len(path)+1) copy(pathCopy, path) pathCopy[len(path)] = field.Info.Name @@ -120,155 +251,843 @@ func (p *typeFieldUsageInfoVisitor) visitNode(node resolve.Node, path []string) } } -func GetArgumentUsageInfo(operation, definition *ast.Document) ([]*graphqlmetrics.ArgumentUsageInfo, error) { +// ============================================ +// Unified AST Walk Infrastructure +// ============================================ + +// walkContext provides shared state for AST traversal. +// It manages common resources like path building and enclosing type tracking +// that multiple collectors need during the walk. 
+type walkContext struct { + walker *astvisitor.Walker + operation *ast.Document + definition *ast.Document + pathBuilder *pathBuilder + enclosingStack []ast.Node // Stack of enclosing type definitions + argumentsStack []map[string]struct{} // Stack tracking provided arguments per field +} + +func newWalkContext(operation, definition *ast.Document) *walkContext { + return &walkContext{ + operation: operation, + definition: definition, + pathBuilder: newPathBuilder(8), + enclosingStack: make([]ast.Node, 0, 8), + argumentsStack: make([]map[string]struct{}, 0, 8), + } +} + +// PathKey returns the current field path as a dot-separated string +func (c *walkContext) PathKey() string { + return c.pathBuilder.key() +} + +// CurrentEnclosingNode returns the enclosing type definition for the current field +func (c *walkContext) CurrentEnclosingNode() (ast.Node, bool) { + if len(c.enclosingStack) == 0 { + return ast.Node{}, false + } + return c.enclosingStack[len(c.enclosingStack)-1], true +} + +// CurrentProvidedArguments returns the set of provided arguments for the current field +func (c *walkContext) CurrentProvidedArguments() map[string]struct{} { + if len(c.argumentsStack) == 0 { + return nil + } + return c.argumentsStack[len(c.argumentsStack)-1] +} + +// TrackProvidedArgument records that an argument was provided for the current field +func (c *walkContext) TrackProvidedArgument(argName string) { + if len(c.argumentsStack) == 0 { + return + } + stackIdx := len(c.argumentsStack) - 1 + if c.argumentsStack[stackIdx] == nil { + c.argumentsStack[stackIdx] = make(map[string]struct{}, 4) + } + c.argumentsStack[stackIdx][argName] = struct{}{} +} + +// collector is the interface for components that collect data during AST traversal. +// Each collector handles a specific concern (variable mapping, argument usage, etc.) +// and can be tested independently. 
+type collector interface { + // EnterField is called when entering a field during AST traversal + EnterField(ctx *walkContext, ref int) + // LeaveField is called when leaving a field during AST traversal + LeaveField(ctx *walkContext, ref int) + // EnterArgument is called when entering an argument during AST traversal + EnterArgument(ctx *walkContext, ref int) +} + +// unifiedVisitor walks the AST once and delegates to multiple collectors. +// It manages the shared walk context and invokes collectors at each AST node. +type unifiedVisitor struct { + ctx *walkContext + collectors []collector +} + +func (v *unifiedVisitor) EnterField(ref int) { + // Update shared context + v.ctx.enclosingStack = append(v.ctx.enclosingStack, v.ctx.walker.EnclosingTypeDefinition) + v.ctx.argumentsStack = append(v.ctx.argumentsStack, nil) + fieldName := v.ctx.operation.FieldNameString(ref) + v.ctx.pathBuilder.push(fieldName) + + // Delegate to collectors + for _, c := range v.collectors { + c.EnterField(v.ctx, ref) + } +} + +func (v *unifiedVisitor) LeaveField(ref int) { + // Delegate to collectors first (they may need context state) + for _, c := range v.collectors { + c.LeaveField(v.ctx, ref) + } + + // Update shared context + v.ctx.pathBuilder.pop() + if len(v.ctx.enclosingStack) > 0 { + v.ctx.enclosingStack = v.ctx.enclosingStack[:len(v.ctx.enclosingStack)-1] + } + if len(v.ctx.argumentsStack) > 0 { + v.ctx.argumentsStack = v.ctx.argumentsStack[:len(v.ctx.argumentsStack)-1] + } +} + +func (v *unifiedVisitor) EnterArgument(ref int) { + // Track provided argument in shared context + argName := v.ctx.operation.ArgumentNameBytes(ref) + anc := v.ctx.walker.Ancestors[len(v.ctx.walker.Ancestors)-1] + if anc.Kind == ast.NodeKindField { + v.ctx.TrackProvidedArgument(string(argName)) + } + + // Delegate to collectors + for _, c := range v.collectors { + c.EnterArgument(v.ctx, ref) + } +} + +// runUnifiedWalk executes a single AST walk with the given collectors. 
+func runUnifiedWalk(ctx *walkContext, collectors ...collector) error { walker := astvisitor.NewWalker(48) - visitor := &argumentUsageInfoVisitor{ - definition: definition, - operation: operation, - walker: &walker, - // Pre-allocate with reasonable capacity to reduce allocations - usage: make([]*graphqlmetrics.ArgumentUsageInfo, 0, 16), + ctx.walker = &walker + + visitor := &unifiedVisitor{ + ctx: ctx, + collectors: collectors, } - walker.RegisterEnterArgumentVisitor(visitor) + walker.RegisterEnterFieldVisitor(visitor) + walker.RegisterLeaveFieldVisitor(visitor) + walker.RegisterEnterArgumentVisitor(visitor) + rep := &operationreport.Report{} - walker.Walk(operation, definition, rep) + walker.Walk(ctx.operation, ctx.definition, rep) if rep.HasErrors() { - return nil, rep + return rep + } + return nil +} + +// ============================================ +// Path Builder (Shared Infrastructure) +// ============================================ + +// pathBuilder provides reusable path stack operations for tracking field paths during traversal. +type pathBuilder struct { + stack []string +} + +func newPathBuilder(capacity int) *pathBuilder { + return &pathBuilder{stack: make([]string, 0, capacity)} +} + +func (p *pathBuilder) push(segment string) { + p.stack = append(p.stack, segment) +} + +func (p *pathBuilder) pop() { + if len(p.stack) > 0 { + p.stack = p.stack[:len(p.stack)-1] + } +} + +func (p *pathBuilder) key() string { + return strings.Join(p.stack, ".") +} + +// ============================================ +// Null Value Detector (Shared Infrastructure) +// ============================================ + +// nullValueDetector handles null detection for inline values, variables, and name remapping. 
+type nullValueDetector struct { + operation *ast.Document + variables *astjson.Value + remapVariables map[string]string +} + +func newNullValueDetector(operation *ast.Document, variables *astjson.Value, remapVariables map[string]string) *nullValueDetector { + return &nullValueDetector{ + operation: operation, + variables: variables, + remapVariables: remapVariables, } - return visitor.usage, nil } -type argumentUsageInfoVisitor struct { - walker *astvisitor.Walker - definition, operation *ast.Document - fieldEnclosingNode ast.Node - usage []*graphqlmetrics.ArgumentUsageInfo +// isValueNull checks if an argument/variable value is null +func (n *nullValueDetector) isValueNull(value ast.Value) bool { + if value.Kind == ast.ValueKindNull { + return true + } + + if value.Kind == ast.ValueKindVariable && n.variables != nil { + varName := n.operation.VariableValueNameString(value.Ref) + return n.isVariableNull(varName) + } + + return false } -func (a *argumentUsageInfoVisitor) EnterField(_ int) { - a.fieldEnclosingNode = a.walker.EnclosingTypeDefinition +// isVariableNull checks if a variable (by name) has a null value +func (n *nullValueDetector) isVariableNull(varName string) bool { + originalVarName := n.getOriginalVariableName(varName) + jsonField := n.variables.Get(originalVarName) + return jsonField != nil && jsonField.Type() == astjson.TypeNull } -func (a *argumentUsageInfoVisitor) EnterArgument(ref int) { +// getOriginalVariableName maps normalized variable names back to originals +func (n *nullValueDetector) getOriginalVariableName(varName string) string { + if n.remapVariables != nil { + if remapped, exists := n.remapVariables[varName]; exists { + return remapped + } + } + return varName +} + +// ============================================ +// Subgraph Mapper (Shared Infrastructure) +// ============================================ + +// subgraphMapper maps field paths and variable names to their subgraph IDs. 
+type subgraphMapper struct { + fieldToSubgraphs map[string][]string + variableToSubgraphs map[string][]string +} + +// getVariableSubgraphs returns subgraph IDs for a variable +func (s *subgraphMapper) getVariableSubgraphs(varName string) []string { + return s.variableToSubgraphs[varName] +} + +// buildFieldSubgraphIDMap extracts field → subgraph mappings from the execution plan. +func buildFieldSubgraphIDMap(operationPlan plan.Plan) map[string][]string { + collector := &subgraphIDCollector{ + fieldMap: make(map[string][]string), + pathStack: make([]string, 0, 8), + } + switch p := operationPlan.(type) { + case *plan.SynchronousResponsePlan: + collector.collectFromNode(p.Response.Data) + case *plan.SubscriptionResponsePlan: + collector.collectFromNode(p.Response.Response.Data) + } + return collector.fieldMap +} + +// subgraphIDCollector walks the execution plan to extract field path → subgraph ID mappings. +type subgraphIDCollector struct { + fieldMap map[string][]string + pathStack []string +} + +// collectFromNode recursively extracts field → subgraph ID mappings from the resolve tree. +func (c *subgraphIDCollector) collectFromNode(node resolve.Node) { + switch t := node.(type) { + case *resolve.Object: + for _, field := range t.Fields { + if field.Info == nil { + continue + } + c.pathStack = append(c.pathStack, field.Info.Name) + pathKey := strings.Join(c.pathStack, ".") + c.fieldMap[pathKey] = field.Info.Source.IDs + c.collectFromNode(field.Value) + c.pathStack = c.pathStack[:len(c.pathStack)-1] + } + case *resolve.Array: + c.collectFromNode(t.Item) + } +} + +// mergeSubgraphIDs combines two slices of subgraph IDs, removing duplicates. 
+func mergeSubgraphIDs(a, b []string) []string { + if len(a) == 0 { + return b + } + if len(b) == 0 { + return a + } + + seen := make(map[string]bool, len(a)+len(b)) + result := make([]string, 0, len(a)+len(b)) + + for _, id := range a { + if !seen[id] { + seen[id] = true + result = append(result, id) + } + } + + for _, id := range b { + if !seen[id] { + seen[id] = true + result = append(result, id) + } + } + + return result +} + +// ============================================ +// Variable Subgraph Collector +// ============================================ + +// variableSubgraphCollector maps variable names to subgraph IDs by tracking +// which fields use each variable. Implements the collector interface. +type variableSubgraphCollector struct { + operation *ast.Document + fieldSubgraphMap map[string][]string + variableMap map[string][]string +} + +func newVariableSubgraphCollector(operation *ast.Document, fieldSubgraphMap map[string][]string) *variableSubgraphCollector { + return &variableSubgraphCollector{ + operation: operation, + fieldSubgraphMap: fieldSubgraphMap, + variableMap: make(map[string][]string), + } +} + +func (v *variableSubgraphCollector) EnterField(_ *walkContext, _ int) { + // No action needed - context handles path building +} + +func (v *variableSubgraphCollector) LeaveField(_ *walkContext, _ int) { + // No action needed - context handles path building +} + +func (v *variableSubgraphCollector) EnterArgument(ctx *walkContext, ref int) { + arg := v.operation.Arguments[ref] + + if arg.Value.Kind != ast.ValueKindVariable { + return + } + + varName := v.operation.VariableValueNameString(arg.Value.Ref) + if varName == "" { + return + } + + pathKey := ctx.PathKey() + if subgraphIDs, exists := v.fieldSubgraphMap[pathKey]; exists { + v.variableMap[varName] = mergeSubgraphIDs(v.variableMap[varName], subgraphIDs) + } +} + +// ============================================ +// Argument Usage Collector +// ============================================ + +// 
argumentUsageCollector collects argument usage metrics during AST traversal. +// It tracks both provided arguments and implicit null arguments. +// Implements the collector interface. +type argumentUsageCollector struct { + operation *ast.Document + definition *ast.Document + nullDetector *nullValueDetector + usage []*graphqlmetrics.ArgumentUsageInfo + // Temporary storage for path keys, resolved after walk when subgraph map is complete + pathKeyPerUsage []string +} + +func newArgumentUsageCollector(operation, definition *ast.Document, nullDetector *nullValueDetector) *argumentUsageCollector { + return &argumentUsageCollector{ + operation: operation, + definition: definition, + nullDetector: nullDetector, + usage: make([]*graphqlmetrics.ArgumentUsageInfo, 0, 16), + pathKeyPerUsage: make([]string, 0, 16), + } +} + +func (a *argumentUsageCollector) EnterField(_ *walkContext, _ int) { + // No action needed - context handles path and stack management +} + +func (a *argumentUsageCollector) LeaveField(ctx *walkContext, ref int) { + // Track implicit null arguments (defined in schema but not provided) + a.trackImplicitNullArguments(ctx, ref) +} + +func (a *argumentUsageCollector) EnterArgument(ctx *walkContext, ref int) { argName := a.operation.ArgumentNameBytes(ref) - anc := a.walker.Ancestors[len(a.walker.Ancestors)-1] + anc := ctx.walker.Ancestors[len(ctx.walker.Ancestors)-1] if anc.Kind != ast.NodeKindField { return } + + enclosingNode, ok := ctx.CurrentEnclosingNode() + if !ok { + return + } + fieldName := a.operation.FieldNameBytes(anc.Ref) - enclosingTypeName := a.definition.NodeNameBytes(a.fieldEnclosingNode) - argDef := a.definition.NodeFieldDefinitionArgumentDefinitionByName(a.fieldEnclosingNode, fieldName, argName) + enclosingTypeName := a.definition.NodeNameBytes(enclosingNode) + argDef := a.definition.NodeFieldDefinitionArgumentDefinitionByName(enclosingNode, fieldName, argName) if argDef == -1 { return } argType := 
a.definition.InputValueDefinitionType(argDef) typeName := a.definition.ResolveTypeNameBytes(argType) + // Check if argument is null + arg := a.operation.Arguments[ref] + isNull := a.nullDetector.isValueNull(arg.Value) + + // Store usage info (subgraph IDs will be resolved later) a.usage = append(a.usage, &graphqlmetrics.ArgumentUsageInfo{ Path: []string{string(fieldName), string(argName)}, TypeName: string(enclosingTypeName), NamedType: string(typeName), + IsNull: isNull, }) + a.pathKeyPerUsage = append(a.pathKeyPerUsage, ctx.PathKey()) } -func GetInputUsageInfo(operation, definition *ast.Document, variables *astjson.Value) ([]*graphqlmetrics.InputUsageInfo, error) { - visitor := &inputUsageInfoVisitor{ - operation: operation, - definition: definition, - variables: variables, - // Pre-allocate with reasonable capacity to reduce allocations - usage: make([]*graphqlmetrics.InputUsageInfo, 0, 16), +// trackImplicitNullArguments tracks arguments defined in schema but not provided in operation. 
+func (a *argumentUsageCollector) trackImplicitNullArguments(ctx *walkContext, fieldRef int) { + enclosingNode, ok := ctx.CurrentEnclosingNode() + if !ok || enclosingNode.Kind == ast.NodeKindUnknown { + return } - for i := range operation.VariableDefinitions { - visitor.EnterVariableDefinition(i) + + fieldName := a.operation.FieldNameBytes(fieldRef) + // Skip introspection fields + if len(fieldName) > 1 && fieldName[0] == '_' && fieldName[1] == '_' { + return + } + + enclosingTypeName := a.definition.NodeNameBytes(enclosingNode) + + // Find all arguments defined for this field + argumentRefs := getFieldArgumentRefs(a.definition, enclosingNode, fieldName) + + // Get provided arguments from context + providedArguments := ctx.CurrentProvidedArguments() + + pathKey := ctx.PathKey() + + // Track arguments that are defined but not provided + for _, argRef := range argumentRefs { + argName := string(a.definition.InputValueDefinitionNameString(argRef)) + + if providedArguments != nil { + if _, provided := providedArguments[argName]; provided { + continue + } + } + + argType := a.definition.InputValueDefinitionType(argRef) + typeName := a.definition.ResolveTypeNameString(argType) + + a.usage = append(a.usage, &graphqlmetrics.ArgumentUsageInfo{ + Path: []string{string(fieldName), argName}, + TypeName: string(enclosingTypeName), + NamedType: typeName, + IsNull: true, + }) + a.pathKeyPerUsage = append(a.pathKeyPerUsage, pathKey) } - return visitor.usage, nil } -type inputUsageInfoVisitor struct { - definition, operation *ast.Document - variables *astjson.Value - usage []*graphqlmetrics.InputUsageInfo +// finalizeSubgraphIDs resolves subgraph IDs for all collected usage after the walk completes. 
+func (a *argumentUsageCollector) finalizeSubgraphIDs(fieldSubgraphMap map[string][]string) { + for i, pathKey := range a.pathKeyPerUsage { + a.usage[i].SubgraphIDs = fieldSubgraphMap[pathKey] + } } -func (v *inputUsageInfoVisitor) EnterVariableDefinition(ref int) { - varTypeRef := v.operation.VariableDefinitions[ref].Type - varName := v.operation.VariableValueNameString(v.operation.VariableDefinitions[ref].VariableValue.Ref) - varTypeName := v.operation.ResolveTypeNameString(varTypeRef) - jsonField := v.variables.Get(varName) - if jsonField == nil { +// ============================================ +// Implicit Input Collector +// ============================================ + +// implicitInputUsage stores data needed to finalize implicit input usage after the walk. +type implicitInputUsage struct { + typeName string + pathKey string +} + +// implicitInputCollector tracks implicit null input type arguments during AST traversal. +// Implements the collector interface. +type implicitInputCollector struct { + definition *ast.Document + implicitInputs []implicitInputUsage +} + +func newImplicitInputCollector(definition *ast.Document) *implicitInputCollector { + return &implicitInputCollector{ + definition: definition, + implicitInputs: make([]implicitInputUsage, 0, 8), + } +} + +func (c *implicitInputCollector) EnterField(_ *walkContext, _ int) { + // No action needed +} + +func (c *implicitInputCollector) LeaveField(ctx *walkContext, ref int) { + c.trackImplicitInputTypeArguments(ctx, ref) +} + +func (c *implicitInputCollector) EnterArgument(_ *walkContext, _ int) { + // Argument tracking is handled by walkContext +} + +func (c *implicitInputCollector) trackImplicitInputTypeArguments(ctx *walkContext, fieldRef int) { + enclosingNode, ok := ctx.CurrentEnclosingNode() + if !ok || enclosingNode.Kind == ast.NodeKindUnknown { return } - v.traverseVariable(jsonField, varName, varTypeName, "") + + fieldName := ctx.operation.FieldNameBytes(fieldRef) + // Skip introspection 
fields + if len(fieldName) > 1 && fieldName[0] == '_' && fieldName[1] == '_' { + return + } + + // Find all arguments defined for this field + argumentRefs := getFieldArgumentRefs(c.definition, enclosingNode, fieldName) + + providedArgs := ctx.CurrentProvidedArguments() + pathKey := ctx.PathKey() + + // Track input types for implicitly null arguments + for _, argRef := range argumentRefs { + argName := string(c.definition.InputValueDefinitionNameString(argRef)) + + if providedArgs != nil { + if _, provided := providedArgs[argName]; provided { + continue + } + } + + argType := c.definition.InputValueDefinitionType(argRef) + typeName := c.definition.ResolveTypeNameString(argType) + + // Check if this is an input object type + defNode, ok := c.definition.NodeByNameStr(typeName) + if !ok || defNode.Kind != ast.NodeKindInputObjectTypeDefinition { + continue + } + + c.implicitInputs = append(c.implicitInputs, implicitInputUsage{ + typeName: typeName, + pathKey: pathKey, + }) + } +} + +// getFieldArgumentRefs returns argument definition refs for a field in the schema. +// Shared helper used by both argumentUsageCollector and implicitInputCollector. 
+func getFieldArgumentRefs(definition *ast.Document, enclosingNode ast.Node, fieldName []byte) []int { + var argumentRefs []int + switch enclosingNode.Kind { + case ast.NodeKindObjectTypeDefinition: + fieldDefs := definition.ObjectTypeDefinitions[enclosingNode.Ref].FieldsDefinition.Refs + for _, fieldDefRef := range fieldDefs { + fieldDef := definition.FieldDefinitions[fieldDefRef] + if bytes.Equal(definition.FieldDefinitionNameBytes(fieldDefRef), fieldName) { + if fieldDef.HasArgumentsDefinitions { + argumentRefs = fieldDef.ArgumentsDefinition.Refs + } + break + } + } + case ast.NodeKindInterfaceTypeDefinition: + fieldDefs := definition.InterfaceTypeDefinitions[enclosingNode.Ref].FieldsDefinition.Refs + for _, fieldDefRef := range fieldDefs { + fieldDef := definition.FieldDefinitions[fieldDefRef] + if bytes.Equal(definition.FieldDefinitionNameBytes(fieldDefRef), fieldName) { + if fieldDef.HasArgumentsDefinitions { + argumentRefs = fieldDef.ArgumentsDefinition.Refs + } + break + } + } + } + return argumentRefs +} + +// finalizeUsage adds implicit input usage to the traverser with resolved subgraph IDs. +func (c *implicitInputCollector) finalizeUsage(traverser *inputTraverser, fieldSubgraphMap map[string][]string) { + for _, input := range c.implicitInputs { + subgraphIDs := fieldSubgraphMap[input.pathKey] + traverser.appendUniqueUsage(&graphqlmetrics.InputUsageInfo{ + NamedType: input.typeName, + Path: []string{input.typeName}, + SubgraphIDs: subgraphIDs, + IsNull: true, + }) + } +} + +// ============================================ +// Input Type Resolver +// ============================================ + +// inputTypeResolver resolves input object field definitions from the schema. 
+type inputTypeResolver struct { + definition *ast.Document +} + +func newInputTypeResolver(definition *ast.Document) *inputTypeResolver { + return &inputTypeResolver{definition: definition} +} + +// resolveInputFields returns all field definitions for an input object type +func (r *inputTypeResolver) resolveInputFields(typeName string) []inputFieldInfo { + defNode, ok := r.definition.NodeByNameStr(typeName) + if !ok || defNode.Kind != ast.NodeKindInputObjectTypeDefinition { + return nil + } + + inputObjectDef := r.definition.InputObjectTypeDefinitions[defNode.Ref] + fields := make([]inputFieldInfo, 0, len(inputObjectDef.InputFieldsDefinition.Refs)) + + for _, fieldRef := range inputObjectDef.InputFieldsDefinition.Refs { + fieldDef := r.definition.InputValueDefinitions[fieldRef] + fields = append(fields, inputFieldInfo{ + name: string(r.definition.Input.ByteSlice(fieldDef.Name)), + typeName: r.definition.ResolveTypeNameString(fieldDef.Type), + }) + } + + return fields +} + +// getNodeRef returns the node ref for a type by name +func (r *inputTypeResolver) getNodeRef(typeName string) int { + if node, ok := r.definition.NodeByNameStr(typeName); ok { + return node.Ref + } + return -1 +} + +// inputFieldInfo represents an input object field's name and type. +type inputFieldInfo struct { + name string + typeName string +} + +// ============================================ +// Input Traverser +// ============================================ + +// inputTraverser traverses JSON variable values to extract input usage metrics. +// Tracks explicit nulls, implicit nulls (missing fields), and enum values. 
+type inputTraverser struct { + definition *ast.Document + typeResolver *inputTypeResolver + subgraphMapper *subgraphMapper + currentVariableName string + usage []*graphqlmetrics.InputUsageInfo +} + +func newInputTraverser(definition *ast.Document, subgraphMapper *subgraphMapper) *inputTraverser { + return &inputTraverser{ + definition: definition, + typeResolver: newInputTypeResolver(definition), + subgraphMapper: subgraphMapper, + usage: make([]*graphqlmetrics.InputUsageInfo, 0, 16), + } } -func (v *inputUsageInfoVisitor) traverseVariable(jsonValue *astjson.Value, fieldName, typeName, parentTypeName string) { - defNode, ok := v.definition.NodeByNameStr(typeName) +// traverse handles input value traversal, dispatching to specialized handlers by type kind. +// Implements null propagation: when isNull is true, tracking stops at this level. +func (t *inputTraverser) traverse(jsonValue *astjson.Value, fieldName, typeName, parentTypeName string, isNull bool) { + usageInfo := t.createUsageInfo(fieldName, typeName, parentTypeName, isNull) + + defNode, ok := t.definition.NodeByNameStr(typeName) if !ok { + // Built-in scalar + t.appendUniqueUsage(usageInfo) return } - usageInfo := &graphqlmetrics.InputUsageInfo{ + + // If null, track and stop propagation + if isNull { + t.appendUniqueUsage(usageInfo) + return + } + + // Dispatch based on type kind + switch defNode.Kind { + case ast.NodeKindInputObjectTypeDefinition: + t.traverseInputObject(jsonValue, fieldName, typeName, parentTypeName, usageInfo) + case ast.NodeKindEnumTypeDefinition: + t.traverseEnum(jsonValue, usageInfo) + case ast.NodeKindScalarTypeDefinition: + // Custom scalar - just track + } + + t.appendUniqueUsage(usageInfo) +} + +// createUsageInfo builds usage info with path, type names, and subgraph IDs. 
+func (t *inputTraverser) createUsageInfo(fieldName, typeName, parentTypeName string, isNull bool) *graphqlmetrics.InputUsageInfo { + info := &graphqlmetrics.InputUsageInfo{ NamedType: typeName, + IsNull: isNull, } + if parentTypeName != "" { - usageInfo.TypeName = parentTypeName - // Pre-allocate Path slice with exact capacity - usageInfo.Path = []string{parentTypeName, fieldName} + info.TypeName = parentTypeName + info.Path = []string{parentTypeName, fieldName} + } else { + // For root input types, set Path to identify the type itself + info.Path = []string{typeName} } - switch defNode.Kind { - case ast.NodeKindInputObjectTypeDefinition: - switch jsonValue.Type() { - case astjson.TypeArray: - for _, arrayValue := range jsonValue.GetArray() { - v.traverseVariable(arrayValue, fieldName, typeName, parentTypeName) - } - case astjson.TypeObject: - o := jsonValue.GetObject() - o.Visit(func(key []byte, value *astjson.Value) { - fieldRef := v.definition.InputObjectTypeDefinitionInputValueDefinitionByName(defNode.Ref, key) - if fieldRef == -1 { - return - } - fieldTypeName := v.definition.ResolveTypeNameString(v.definition.InputValueDefinitions[fieldRef].Type) - if v.definition.TypeIsList(v.definition.InputValueDefinitions[fieldRef].Type) { - for _, arrayValue := range value.GetArray() { - v.traverseVariable(arrayValue, string(key), fieldTypeName, typeName) - } - } else { - v.traverseVariable(value, string(key), fieldTypeName, typeName) - } - }) + // Get subgraph IDs + if t.currentVariableName != "" { + info.SubgraphIDs = t.subgraphMapper.getVariableSubgraphs(t.currentVariableName) + } + + return info +} + +// traverseInputObject handles input object traversal with implicit null tracking +func (t *inputTraverser) traverseInputObject(jsonValue *astjson.Value, fieldName, typeName, parentTypeName string, usageInfo *graphqlmetrics.InputUsageInfo) { + switch jsonValue.Type() { + case astjson.TypeArray: + // Note: arrays at this level mean list of input objects (e.g., 
[InputType]) + // If we reach here, the array itself is not null, so iterate normally + for _, arrayValue := range jsonValue.GetArray() { + t.traverse(arrayValue, fieldName, typeName, parentTypeName, false) } + case astjson.TypeObject: + t.processObjectFields(jsonValue, typeName, usageInfo.SubgraphIDs) + } +} - case ast.NodeKindEnumTypeDefinition: - switch jsonValue.Type() { - case astjson.TypeString: - usageInfo.EnumValues = []string{string(jsonValue.GetStringBytes())} - case astjson.TypeArray: - arr := jsonValue.GetArray() - // Pre-allocate EnumValues slice with exact capacity - usageInfo.EnumValues = make([]string, len(arr)) - for i, arrayValue := range arr { - usageInfo.EnumValues[i] = string(arrayValue.GetStringBytes()) - } +// processObjectFields processes present fields and tracks implicit nulls (missing fields). +func (t *inputTraverser) processObjectFields(jsonValue *astjson.Value, parentTypeName string, subgraphIDs []string) { + o := jsonValue.GetObject() + presentFields := make(map[string]bool, 8) // Capacity hint: most input objects have <8 fields + + // Process present fields + o.Visit(func(key []byte, value *astjson.Value) { + keyStr := string(key) + presentFields[keyStr] = true + t.processField(keyStr, value, parentTypeName) + }) + + // Process missing fields (implicit nulls) + allFields := t.typeResolver.resolveInputFields(parentTypeName) + for _, fieldInfo := range allFields { + if !presentFields[fieldInfo.name] { + t.trackImplicitNull(fieldInfo, parentTypeName, subgraphIDs) } } +} - v.appendUniqueUsage(usageInfo) +// processField handles a single field from the JSON object +func (t *inputTraverser) processField(fieldName string, value *astjson.Value, parentTypeName string) { + nodeRef := t.typeResolver.getNodeRef(parentTypeName) + if nodeRef == -1 { + return + } + + fieldRef := t.definition.InputObjectTypeDefinitionInputValueDefinitionByName(nodeRef, []byte(fieldName)) + if fieldRef == -1 { + return + } + + fieldDef := 
t.definition.InputValueDefinitions[fieldRef] + fieldTypeName := t.definition.ResolveTypeNameString(fieldDef.Type) + fieldIsNull := value.Type() == astjson.TypeNull + + if t.definition.TypeIsList(fieldDef.Type) { + // If the list field itself is null, record a single null usage and stop. + // This is critical for breaking change detection (e.g., [String] -> [String]!). + if fieldIsNull { + t.traverse(value, fieldName, fieldTypeName, parentTypeName, true) + return + } + + // List is not null - iterate through elements + arr := value.GetArray() + if len(arr) == 0 { + // Empty list - still track the field usage for breaking change detection. + // The schema dependency exists even if no elements are provided. + t.traverse(value, fieldName, fieldTypeName, parentTypeName, false) + return + } + for _, arrayValue := range arr { + t.traverse(arrayValue, fieldName, fieldTypeName, parentTypeName, false) + } + } else { + t.traverse(value, fieldName, fieldTypeName, parentTypeName, fieldIsNull) + } } -func (v *inputUsageInfoVisitor) appendUniqueUsage(info *graphqlmetrics.InputUsageInfo) { - for _, u := range v.usage { - if v.infoEquals(u, info) { +// trackImplicitNull creates usage info for fields not present in JSON (implicitly null). 
+func (t *inputTraverser) trackImplicitNull(fieldInfo inputFieldInfo, parentTypeName string, subgraphIDs []string) { + implicitUsageInfo := &graphqlmetrics.InputUsageInfo{ + NamedType: fieldInfo.typeName, + TypeName: parentTypeName, + Path: []string{parentTypeName, fieldInfo.name}, + IsNull: true, + SubgraphIDs: subgraphIDs, + } + t.appendUniqueUsage(implicitUsageInfo) +} + +// traverseEnum handles enum value extraction +func (t *inputTraverser) traverseEnum(jsonValue *astjson.Value, usageInfo *graphqlmetrics.InputUsageInfo) { + switch jsonValue.Type() { + case astjson.TypeString: + usageInfo.EnumValues = []string{string(jsonValue.GetStringBytes())} + case astjson.TypeArray: + arr := jsonValue.GetArray() + usageInfo.EnumValues = make([]string, len(arr)) + for i, arrayValue := range arr { + usageInfo.EnumValues[i] = string(arrayValue.GetStringBytes()) + } + } +} + +// appendUniqueUsage adds usage info if not already present. +// Note: Uses O(n) linear scan for deduplication. For very large operations with thousands +// of input fields, consider using a map-based approach for O(1) lookups. +func (t *inputTraverser) appendUniqueUsage(info *graphqlmetrics.InputUsageInfo) { + for _, u := range t.usage { + if t.infoEquals(u, info) { return } } - v.usage = append(v.usage, info) + t.usage = append(t.usage, info) } -func (v *inputUsageInfoVisitor) infoEquals(a, b *graphqlmetrics.InputUsageInfo) bool { +// infoEquals checks deep equality between two InputUsageInfo instances. 
+func (t *inputTraverser) infoEquals(a, b *graphqlmetrics.InputUsageInfo) bool { if a.Count != b.Count { return false } @@ -278,6 +1097,9 @@ func (v *inputUsageInfoVisitor) infoEquals(a, b *graphqlmetrics.InputUsageInfo) if a.TypeName != b.TypeName { return false } + if a.IsNull != b.IsNull { + return false + } if len(a.Path) != len(b.Path) { return false } @@ -294,5 +1116,65 @@ func (v *inputUsageInfoVisitor) infoEquals(a, b *graphqlmetrics.InputUsageInfo) return false } } + if len(a.SubgraphIDs) != len(b.SubgraphIDs) { + return false + } + for i := range a.SubgraphIDs { + if a.SubgraphIDs[i] != b.SubgraphIDs[i] { + return false + } + } return true } + +// ============================================ +// Variable Definition Processing +// ============================================ + +// processVariableDefinition processes a variable definition and initiates input traversal. +// Tracks input usage even when the variable is not provided in the variables JSON (empty variables). +// Handles nil variables gracefully by treating them as "no variables provided". 
+func processVariableDefinition(traverser *inputTraverser, operation, definition *ast.Document, variables *astjson.Value, nullDetector *nullValueDetector, subgraphMapper *subgraphMapper, ref int) { + varDef := operation.VariableDefinitions[ref] + varTypeRef := varDef.Type + varTypeName := operation.ResolveTypeNameString(varTypeRef) + + // Get normalized variable name from AST + normalizedVarName := operation.VariableValueNameString(varDef.VariableValue.Ref) + + // Map back to original name for JSON lookup + originalVarName := nullDetector.getOriginalVariableName(normalizedVarName) + + // Look up the variable value (treat nil variables as "no variables provided") + var jsonField *astjson.Value + if variables != nil { + jsonField = variables.Get(originalVarName) + } + + if jsonField == nil { + // Variable is not provided in variables JSON (or variables is nil) - still track input type usage if it's an input object type + // This is important for breaking change detection + defNode, ok := definition.NodeByNameStr(varTypeName) + if ok && defNode.Kind == ast.NodeKindInputObjectTypeDefinition { + // Use normalized name for subgraph lookup + traverser.currentVariableName = normalizedVarName + subgraphIDs := subgraphMapper.getVariableSubgraphs(normalizedVarName) + + // Track the input type as implicitly null (variable not provided) + traverser.appendUniqueUsage(&graphqlmetrics.InputUsageInfo{ + NamedType: varTypeName, + Path: []string{varTypeName}, + SubgraphIDs: subgraphIDs, + IsNull: true, // Variable not provided + }) + } + return + } + + // Use normalized name for subgraph lookup + traverser.currentVariableName = normalizedVarName + + // Always track input usage, even when null + isNull := jsonField.Type() == astjson.TypeNull + traverser.traverse(jsonField, originalVarName, varTypeName, "", isNull) +} diff --git a/router/pkg/graphqlschemausage/schemausage_bench_test.go b/router/pkg/graphqlschemausage/schemausage_bench_test.go index efe458ebe9..3f732a1d0e 100644 --- 
a/router/pkg/graphqlschemausage/schemausage_bench_test.go +++ b/router/pkg/graphqlschemausage/schemausage_bench_test.go @@ -1,6 +1,7 @@ package graphqlschemausage import ( + "fmt" "testing" "github.com/stretchr/testify/require" @@ -61,7 +62,7 @@ func setupBenchmark(b *testing.B) (plan.Plan, *ast.Document, *ast.Document, *ast require.False(b, report.HasErrors()) // Create data source configuration - dsCfg, err := plan.NewDataSourceConfiguration( + dsCfg, err := plan.NewDataSourceConfiguration[any]( "https://swapi.dev/api", &FakeFactory[any]{upstreamSchema: &def}, &plan.DataSourceMetadata{ @@ -107,6 +108,7 @@ func setupBenchmark(b *testing.B) (plan.Plan, *ast.Document, *ast.Document, *ast func BenchmarkGetTypeFieldUsageInfo(b *testing.B) { generatedPlan, _, _, _ := setupBenchmark(b) + b.ResetTimer() b.ReportAllocs() for b.Loop() { @@ -117,12 +119,13 @@ func BenchmarkGetTypeFieldUsageInfo(b *testing.B) { // BenchmarkGetArgumentUsageInfo measures memory allocations when extracting argument usage func BenchmarkGetArgumentUsageInfo(b *testing.B) { - _, operation, definition, _ := setupBenchmark(b) + generatedPlan, operation, definition, variables := setupBenchmark(b) + b.ResetTimer() b.ReportAllocs() for b.Loop() { - result, err := GetArgumentUsageInfo(operation, definition) + result, err := GetArgumentUsageInfo(operation, definition, variables, generatedPlan, nil) if err != nil { b.Fatal(err) } @@ -132,12 +135,13 @@ func BenchmarkGetArgumentUsageInfo(b *testing.B) { // BenchmarkGetInputUsageInfo measures memory allocations when extracting input variable usage func BenchmarkGetInputUsageInfo(b *testing.B) { - _, operation, definition, variables := setupBenchmark(b) + generatedPlan, operation, definition, variables := setupBenchmark(b) + b.ResetTimer() b.ReportAllocs() for b.Loop() { - result, err := GetInputUsageInfo(operation, definition, variables) + result, err := GetInputUsageInfo(operation, definition, variables, generatedPlan, nil) if err != nil { b.Fatal(err) } @@ 
-150,6 +154,7 @@ func BenchmarkIntoGraphQLMetrics(b *testing.B) { generatedPlan, _, _, _ := setupBenchmark(b) typeFieldMetrics := TypeFieldMetrics(GetTypeFieldUsageInfo(generatedPlan)) + b.ResetTimer() b.ReportAllocs() for b.Loop() { @@ -163,6 +168,7 @@ func BenchmarkIntoGraphQLMetrics(b *testing.B) { func BenchmarkSchemaUsageEndToEnd(b *testing.B) { generatedPlan, operation, definition, variables := setupBenchmark(b) + b.ResetTimer() b.ReportAllocs() for b.Loop() { @@ -173,17 +179,159 @@ func BenchmarkSchemaUsageEndToEnd(b *testing.B) { _ = TypeFieldMetrics(typeFieldUsage).IntoGraphQLMetrics() // Extract argument usage - argUsage, err := GetArgumentUsageInfo(operation, definition) + argUsage, err := GetArgumentUsageInfo(operation, definition, variables, generatedPlan, nil) if err != nil { b.Fatal(err) } _ = argUsage // Extract input variable usage - inputUsage, err := GetInputUsageInfo(operation, definition, variables) + inputUsage, err := GetInputUsageInfo(operation, definition, variables, generatedPlan, nil) if err != nil { b.Fatal(err) } _ = inputUsage } } + +// setupLargeFieldsBenchmark creates a schema and query with many unique fields +// to test schema usage efficiency at scale +func setupLargeFieldsBenchmark(b *testing.B, fieldCount int) (plan.Plan, *ast.Document, *ast.Document, *astjson.Value) { + b.Helper() + + // Generate schema with many fields + schemaBuilder := ` + type Query { + user(id: ID!): User + } + + type User { + id: ID! + name: String! + ` + + // Add many scalar fields + for i := 0; i < fieldCount; i++ { + fieldName := fmt.Sprintf("field%d", i) + schemaBuilder += "\n\t\t\t" + fieldName + ": String" + } + + schemaBuilder += "\n\t\t}" + + // Generate query selecting all fields + queryBuilder := "query GetUser($id: ID!) 
{\n\t\tuser(id: $id) {\n\t\t\tid\n\t\t\tname\n" + for i := 0; i < fieldCount; i++ { + fieldName := fmt.Sprintf("field%d", i) + queryBuilder += "\t\t\t" + fieldName + "\n" + } + queryBuilder += "\t\t}\n\t}" + + variables := `{"id":"123"}` + + // Parse schema + def, rep := astparser.ParseGraphqlDocumentString(schemaBuilder) + require.False(b, rep.HasErrors()) + + // Parse operation + op, rep := astparser.ParseGraphqlDocumentString(queryBuilder) + require.False(b, rep.HasErrors()) + + // Merge and normalize + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(b, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(b, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(b, report.HasErrors()) + + // Build field names list for metadata + fieldNames := []string{"id", "name"} + for i := 0; i < fieldCount; i++ { + fieldName := fmt.Sprintf("field%d", i) + fieldNames = append(fieldNames, fieldName) + } + + // Create data source configuration + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "https://api.example.com", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"user"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "User", FieldNames: fieldNames}, + }, + }, + nil, + ) + require.NoError(b, err) + + // Create planner + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(b, err) + + // Generate plan + generatedPlan := planner.Plan(&op, &def, "GetUser", report) + require.False(b, report.HasErrors()) + + // Parse variables + vars, err := astjson.Parse(variables) + require.NoError(b, err) + + return generatedPlan, &op, &def, vars +} + +// BenchmarkSchemaUsageWithManyFields 
tests performance with varying numbers of unique fields +// This helps identify O(n²) bottlenecks in duplicate detection and path allocation +func BenchmarkSchemaUsageWithManyFields(b *testing.B) { + testCases := []struct { + name string + fieldCount int + }{ + {"10_fields", 10}, + {"50_fields", 50}, + {"100_fields", 100}, + {"250_fields", 250}, + {"500_fields", 500}, + } + + for _, tc := range testCases { + b.Run(tc.name, func(b *testing.B) { + generatedPlan, operation, definition, variables := setupLargeFieldsBenchmark(b, tc.fieldCount) + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + // Extract type field usage + typeFieldUsage := GetTypeFieldUsageInfo(generatedPlan) + + // Extract argument usage + argUsage, err := GetArgumentUsageInfo(operation, definition, variables, generatedPlan, nil) + if err != nil { + b.Fatal(err) + } + + // Extract input variable usage + inputUsage, err := GetInputUsageInfo(operation, definition, variables, generatedPlan, nil) + if err != nil { + b.Fatal(err) + } + + // Prevent compiler optimization + _ = typeFieldUsage + _ = argUsage + _ = inputUsage + } + }) + } +} diff --git a/router/pkg/graphqlschemausage/schemausage_test.go b/router/pkg/graphqlschemausage/schemausage_test.go index 62cf52b6ae..a51e1c0125 100644 --- a/router/pkg/graphqlschemausage/schemausage_test.go +++ b/router/pkg/graphqlschemausage/schemausage_test.go @@ -4,6 +4,7 @@ import ( "bytes" "context" "encoding/json" + "strings" "testing" "github.com/jensneuse/abstractlogger" @@ -210,9 +211,9 @@ func TestGetSchemaUsageInfo(t *testing.T) { assert.NoError(t, err) fieldUsageInfo := GetTypeFieldUsageInfo(generatedPlan) - argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def) + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, merged, generatedPlan, nil) assert.NoError(t, err) - inputUsageInfo, err := GetInputUsageInfo(&op, &def, merged) + inputUsageInfo, err := GetInputUsageInfo(&op, &def, merged, generatedPlan, nil) assert.NoError(t, err) subscription := 
&plan.SubscriptionResponsePlan{ @@ -222,9 +223,9 @@ func TestGetSchemaUsageInfo(t *testing.T) { } subscriptionFieldUsageInfo := GetTypeFieldUsageInfo(subscription) - subscriptionArgumentUsageInfo, err := GetArgumentUsageInfo(&op, &def) + subscriptionArgumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, merged, subscription, nil) assert.NoError(t, err) - subscriptionInputUsageInfo, err := GetInputUsageInfo(&op, &def, merged) + subscriptionInputUsageInfo, err := GetInputUsageInfo(&op, &def, merged, subscription, nil) assert.NoError(t, err) assert.Equal(t, fieldUsageInfo, subscriptionFieldUsageInfo) @@ -284,82 +285,135 @@ func TestGetSchemaUsageInfo(t *testing.T) { expectedArgumentUsageInfo := []*graphqlmetricsv1.ArgumentUsageInfo{ { - TypeName: "Query", - NamedType: "String", - Path: []string{"searchResults", "name"}, + TypeName: "Query", + NamedType: "String", + Path: []string{"searchResults", "name"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { - TypeName: "Query", - NamedType: "SearchFilter", - Path: []string{"searchResults", "filter"}, + TypeName: "Query", + NamedType: "SearchFilter", + Path: []string{"searchResults", "filter"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { - TypeName: "Query", - NamedType: "SearchFilter", - Path: []string{"searchResults", "filter2"}, + TypeName: "Query", + NamedType: "SearchFilter", + Path: []string{"searchResults", "filter2"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { - TypeName: "Query", - NamedType: "Episode", - Path: []string{"searchResults", "enumValue"}, + TypeName: "Query", + NamedType: "Episode", + Path: []string{"searchResults", "enumValue"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { - TypeName: "Query", - NamedType: "Episode", - Path: []string{"searchResults", "enumList"}, + TypeName: "Query", + NamedType: "Episode", + Path: []string{"searchResults", "enumList"}, + SubgraphIDs: 
[]string{"https://swapi.dev/api"}, + IsNull: false, }, { - TypeName: "Query", - NamedType: "Episode", - Path: []string{"searchResults", "enumList2"}, + TypeName: "Query", + NamedType: "Episode", + Path: []string{"searchResults", "enumList2"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { - TypeName: "Query", - NamedType: "SearchFilter", - Path: []string{"searchResults", "filterList"}, + TypeName: "Query", + NamedType: "SearchFilter", + Path: []string{"searchResults", "filterList"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { - TypeName: "Human", - NamedType: "String", - Path: []string{"inlineName", "name"}, + TypeName: "Human", + NamedType: "String", + Path: []string{"inlineName", "name"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, } expectedInputUsageInfo := []graphqlmetricsv1.InputUsageInfo{ { - NamedType: "String", + NamedType: "String", + Path: []string{"String"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, + }, + { + NamedType: "Episode", + TypeName: "SearchFilter", + EnumValues: []string{"NEWHOPE"}, + Path: []string{"SearchFilter", "enumField"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, + }, + { + // filter2 has enumField but excludeName is implicitly null + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "excludeName"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: true, }, { - NamedType: "Episode", - TypeName: "SearchFilter", - EnumValues: []string{"NEWHOPE"}, - Path: []string{"SearchFilter", "enumField"}, + NamedType: "SearchFilter", + Path: []string{"SearchFilter"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { - NamedType: "SearchFilter", + NamedType: "Episode", + Path: []string{"Episode"}, + EnumValues: []string{"EMPIRE"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { - NamedType: "Episode", - EnumValues: []string{"EMPIRE"}, + 
NamedType: "Episode", + Path: []string{"Episode"}, + EnumValues: []string{"JEDI", "EMPIRE", "NEWHOPE"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { - NamedType: "Episode", - EnumValues: []string{"JEDI", "EMPIRE", "NEWHOPE"}, + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "excludeName"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { - NamedType: "String", - TypeName: "SearchFilter", - Path: []string{"SearchFilter", "excludeName"}, + // filterList[0] has excludeName but enumField is implicitly null + NamedType: "Episode", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "enumField"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: true, }, { - NamedType: "Episode", - TypeName: "SearchFilter", - EnumValues: []string{"JEDI"}, - Path: []string{"SearchFilter", "enumField"}, + NamedType: "Episode", + TypeName: "SearchFilter", + EnumValues: []string{"JEDI"}, + Path: []string{"SearchFilter", "enumField"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, { - NamedType: "Episode", - EnumValues: []string{"JEDI", "EMPIRE"}, + NamedType: "Episode", + Path: []string{"Episode"}, + EnumValues: []string{"JEDI", "EMPIRE"}, + SubgraphIDs: []string{"https://swapi.dev/api"}, + IsNull: false, }, } @@ -456,9 +510,9 @@ func TestGetSchemaUsageInfoInterfaces(t *testing.T) { } fieldUsageInfo := GetTypeFieldUsageInfo(generatedPlan) - argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def) + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, astjson.MustParse(`{}`), generatedPlan, nil) assert.NoError(t, err) - inputUsageInfo, err := GetInputUsageInfo(&op, &def, astjson.MustParse(`{}`)) + inputUsageInfo, err := GetInputUsageInfo(&op, &def, astjson.MustParse(`{}`), generatedPlan, nil) assert.NoError(t, err) subscription := &plan.SubscriptionResponsePlan{ @@ -468,9 +522,9 @@ func TestGetSchemaUsageInfoInterfaces(t *testing.T) { } 
subscriptionFieldUsageInfo := GetTypeFieldUsageInfo(subscription) - subscriptionArgumentUsageInfo, err := GetArgumentUsageInfo(&op, &def) + subscriptionArgumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, astjson.MustParse(`{}`), subscription, nil) assert.NoError(t, err) - subscriptionInputUsageInfo, err := GetInputUsageInfo(&op, &def, astjson.MustParse(`{}`)) + subscriptionInputUsageInfo, err := GetInputUsageInfo(&op, &def, astjson.MustParse(`{}`), subscription, nil) assert.NoError(t, err) assert.Equal(t, fieldUsageInfo, subscriptionFieldUsageInfo) @@ -511,6 +565,3373 @@ func TestGetSchemaUsageInfoInterfaces(t *testing.T) { } } +// TestInputUsageWithNullVariables verifies that null variable values are tracked with IsNull flag +func TestInputUsageWithNullVariables(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: EmployeeSearchInput): [Employee!]! + } + + type Employee { + id: ID! + } + + input EmployeeSearchInput { + hasPets: Boolean + department: String + } + ` + + operation := ` + query FindEmployees($criteria: EmployeeSearchInput) { + findEmployees(criteria: $criteria) { + id + } + } + ` + + // Test with null value + variables := `{"criteria": null}` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "employees-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + 
{TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Employee", FieldNames: []string{"id"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployees", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Should track null value with IsNull flag set to true + expectedUsage := []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "EmployeeSearchInput", + Path: []string{"EmployeeSearchInput"}, + SubgraphIDs: []string{"employees-subgraph"}, + IsNull: true, + }, + } + + assert.Len(t, inputUsageInfo, len(expectedUsage), "Null variable values should be tracked with IsNull=true") + for i := range expectedUsage { + assert.JSONEq(t, prettyJSON(t, &expectedUsage[i]), prettyJSON(t, inputUsageInfo[i]), "inputUsageInfo[%d]", i) + } +} + +// TestInputUsageWithPartialNullFields verifies that null fields within input objects are tracked with IsNull flag +func TestInputUsageWithPartialNullFields(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: EmployeeSearchInput): [Employee!]! + } + + type Employee { + id: ID! 
+ } + + input EmployeeSearchInput { + hasPets: Boolean + department: String + minAge: Int + } + ` + + operation := ` + query FindEmployees($criteria: EmployeeSearchInput) { + findEmployees(criteria: $criteria) { + id + } + } + ` + + // Test with some null fields - only hasPets should be tracked, not department or minAge + variables := `{"criteria": {"hasPets": true, "department": null, "minAge": null}}` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "employees-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Employee", FieldNames: []string{"id"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployees", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Should track the input type, hasPets field, and null fields with IsNull flag + expectedUsage := []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "Boolean", + 
TypeName: "EmployeeSearchInput", + Path: []string{"EmployeeSearchInput", "hasPets"}, + SubgraphIDs: []string{"employees-subgraph"}, + IsNull: false, + }, + { + NamedType: "String", + TypeName: "EmployeeSearchInput", + Path: []string{"EmployeeSearchInput", "department"}, + SubgraphIDs: []string{"employees-subgraph"}, + IsNull: true, + }, + { + NamedType: "Int", + TypeName: "EmployeeSearchInput", + Path: []string{"EmployeeSearchInput", "minAge"}, + SubgraphIDs: []string{"employees-subgraph"}, + IsNull: true, + }, + { + NamedType: "EmployeeSearchInput", + Path: []string{"EmployeeSearchInput"}, + SubgraphIDs: []string{"employees-subgraph"}, + IsNull: false, + }, + } + + assert.Len(t, inputUsageInfo, len(expectedUsage), "Should track all fields including null ones") + for i := range expectedUsage { + assert.JSONEq(t, prettyJSON(t, &expectedUsage[i]), prettyJSON(t, inputUsageInfo[i]), "inputUsageInfo[%d]", i) + } +} + +// TestInputScalarFieldsInVariables specifically tests that scalar fields inside input objects +// are tracked when passed as variables (not inline) +func TestInputScalarFieldsInVariables(t *testing.T) { + // Create a simple schema with input type containing scalar fields + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: EmployeeSearchInput!): [Employee!]! + } + + type Employee { + id: ID! + } + + input EmployeeSearchInput { + hasPets: Boolean! + minAge: Int + department: String + } + ` + + operation := ` + query FindEmployeesWithVariable($criteria: EmployeeSearchInput!) 
{ + findEmployees(criteria: $criteria) { + id + } + } + ` + + variables := `{"criteria": {"hasPets": true, "minAge": 25, "department": "Engineering"}}` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "employees-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Employee", FieldNames: []string{"id"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployeesWithVariable", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + expectedInputUsageInfo := []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "Boolean", + TypeName: "EmployeeSearchInput", + Path: []string{"EmployeeSearchInput", "hasPets"}, + SubgraphIDs: []string{"employees-subgraph"}, + IsNull: false, + }, + { + NamedType: "Int", + TypeName: "EmployeeSearchInput", + Path: []string{"EmployeeSearchInput", "minAge"}, + SubgraphIDs: []string{"employees-subgraph"}, + IsNull: 
false, + }, + { + NamedType: "String", + TypeName: "EmployeeSearchInput", + Path: []string{"EmployeeSearchInput", "department"}, + SubgraphIDs: []string{"employees-subgraph"}, + IsNull: false, + }, + { + NamedType: "EmployeeSearchInput", + Path: []string{"EmployeeSearchInput"}, + SubgraphIDs: []string{"employees-subgraph"}, + IsNull: false, + }, + } + + assert.Len(t, inputUsageInfo, len(expectedInputUsageInfo)) + for i := range expectedInputUsageInfo { + assert.JSONEq(t, prettyJSON(t, &expectedInputUsageInfo[i]), prettyJSON(t, inputUsageInfo[i]), "inputUsageInfo[%d]", i) + } +} + +// TestInputNestedScalarFields tests that scalar fields inside nested input objects +// are tracked correctly with proper paths and subgraph IDs +func TestInputNestedScalarFields(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + search(filter: SearchFilter!): [Result!]! + } + + type Result { + id: ID! + } + + input SearchFilter { + name: String + criteria: SearchCriteria + tags: [String] + } + + input SearchCriteria { + minScore: Int! + maxScore: Int + isActive: Boolean + nested: NestedCriteria + } + + input NestedCriteria { + value: String! + } + ` + + operation := ` + query SearchQuery($filter: SearchFilter!) 
{ + search(filter: $filter) { + id + } + } + ` + + variables := `{ + "filter": { + "name": "test", + "criteria": { + "minScore": 10, + "maxScore": 100, + "isActive": true, + "nested": { + "value": "deep" + } + }, + "tags": ["tag1", "tag2"] + } + }` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "search-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"search"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Result", FieldNames: []string{"id"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "SearchQuery", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + expectedInputUsageInfo := []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "name"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, + }, + { + NamedType: "Int", + TypeName: "SearchCriteria", + Path: []string{"SearchCriteria", "minScore"}, + SubgraphIDs: 
[]string{"search-subgraph"}, + IsNull: false, + }, + { + NamedType: "Int", + TypeName: "SearchCriteria", + Path: []string{"SearchCriteria", "maxScore"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, + }, + { + NamedType: "Boolean", + TypeName: "SearchCriteria", + Path: []string{"SearchCriteria", "isActive"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, + }, + { + NamedType: "String", + TypeName: "NestedCriteria", + Path: []string{"NestedCriteria", "value"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, + }, + { + NamedType: "NestedCriteria", + TypeName: "SearchCriteria", + Path: []string{"SearchCriteria", "nested"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, + }, + { + NamedType: "SearchCriteria", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "criteria"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, + }, + { + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "tags"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, + }, + { + NamedType: "SearchFilter", + Path: []string{"SearchFilter"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, + }, + } + + assert.Len(t, inputUsageInfo, len(expectedInputUsageInfo)) + for i := range expectedInputUsageInfo { + assert.JSONEq(t, prettyJSON(t, &expectedInputUsageInfo[i]), prettyJSON(t, inputUsageInfo[i]), "inputUsageInfo[%d]", i) + } +} + +// TestMultipleSubgraphs tests that SubgraphIDs are correctly extracted when +// fields, arguments, and inputs come from different subgraphs +func TestMultipleSubgraphs(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + user(id: ID!): User + product(filter: ProductFilter!): Product + } + + type User { + id: ID! + name: String! + orders: [Order!]! + } + + type Order { + id: ID! + total: Float! + } + + type Product { + id: ID! + name: String! + price: Float! 
+ } + + input ProductFilter { + minPrice: Float + maxPrice: Float + category: String + } + ` + + operation := ` + query GetData($userId: ID!, $productFilter: ProductFilter!) { + user(id: $userId) { + id + name + orders { + id + total + } + } + product(filter: $productFilter) { + id + name + price + } + } + ` + + variables := `{ + "userId": "123", + "productFilter": { + "minPrice": 10.0, + "maxPrice": 100.0, + "category": "electronics" + } + }` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + // Create multiple subgraphs - users and products come from different sources + usersSubgraph, err := plan.NewDataSourceConfiguration[any]( + "users-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"user"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "User", FieldNames: []string{"id", "name", "orders"}}, + {TypeName: "Order", FieldNames: []string{"id", "total"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + productsSubgraph, err := plan.NewDataSourceConfiguration[any]( + "products-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"product"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Product", FieldNames: []string{"id", "name", "price"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := 
plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{usersSubgraph, productsSubgraph}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "GetData", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + fieldUsageInfo := GetTypeFieldUsageInfo(generatedPlan) + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Verify field usage - fields should be attributed to the correct subgraph + expectedFieldUsageInfo := []*graphqlmetricsv1.TypeFieldUsageInfo{ + { + TypeNames: []string{"Query"}, + Path: []string{"user"}, + NamedType: "User", + SubgraphIDs: []string{"users-subgraph"}, + }, + { + TypeNames: []string{"User"}, + Path: []string{"user", "id"}, + NamedType: "ID", + SubgraphIDs: []string{"users-subgraph"}, + }, + { + TypeNames: []string{"User"}, + Path: []string{"user", "name"}, + NamedType: "String", + SubgraphIDs: []string{"users-subgraph"}, + }, + { + TypeNames: []string{"User"}, + Path: []string{"user", "orders"}, + NamedType: "Order", + SubgraphIDs: []string{"users-subgraph"}, + }, + { + TypeNames: []string{"Order"}, + Path: []string{"user", "orders", "id"}, + NamedType: "ID", + SubgraphIDs: []string{"users-subgraph"}, + }, + { + TypeNames: []string{"Order"}, + Path: []string{"user", "orders", "total"}, + NamedType: "Float", + SubgraphIDs: []string{"users-subgraph"}, + }, + { + TypeNames: []string{"Query"}, + Path: []string{"product"}, + NamedType: "Product", + SubgraphIDs: []string{"products-subgraph"}, + }, + { + TypeNames: []string{"Product"}, + Path: []string{"product", "id"}, + NamedType: "ID", + SubgraphIDs: []string{"products-subgraph"}, + }, + { + TypeNames: []string{"Product"}, + Path: []string{"product", "name"}, + NamedType: "String", + 
SubgraphIDs: []string{"products-subgraph"}, + }, + { + TypeNames: []string{"Product"}, + Path: []string{"product", "price"}, + NamedType: "Float", + SubgraphIDs: []string{"products-subgraph"}, + }, + } + + // Verify argument usage - arguments should be attributed to the correct subgraph + expectedArgumentUsageInfo := []*graphqlmetricsv1.ArgumentUsageInfo{ + { + TypeName: "Query", + NamedType: "ID", + Path: []string{"user", "id"}, + SubgraphIDs: []string{"users-subgraph"}, + IsNull: false, + }, + { + TypeName: "Query", + NamedType: "ProductFilter", + Path: []string{"product", "filter"}, + SubgraphIDs: []string{"products-subgraph"}, + IsNull: false, + }, + } + + // Verify input usage - inputs should be attributed to the correct subgraph + expectedInputUsageInfo := []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "ID", + Path: []string{"ID"}, + SubgraphIDs: []string{"users-subgraph"}, + IsNull: false, + }, + { + NamedType: "Float", + TypeName: "ProductFilter", + Path: []string{"ProductFilter", "minPrice"}, + SubgraphIDs: []string{"products-subgraph"}, + IsNull: false, + }, + { + NamedType: "Float", + TypeName: "ProductFilter", + Path: []string{"ProductFilter", "maxPrice"}, + SubgraphIDs: []string{"products-subgraph"}, + IsNull: false, + }, + { + NamedType: "String", + TypeName: "ProductFilter", + Path: []string{"ProductFilter", "category"}, + SubgraphIDs: []string{"products-subgraph"}, + IsNull: false, + }, + { + NamedType: "ProductFilter", + Path: []string{"ProductFilter"}, + SubgraphIDs: []string{"products-subgraph"}, + IsNull: false, + }, + } + + // Assert all expectations + assert.Len(t, fieldUsageInfo, len(expectedFieldUsageInfo)) + for i := range expectedFieldUsageInfo { + assert.JSONEq(t, prettyJSON(t, expectedFieldUsageInfo[i]), prettyJSON(t, fieldUsageInfo[i].IntoGraphQLMetrics()), "fieldUsageInfo[%d]", i) + } + + assert.Len(t, argumentUsageInfo, len(expectedArgumentUsageInfo)) + for i := range expectedArgumentUsageInfo { + assert.JSONEq(t, prettyJSON(t, 
expectedArgumentUsageInfo[i]), prettyJSON(t, argumentUsageInfo[i]), "argumentUsageInfo[%d]", i) + } + + assert.Len(t, inputUsageInfo, len(expectedInputUsageInfo)) + for i := range expectedInputUsageInfo { + assert.JSONEq(t, prettyJSON(t, &expectedInputUsageInfo[i]), prettyJSON(t, inputUsageInfo[i]), "inputUsageInfo[%d]", i) + } + + // Additionally, verify that no field is wrongly attributed to the wrong subgraph + for _, info := range fieldUsageInfo { + if len(info.Path) > 0 { + firstPath := info.Path[0] + if firstPath == "user" { + assert.Equal(t, []string{"users-subgraph"}, info.SubgraphIDs, "user fields should only reference users-subgraph") + } else if firstPath == "product" { + assert.Equal(t, []string{"products-subgraph"}, info.SubgraphIDs, "product fields should only reference products-subgraph") + } + } + } + + // Verify arguments are attributed correctly + for _, info := range argumentUsageInfo { + if len(info.Path) > 0 { + firstPath := info.Path[0] + if firstPath == "user" { + assert.Equal(t, []string{"users-subgraph"}, info.SubgraphIDs, "user arguments should reference users-subgraph") + } else if firstPath == "product" { + assert.Equal(t, []string{"products-subgraph"}, info.SubgraphIDs, "product arguments should reference products-subgraph") + } + } + } +} + +// TestNullPropagationScenarios tests the null propagation scenarios from the breaking change detection document +func TestNullPropagationScenarios(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + a(input: Input): ID + } + + input Input { + a: NestedInput + } + + input NestedInput { + a: SuperNestedInput + } + + input SuperNestedInput { + a: ID + } + ` + + tests := []struct { + name string + variables string + expectedUsage []graphqlmetricsv1.InputUsageInfo + description string + }{ + { + name: "input null - explicitly", + variables: `{"input": null}`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "Input", + Path: []string{"Input"}, + 
SubgraphIDs: []string{"test-subgraph"}, + IsNull: true, + }, + }, + description: "Explicit null at top level - chain ends here", + }, + { + name: "input empty object - implicit null nested field", + variables: `{"input": {}}`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "NestedInput", + TypeName: "Input", + Path: []string{"Input", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: true, + }, + { + NamedType: "Input", + Path: []string{"Input"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + }, + description: "Empty object means nested field 'a' is implicitly null and should be tracked", + }, + { + name: "input.a null - explicitly", + variables: `{"input": {"a": null}}`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "NestedInput", + TypeName: "Input", + Path: []string{"Input", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: true, + }, + { + NamedType: "Input", + Path: []string{"Input"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + }, + description: "Explicit null at nested level - chain ends at Input.a", + }, + { + name: "input.a empty object - implicit null doubly nested field", + variables: `{"input": {"a": {}}}`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "SuperNestedInput", + TypeName: "NestedInput", + Path: []string{"NestedInput", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: true, + }, + { + NamedType: "NestedInput", + TypeName: "Input", + Path: []string{"Input", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + { + NamedType: "Input", + Path: []string{"Input"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + }, + description: "Empty nested object means doubly nested field 'a' is implicitly null and should be tracked", + }, + { + name: "input.a.a null - explicitly", + variables: `{"input": {"a": {"a": null}}}`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: 
"SuperNestedInput", + TypeName: "NestedInput", + Path: []string{"NestedInput", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: true, + }, + { + NamedType: "NestedInput", + TypeName: "Input", + Path: []string{"Input", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + { + NamedType: "Input", + Path: []string{"Input"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + }, + description: "Explicit null at doubly nested level - chain ends at NestedInput.a", + }, + { + name: "input.a.a empty object - implicit null triply nested field", + variables: `{"input": {"a": {"a": {}}}}`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "ID", + TypeName: "SuperNestedInput", + Path: []string{"SuperNestedInput", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: true, + }, + { + NamedType: "SuperNestedInput", + TypeName: "NestedInput", + Path: []string{"NestedInput", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + { + NamedType: "NestedInput", + TypeName: "Input", + Path: []string{"Input", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + { + NamedType: "Input", + Path: []string{"Input"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + }, + description: "Empty doubly nested object means triply nested field 'a' is implicitly null and should be tracked", + }, + { + name: "input.a.a.a null - explicitly", + variables: `{"input": {"a": {"a": {"a": null}}}}`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "ID", + TypeName: "SuperNestedInput", + Path: []string{"SuperNestedInput", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: true, + }, + { + NamedType: "SuperNestedInput", + TypeName: "NestedInput", + Path: []string{"NestedInput", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + { + NamedType: "NestedInput", + TypeName: "Input", + Path: []string{"Input", "a"}, + SubgraphIDs: 
[]string{"test-subgraph"}, + IsNull: false, + }, + { + NamedType: "Input", + Path: []string{"Input"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + }, + description: "Explicit null at leaf level - full chain is tracked with leaf as null", + }, + { + name: "input.a.a.a with value - no nulls", + variables: `{"input": {"a": {"a": {"a": "123"}}}}`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "ID", + TypeName: "SuperNestedInput", + Path: []string{"SuperNestedInput", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + { + NamedType: "SuperNestedInput", + TypeName: "NestedInput", + Path: []string{"NestedInput", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + { + NamedType: "NestedInput", + TypeName: "Input", + Path: []string{"Input", "a"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + { + NamedType: "Input", + Path: []string{"Input"}, + SubgraphIDs: []string{"test-subgraph"}, + IsNull: false, + }, + }, + description: "Full chain with actual value - no nulls in the chain", + }, + } + + operation := ` + query TestQuery($input: Input) { + a(input: $input) + } + ` + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "test-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: 
[]plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"a"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "TestQuery", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(tt.variables) + require.NoError(t, err) + + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + assert.Len(t, inputUsageInfo, len(tt.expectedUsage), tt.description) + for i := range tt.expectedUsage { + assert.JSONEq(t, prettyJSON(t, &tt.expectedUsage[i]), prettyJSON(t, inputUsageInfo[i]), + "inputUsageInfo[%d] - %s", i, tt.description) + } + }) + } +} + +// TestArgumentUsageWithNullArgument verifies that null argument values are tracked with IsNull flag +func TestArgumentUsageWithNullArgument(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: SearchInput): [Employee!]! + } + + type Employee { + id: ID! 
+ details: EmployeeDetails + } + + type EmployeeDetails { + forename: String + } + + input SearchInput { + department: String + minAge: Int + } + ` + + operation := ` + query FindEmployeesWithVariable($criteria: SearchInput) { + findEmployees(criteria: $criteria) { + id + details { + forename + } + } + } + ` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "employees-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Employee", FieldNames: []string{"id", "details"}}, + {TypeName: "EmployeeDetails", FieldNames: []string{"forename"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployeesWithVariable", report) + require.False(t, report.HasErrors()) + + vars := astjson.MustParse(`{"criteria": null}`) + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Should track the null argument with IsNull=true + expectedUsage := []*graphqlmetricsv1.ArgumentUsageInfo{ + { + TypeName: "Query", + NamedType: "SearchInput", + Path: 
[]string{"findEmployees", "criteria"}, + SubgraphIDs: []string{"employees-subgraph"}, + IsNull: true, + }, + } + + assert.Len(t, argumentUsageInfo, len(expectedUsage), "Null argument should be tracked with IsNull=true") + for i := range expectedUsage { + assert.JSONEq(t, prettyJSON(t, expectedUsage[i]), prettyJSON(t, argumentUsageInfo[i]), "argumentUsageInfo[%d]", i) + } +} + +// TestVariableRemapping verifies that variable name remapping works correctly after normalization. +// This tests the real-world scenario where operations are normalized/minified and variable names +// change (e.g., $criteria → $a), requiring remapping to find variable values in the JSON. +func TestVariableRemapping(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: SearchInput, status: String): [Employee!]! + } + + type Employee { + id: ID! + details: EmployeeDetails + } + + type EmployeeDetails { + forename: String + surname: String + } + + input SearchInput { + department: String + minAge: Int + active: Boolean + } + ` + + // Original operation with descriptive variable names + operation := ` + query FindEmployeesQuery($searchCriteria: SearchInput, $employeeStatus: String) { + findEmployees(criteria: $searchCriteria, status: $employeeStatus) { + id + details { + forename + surname + } + } + } + ` + + // Variables use original names + variables := `{ + "searchCriteria": { + "department": "Engineering", + "minAge": 25, + "active": true + }, + "employeeStatus": null + }` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := 
astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + // Use the actual variables remapper to generate the remapping + // This simulates what happens in the router during operation processing + remapper := astnormalization.NewVariablesMapper() + op.Input.Variables = []byte(variables) + remapReport := &operationreport.Report{} + variablesMap := remapper.NormalizeOperation(&op, &def, remapReport) + require.False(t, remapReport.HasErrors()) + require.NotEmpty(t, variablesMap, "Variables should be remapped after normalization") + + // variablesMap maps normalized names (e.g., "a", "b") to original names (e.g., "searchCriteria", "employeeStatus") + t.Logf("Variable remapping: %+v", variablesMap) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "employees-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Employee", FieldNames: []string{"id", "details"}}, + {TypeName: "EmployeeDetails", FieldNames: []string{"forename", "surname"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployeesQuery", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + // Test with remapping - should correctly find variables and track usage + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, variablesMap) + require.NoError(t, err) + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, variablesMap) + require.NoError(t, err) + + // Verify argument usage tracks both arguments + // One should be null (employeeStatus), one 
should be non-null (searchCriteria) + require.Len(t, argumentUsageInfo, 2, "Should track both arguments") + + var criteriaArg, statusArg *graphqlmetricsv1.ArgumentUsageInfo + for _, arg := range argumentUsageInfo { + switch arg.NamedType { + case "SearchInput": + criteriaArg = arg + case "String": + statusArg = arg + } + } + + require.NotNil(t, criteriaArg, "Should find criteria argument") + require.NotNil(t, statusArg, "Should find status argument") + + // Verify criteria argument (non-null input object) + assert.Equal(t, "Query", criteriaArg.TypeName) + assert.Equal(t, "SearchInput", criteriaArg.NamedType) + assert.Equal(t, []string{"findEmployees", "criteria"}, criteriaArg.Path) + assert.False(t, criteriaArg.IsNull, "searchCriteria should not be null") + + // Verify status argument (null string) + assert.Equal(t, "Query", statusArg.TypeName) + assert.Equal(t, "String", statusArg.NamedType) + assert.Equal(t, []string{"findEmployees", "status"}, statusArg.Path) + assert.True(t, statusArg.IsNull, "employeeStatus should be null - this is the critical test for remapping!") + + // Verify input usage tracks the input object and its fields + require.GreaterOrEqual(t, len(inputUsageInfo), 4, "Should track SearchInput and its fields") + + // Find the root SearchInput type + var searchInputRoot *graphqlmetricsv1.InputUsageInfo + for _, input := range inputUsageInfo { + if input.NamedType == "SearchInput" && len(input.Path) == 1 { + searchInputRoot = input + break + } + } + require.NotNil(t, searchInputRoot, "Should track root SearchInput type") + assert.False(t, searchInputRoot.IsNull, "SearchInput should not be null") + + // Verify individual fields were tracked + fieldMap := make(map[string]*graphqlmetricsv1.InputUsageInfo) + for _, input := range inputUsageInfo { + if input.TypeName == "SearchInput" && len(input.Path) == 2 { + fieldMap[input.Path[1]] = input + } + } + + // All fields should be present and non-null + assert.Contains(t, fieldMap, "department", "Should 
track department field") + assert.Contains(t, fieldMap, "minAge", "Should track minAge field") + assert.Contains(t, fieldMap, "active", "Should track active field") + + if departmentField, ok := fieldMap["department"]; ok { + assert.Equal(t, "String", departmentField.NamedType) + assert.False(t, departmentField.IsNull, "department has a value") + } + + if minAgeField, ok := fieldMap["minAge"]; ok { + assert.Equal(t, "Int", minAgeField.NamedType) + assert.False(t, minAgeField.IsNull, "minAge has a value") + } + + if activeField, ok := fieldMap["active"]; ok { + assert.Equal(t, "Boolean", activeField.NamedType) + assert.False(t, activeField.IsNull, "active has a value") + } + + // Test without remapping - should fail to find variables correctly + argumentUsageInfoNoRemap, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Without remapping, null detection for variable-based arguments won't work correctly + // because the AST uses normalized names but variables JSON uses original names + var statusArgNoRemap *graphqlmetricsv1.ArgumentUsageInfo + for _, arg := range argumentUsageInfoNoRemap { + if arg.NamedType == "String" { + statusArgNoRemap = arg + break + } + } + + // Without remapping, we can't correctly detect the null status because we can't find + // the variable value (AST has normalized name, JSON has original name) + // This demonstrates why remapping is critical + if statusArgNoRemap != nil { + // The behavior without remapping: can't find the variable, so defaults to false + assert.False(t, statusArgNoRemap.IsNull, "Without remapping, can't correctly detect null status") + } +} + +// TestImplicitNullArguments verifies that arguments are tracked even when not provided in the operation. +// This is critical for breaking change detection - we need to know if optional arguments are being used. 
+func TestImplicitNullArguments(t *testing.T) { + t.Run("no arguments provided", func(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: SearchInput, status: String, limit: Int): String + } + + input SearchInput { + department: String + } + ` + + // Operation WITHOUT any arguments - all should be tracked as implicitly null + operation := ` + query FindEmployees { + findEmployees + } + ` + + variables := `{}` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "employees-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployees", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Should track ALL three arguments even though none were provided + require.Len(t, argumentUsageInfo, 3, "Should track all 3 arguments (criteria, status, limit) even though none were provided") + + 
// Verify all arguments are tracked as implicitly null + argumentMap := make(map[string]*graphqlmetricsv1.ArgumentUsageInfo) + for _, arg := range argumentUsageInfo { + if len(arg.Path) == 2 && arg.Path[0] == "findEmployees" { + argumentMap[arg.Path[1]] = arg + } + } + + // Verify criteria argument (SearchInput) + require.Contains(t, argumentMap, "criteria", "Should track criteria argument") + criteriaArg := argumentMap["criteria"] + assert.Equal(t, "Query", criteriaArg.TypeName) + assert.Equal(t, "SearchInput", criteriaArg.NamedType) + assert.Equal(t, []string{"findEmployees", "criteria"}, criteriaArg.Path) + assert.True(t, criteriaArg.IsNull, "criteria should be implicitly null (not provided)") + + // Verify status argument (String) + require.Contains(t, argumentMap, "status", "Should track status argument") + statusArg := argumentMap["status"] + assert.Equal(t, "Query", statusArg.TypeName) + assert.Equal(t, "String", statusArg.NamedType) + assert.Equal(t, []string{"findEmployees", "status"}, statusArg.Path) + assert.True(t, statusArg.IsNull, "status should be implicitly null (not provided)") + + // Verify limit argument (Int) + require.Contains(t, argumentMap, "limit", "Should track limit argument") + limitArg := argumentMap["limit"] + assert.Equal(t, "Query", limitArg.TypeName) + assert.Equal(t, "Int", limitArg.NamedType) + assert.Equal(t, []string{"findEmployees", "limit"}, limitArg.Path) + assert.True(t, limitArg.IsNull, "limit should be implicitly null (not provided)") + }) + + t.Run("mixed - some arguments provided, some not", func(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: SearchInput, status: String, limit: Int): String + } + + input SearchInput { + department: String + } + ` + + // Operation with only 'status' argument - criteria and limit should be tracked as implicit nulls + operation := ` + query FindEmployees { + findEmployees(status: "active") + } + ` + + variables := `{}` + + def, rep := 
astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "employees-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployees", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Should track ALL three arguments: status (explicit), criteria & limit (implicit) + require.Len(t, argumentUsageInfo, 3, "Should track all 3 arguments") + + // Verify argument tracking + argumentMap := make(map[string]*graphqlmetricsv1.ArgumentUsageInfo) + for _, arg := range argumentUsageInfo { + if len(arg.Path) == 2 && arg.Path[0] == "findEmployees" { + argumentMap[arg.Path[1]] = arg + } + } + + // Verify status argument (provided explicitly with value) + require.Contains(t, argumentMap, "status") + statusArg := argumentMap["status"] + assert.Equal(t, "String", statusArg.NamedType) + assert.False(t, statusArg.IsNull, "status was provided with value") + + // Verify 
criteria argument (not provided - implicit null) + require.Contains(t, argumentMap, "criteria") + criteriaArg := argumentMap["criteria"] + assert.Equal(t, "SearchInput", criteriaArg.NamedType) + assert.True(t, criteriaArg.IsNull, "criteria should be implicitly null (not provided)") + + // Verify limit argument (not provided - implicit null) + require.Contains(t, argumentMap, "limit") + limitArg := argumentMap["limit"] + assert.Equal(t, "Int", limitArg.NamedType) + assert.True(t, limitArg.IsNull, "limit should be implicitly null (not provided)") + }) +} + +// TestImplicitInputTypeArgumentUsage verifies that when an input type argument is not provided, +// we track input usage for that type with IsNull: true for breaking change detection. +func TestImplicitInputTypeArgumentUsage(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: SearchInput, status: String, limit: Int): [Employee!]! + } + + type Employee { + id: ID! + details: EmployeeDetails + } + + type EmployeeDetails { + forename: String + } + + input SearchInput { + department: String + title: String + } + ` + + // Operation without providing the SearchInput argument + operation := ` + query FindEmployees { + findEmployees { + id + details { + forename + } + } + } + ` + + variables := `{}` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "employees-subgraph", + 
&FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Employee", FieldNames: []string{"id", "details"}}, + {TypeName: "EmployeeDetails", FieldNames: []string{"forename"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployees", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + // Get argument usage - should include implicit nulls for criteria, status, limit + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Get input usage - should include SearchInput from the implicitly null criteria argument + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Verify argument usage includes all three arguments as implicitly null + require.Len(t, argumentUsageInfo, 3, "Should track all 3 arguments (criteria, status, limit)") + + var criteriaArg *graphqlmetricsv1.ArgumentUsageInfo + for _, arg := range argumentUsageInfo { + if len(arg.Path) == 2 && arg.Path[0] == "findEmployees" && arg.Path[1] == "criteria" { + criteriaArg = arg + break + } + } + require.NotNil(t, criteriaArg, "Should find criteria argument") + assert.Equal(t, "SearchInput", criteriaArg.NamedType) + assert.True(t, criteriaArg.IsNull, "criteria should be implicitly null") + + // CRITICAL: Verify input usage includes SearchInput from the implicitly null criteria argument + var searchInputUsage *graphqlmetricsv1.InputUsageInfo + for _, input := range inputUsageInfo { + if input.NamedType == "SearchInput" && len(input.Path) == 1 && input.Path[0] == 
"SearchInput" { + searchInputUsage = input + break + } + } + require.NotNil(t, searchInputUsage, "Should track input usage for SearchInput type even though argument wasn't provided") + assert.Equal(t, "SearchInput", searchInputUsage.NamedType) + assert.Equal(t, []string{"SearchInput"}, searchInputUsage.Path) + assert.True(t, searchInputUsage.IsNull, "SearchInput should be marked as null since argument wasn't provided") + assert.Equal(t, []string{"employees-subgraph"}, searchInputUsage.SubgraphIDs, "Should have correct subgraph ID") +} + +// TestInputUsageWithEmptyVariables verifies that when a variable is defined and used in an argument, +// but the variables JSON is empty, we still track the input type usage with IsNull: true. +func TestInputUsageWithEmptyVariables(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findEmployees(criteria: SearchInput): [Employee!]! + } + + type Employee { + id: ID! + details: EmployeeDetails + } + + type EmployeeDetails { + forename: String + } + + input SearchInput { + department: String + title: String + } + ` + + // Operation with variable defined and used in argument, but variables JSON will be empty + operation := ` + query FindEmployeesWithVariable($criteria: SearchInput) { + findEmployees(criteria: $criteria) { + id + details { + forename + } + } + } + ` + + variables := `{}` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := 
plan.NewDataSourceConfiguration[any]( + "employees-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findEmployees"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Employee", FieldNames: []string{"id", "details"}}, + {TypeName: "EmployeeDetails", FieldNames: []string{"forename"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "FindEmployeesWithVariable", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + // Get input usage - should include SearchInput even though variable is not provided + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Verify input usage includes SearchInput from the variable definition + var searchInputUsage *graphqlmetricsv1.InputUsageInfo + for _, input := range inputUsageInfo { + if input.NamedType == "SearchInput" && len(input.Path) == 1 && input.Path[0] == "SearchInput" { + searchInputUsage = input + break + } + } + require.NotNil(t, searchInputUsage, "Should track input usage for SearchInput type even though variable is not provided in empty variables JSON") + assert.Equal(t, "SearchInput", searchInputUsage.NamedType) + assert.Equal(t, []string{"SearchInput"}, searchInputUsage.Path) + assert.True(t, searchInputUsage.IsNull, "SearchInput should be marked as null since variable is not provided") + assert.Equal(t, []string{"employees-subgraph"}, searchInputUsage.SubgraphIDs, "Should have correct subgraph ID") +} + +// TestSharedInputObjectAcrossSubgraphs verifies that when an input object variable is used by +// multiple fields from different subgraphs, the input usage (including nested fields) 
is +// attributed to all subgraphs that use it (merged). +func TestSharedInputObjectAcrossSubgraphs(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + findUsers(criteria: SearchInput!): [User!]! + findProducts(criteria: SearchInput!): [Product!]! + findOrders(criteria: SearchInput!): [Order!]! + } + + type User { + id: ID! + name: String! + } + + type Product { + id: ID! + title: String! + } + + type Order { + id: ID! + status: String! + } + + input SearchInput { + keyword: String + category: String + limit: Int + } + ` + + // Single input object variable used by three fields from three different subgraphs + operation := ` + query Search($criteria: SearchInput!) { + findUsers(criteria: $criteria) { + id + name + } + findProducts(criteria: $criteria) { + id + title + } + findOrders(criteria: $criteria) { + id + status + } + } + ` + + variables := `{"criteria": {"keyword": "test", "category": "electronics"}}` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + // Create three subgraphs - each serving one root field + usersSubgraph, err := plan.NewDataSourceConfiguration[any]( + "users-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findUsers"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "User", FieldNames: []string{"id", "name"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + productsSubgraph, 
err := plan.NewDataSourceConfiguration[any]( + "products-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findProducts"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Product", FieldNames: []string{"id", "title"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + ordersSubgraph, err := plan.NewDataSourceConfiguration[any]( + "orders-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"findOrders"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Order", FieldNames: []string{"id", "status"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{usersSubgraph, productsSubgraph, ordersSubgraph}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "Search", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // The $criteria variable is used by findUsers, findProducts, and findOrders + // Each from a different subgraph, so we expect THREE argument entries + require.Len(t, argumentUsageInfo, 3, "Should have 3 argument usage entries") + + // Verify each argument has its own subgraph + argumentsByField := make(map[string]*graphqlmetricsv1.ArgumentUsageInfo) + for _, arg := range argumentUsageInfo { + if len(arg.Path) == 2 && arg.Path[1] == "criteria" { + argumentsByField[arg.Path[0]] = arg + } + } + + require.Contains(t, argumentsByField, "findUsers") + require.Contains(t, argumentsByField, "findProducts") + require.Contains(t, 
argumentsByField, "findOrders") + + assert.Equal(t, []string{"users-subgraph"}, argumentsByField["findUsers"].SubgraphIDs) + assert.Equal(t, []string{"products-subgraph"}, argumentsByField["findProducts"].SubgraphIDs) + assert.Equal(t, []string{"orders-subgraph"}, argumentsByField["findOrders"].SubgraphIDs) + + // CRITICAL: Input usage should merge all three subgraphs + // We should have entries for: + // 1. SearchInput (root) - merged subgraphs + // 2. SearchInput.keyword - merged subgraphs + // 3. SearchInput.category - merged subgraphs + // 4. SearchInput.limit (implicit null) - merged subgraphs + + inputsByPath := make(map[string]*graphqlmetricsv1.InputUsageInfo) + for _, input := range inputUsageInfo { + pathKey := strings.Join(input.Path, ".") + inputsByPath[pathKey] = input + } + + // Verify root SearchInput has all three subgraphs merged + require.Contains(t, inputsByPath, "SearchInput", "Should track root SearchInput") + searchInputRoot := inputsByPath["SearchInput"] + assert.Equal(t, "SearchInput", searchInputRoot.NamedType) + assert.False(t, searchInputRoot.IsNull) + assert.ElementsMatch(t, []string{"users-subgraph", "products-subgraph", "orders-subgraph"}, + searchInputRoot.SubgraphIDs, "Root SearchInput should have all three subgraphs merged") + assert.Len(t, searchInputRoot.SubgraphIDs, 3, "Should have exactly 3 subgraphs (no duplicates)") + + // Verify keyword field has all three subgraphs merged + require.Contains(t, inputsByPath, "SearchInput.keyword", "Should track SearchInput.keyword") + keywordField := inputsByPath["SearchInput.keyword"] + assert.Equal(t, "String", keywordField.NamedType) + assert.False(t, keywordField.IsNull) + assert.ElementsMatch(t, []string{"users-subgraph", "products-subgraph", "orders-subgraph"}, + keywordField.SubgraphIDs, "keyword field should have all three subgraphs merged") + + // Verify category field has all three subgraphs merged + require.Contains(t, inputsByPath, "SearchInput.category", "Should track 
SearchInput.category") + categoryField := inputsByPath["SearchInput.category"] + assert.Equal(t, "String", categoryField.NamedType) + assert.False(t, categoryField.IsNull) + assert.ElementsMatch(t, []string{"users-subgraph", "products-subgraph", "orders-subgraph"}, + categoryField.SubgraphIDs, "category field should have all three subgraphs merged") + + // Verify implicit null field (limit) has all three subgraphs merged + require.Contains(t, inputsByPath, "SearchInput.limit", "Should track implicitly null SearchInput.limit") + limitField := inputsByPath["SearchInput.limit"] + assert.Equal(t, "Int", limitField.NamedType) + assert.True(t, limitField.IsNull, "limit should be implicitly null (not provided)") + assert.ElementsMatch(t, []string{"users-subgraph", "products-subgraph", "orders-subgraph"}, + limitField.SubgraphIDs, "implicit null field should also have all three subgraphs merged") +} + +// TestSharedVariableAcrossSubgraphs verifies that when a variable is used by multiple fields +// from different subgraphs, the variable's input usage is attributed to all subgraphs (merged). +func TestSharedVariableAcrossSubgraphs(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + user(id: ID!): User + product(id: ID!): Product + order(id: ID!): Order + } + + type User { + id: ID! + name: String! + } + + type Product { + id: ID! + title: String! + } + + type Order { + id: ID! + status: String! + } + ` + + // Single variable $sharedId is used by three fields from three different subgraphs + operation := ` + query GetData($sharedId: ID!) 
{ + user(id: $sharedId) { + id + name + } + product(id: $sharedId) { + id + title + } + order(id: $sharedId) { + id + status + } + } + ` + + variables := `{"sharedId": "123"}` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + // Create three subgraphs - each serving one root field + usersSubgraph, err := plan.NewDataSourceConfiguration[any]( + "users-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"user"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "User", FieldNames: []string{"id", "name"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + productsSubgraph, err := plan.NewDataSourceConfiguration[any]( + "products-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"product"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Product", FieldNames: []string{"id", "title"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + ordersSubgraph, err := plan.NewDataSourceConfiguration[any]( + "orders-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"order"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Order", FieldNames: []string{"id", "status"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err 
:= plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{usersSubgraph, productsSubgraph, ordersSubgraph}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "GetData", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // The $sharedId variable is used by user(id:), product(id:), and order(id:) + // Each from a different subgraph, so we expect THREE argument entries + expectedArgumentUsageInfo := []*graphqlmetricsv1.ArgumentUsageInfo{ + { + TypeName: "Query", + NamedType: "ID", + Path: []string{"user", "id"}, + SubgraphIDs: []string{"users-subgraph"}, + IsNull: false, + }, + { + TypeName: "Query", + NamedType: "ID", + Path: []string{"product", "id"}, + SubgraphIDs: []string{"products-subgraph"}, + IsNull: false, + }, + { + TypeName: "Query", + NamedType: "ID", + Path: []string{"order", "id"}, + SubgraphIDs: []string{"orders-subgraph"}, + IsNull: false, + }, + } + + // The $sharedId variable's input usage should be attributed to ALL THREE subgraphs + // This is the critical test: mergeSubgraphIDs should combine all three + expectedInputUsageInfo := []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "ID", + Path: []string{"ID"}, + // MERGED: All three subgraphs that use this variable + SubgraphIDs: []string{"users-subgraph", "products-subgraph", "orders-subgraph"}, + IsNull: false, + }, + } + + // Verify argument usage + assert.Len(t, argumentUsageInfo, len(expectedArgumentUsageInfo)) + for i := range expectedArgumentUsageInfo { + assert.JSONEq(t, prettyJSON(t, expectedArgumentUsageInfo[i]), prettyJSON(t, argumentUsageInfo[i]), + "argumentUsageInfo[%d]", i) + } + + // Verify input usage - the critical 
assertion + assert.Len(t, inputUsageInfo, len(expectedInputUsageInfo), "Should have one input usage entry for the shared variable") + + // The input usage should have all three subgraph IDs merged + actualInput := inputUsageInfo[0] + assert.Equal(t, "ID", actualInput.NamedType, "Input type should be ID") + assert.Equal(t, []string{"ID"}, actualInput.Path, "Input path should be [ID]") + assert.False(t, actualInput.IsNull, "Input should not be null") + + // Critical assertion: verify all three subgraphs are present (order-independent) + assert.ElementsMatch(t, expectedInputUsageInfo[0].SubgraphIDs, actualInput.SubgraphIDs, + "Input usage should be attributed to all three subgraphs that use the variable") + + // Verify we have exactly 3 subgraphs (no duplicates) + assert.Len(t, actualInput.SubgraphIDs, 3, "Should have exactly 3 subgraph IDs (no duplicates)") +} + +// TestNullListHandling verifies that null list values are properly tracked with IsNull flag. +// This is critical for breaking change detection when a nullable list type becomes non-nullable. +func TestNullListHandling(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + search(filter: SearchFilter!): [Result!]! + } + + type Result { + id: ID! 
+ } + + input SearchFilter { + tags: [String] + categories: [String] + scores: [Int] + } + ` + + tests := []struct { + name string + variables string + expectedUsage []graphqlmetricsv1.InputUsageInfo + description string + }{ + { + name: "null list - tags is explicitly null", + variables: `{ + "filter": { + "tags": null, + "categories": ["cat1", "cat2"] + } + }`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "tags"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: true, // Null list should be marked as null + }, + { + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "categories"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, + }, + { + NamedType: "Int", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "scores"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: true, // Implicit null (missing) + }, + { + NamedType: "SearchFilter", + Path: []string{"SearchFilter"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, + }, + }, + description: "Explicit null list value should be tracked with IsNull=true, not skipped", + }, + { + name: "empty list - not null", + variables: `{ + "filter": { + "tags": [], + "categories": ["cat1"] + } + }`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "tags"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, // Empty list is not null, field is still used + }, + { + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "categories"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, + }, + { + NamedType: "Int", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "scores"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: true, // Implicit null (missing) + }, + { + NamedType: "SearchFilter", + Path: 
[]string{"SearchFilter"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, + }, + }, + description: "Empty list should track field usage with IsNull=false (field is used, just no elements)", + }, + { + name: "all lists null", + variables: `{ + "filter": { + "tags": null, + "categories": null, + "scores": null + } + }`, + expectedUsage: []graphqlmetricsv1.InputUsageInfo{ + { + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "tags"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: true, + }, + { + NamedType: "String", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "categories"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: true, + }, + { + NamedType: "Int", + TypeName: "SearchFilter", + Path: []string{"SearchFilter", "scores"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: true, + }, + { + NamedType: "SearchFilter", + Path: []string{"SearchFilter"}, + SubgraphIDs: []string{"search-subgraph"}, + IsNull: false, + }, + }, + description: "All null lists should be tracked with IsNull=true", + }, + } + + operation := ` + query SearchQuery($filter: SearchFilter!) 
{ + search(filter: $filter) { + id + } + } + ` + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration( + "search-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"search"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Result", FieldNames: []string{"id"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "SearchQuery", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(tt.variables) + require.NoError(t, err) + + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + assert.Len(t, inputUsageInfo, len(tt.expectedUsage), tt.description) + for i := range tt.expectedUsage { + assert.JSONEq(t, prettyJSON(t, &tt.expectedUsage[i]), prettyJSON(t, inputUsageInfo[i]), + "inputUsageInfo[%d] - %s", i, tt.description) + } + }) + } +} + +// TestNestedFieldArguments verifies that arguments on nested fields (not just root Query fields) +// are tracked correctly with proper type names, paths, and subgraph IDs. 
+// This is critical for tracking schema usage on fields like User.friends(limit: Int) or +// Product.reviews(filter: ReviewFilter). +func TestNestedFieldArguments(t *testing.T) { + schema := ` + schema { + query: Query + } + + type Query { + user(id: ID!): User + product(id: ID!): Product + } + + type User { + id: ID! + name: String! + friends(limit: Int, offset: Int, filter: FriendFilter): [User!]! + posts(status: PostStatus, category: String): [Post!]! + } + + type Post { + id: ID! + title: String! + comments(first: Int!, after: String, includeReplies: Boolean): [Comment!]! + } + + type Comment { + id: ID! + text: String! + replies(maxDepth: Int): [Comment!]! + } + + type Product { + id: ID! + name: String! + reviews(filter: ReviewFilter!): [Review!]! + } + + type Review { + id: ID! + rating: Int! + author: User + } + + input FriendFilter { + minAge: Int + maxAge: Int + } + + input ReviewFilter { + minRating: Int + verified: Boolean + } + + enum PostStatus { + DRAFT + PUBLISHED + ARCHIVED + } + ` + + t.Run("nested arguments at multiple levels", func(t *testing.T) { + operation := ` + query GetUserContent($userId: ID!, $postStatus: PostStatus, $commentLimit: Int!, $includeReplies: Boolean) { + user(id: $userId) { + id + name + friends(limit: 10, offset: 0) { + id + name + } + posts(status: $postStatus, category: "tech") { + id + title + comments(first: $commentLimit, includeReplies: $includeReplies) { + id + text + replies(maxDepth: 3) { + id + text + } + } + } + } + } + ` + + variables := `{ + "userId": "123", + "postStatus": "PUBLISHED", + "commentLimit": 20, + "includeReplies": true + }` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + 
norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "main-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"user", "product"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "User", FieldNames: []string{"id", "name", "friends", "posts"}}, + {TypeName: "Post", FieldNames: []string{"id", "title", "comments"}}, + {TypeName: "Comment", FieldNames: []string{"id", "text", "replies"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "GetUserContent", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Build a map for easier assertion + argumentMap := make(map[string]*graphqlmetricsv1.ArgumentUsageInfo) + for _, arg := range argumentUsageInfo { + key := strings.Join(arg.Path, ".") + argumentMap[key] = arg + } + + // Verify root level argument (Query.user.id) + require.Contains(t, argumentMap, "user.id", "Should track root level argument") + assert.Equal(t, "Query", argumentMap["user.id"].TypeName) + assert.Equal(t, "ID", argumentMap["user.id"].NamedType) + assert.False(t, argumentMap["user.id"].IsNull) + + // Verify nested level 1 argument (User.friends.limit) + require.Contains(t, argumentMap, "friends.limit", "Should track nested field argument") + assert.Equal(t, "User", argumentMap["friends.limit"].TypeName) + assert.Equal(t, "Int", 
argumentMap["friends.limit"].NamedType) + assert.False(t, argumentMap["friends.limit"].IsNull) + + // Verify nested level 1 argument (User.friends.offset) + require.Contains(t, argumentMap, "friends.offset", "Should track nested field argument") + assert.Equal(t, "User", argumentMap["friends.offset"].TypeName) + assert.Equal(t, "Int", argumentMap["friends.offset"].NamedType) + assert.False(t, argumentMap["friends.offset"].IsNull) + + // Verify nested level 1 implicit null argument (User.friends.filter) + require.Contains(t, argumentMap, "friends.filter", "Should track implicit null nested field argument") + assert.Equal(t, "User", argumentMap["friends.filter"].TypeName) + assert.Equal(t, "FriendFilter", argumentMap["friends.filter"].NamedType) + assert.True(t, argumentMap["friends.filter"].IsNull, "filter was not provided, should be implicitly null") + + // Verify nested level 1 argument (User.posts.status) + require.Contains(t, argumentMap, "posts.status", "Should track nested field argument with variable") + assert.Equal(t, "User", argumentMap["posts.status"].TypeName) + assert.Equal(t, "PostStatus", argumentMap["posts.status"].NamedType) + assert.False(t, argumentMap["posts.status"].IsNull) + + // Verify nested level 1 argument (User.posts.category) + require.Contains(t, argumentMap, "posts.category", "Should track nested field argument with inline value") + assert.Equal(t, "User", argumentMap["posts.category"].TypeName) + assert.Equal(t, "String", argumentMap["posts.category"].NamedType) + assert.False(t, argumentMap["posts.category"].IsNull) + + // Verify nested level 2 argument (Post.comments.first) + require.Contains(t, argumentMap, "comments.first", "Should track doubly nested field argument") + assert.Equal(t, "Post", argumentMap["comments.first"].TypeName) + assert.Equal(t, "Int", argumentMap["comments.first"].NamedType) + assert.False(t, argumentMap["comments.first"].IsNull) + + // Verify nested level 2 argument (Post.comments.includeReplies) + 
require.Contains(t, argumentMap, "comments.includeReplies", "Should track doubly nested field argument") + assert.Equal(t, "Post", argumentMap["comments.includeReplies"].TypeName) + assert.Equal(t, "Boolean", argumentMap["comments.includeReplies"].NamedType) + assert.False(t, argumentMap["comments.includeReplies"].IsNull) + + // Verify nested level 2 implicit null argument (Post.comments.after) + require.Contains(t, argumentMap, "comments.after", "Should track implicit null doubly nested argument") + assert.Equal(t, "Post", argumentMap["comments.after"].TypeName) + assert.Equal(t, "String", argumentMap["comments.after"].NamedType) + assert.True(t, argumentMap["comments.after"].IsNull, "after was not provided, should be implicitly null") + + // Verify nested level 3 argument (Comment.replies.maxDepth) + require.Contains(t, argumentMap, "replies.maxDepth", "Should track triply nested field argument") + assert.Equal(t, "Comment", argumentMap["replies.maxDepth"].TypeName) + assert.Equal(t, "Int", argumentMap["replies.maxDepth"].NamedType) + assert.False(t, argumentMap["replies.maxDepth"].IsNull) + + // Verify all arguments have correct subgraph IDs + for key, arg := range argumentMap { + assert.Equal(t, []string{"main-subgraph"}, arg.SubgraphIDs, "Argument %s should have main-subgraph", key) + } + }) + + t.Run("nested arguments with input object types", func(t *testing.T) { + operation := ` + query GetUserFriends($userId: ID!, $friendFilter: FriendFilter) { + user(id: $userId) { + id + friends(filter: $friendFilter, limit: 5) { + id + name + } + } + } + ` + + variables := `{ + "userId": "123", + "friendFilter": { + "minAge": 18, + "maxAge": 65 + } + }` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + 
norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "main-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"user"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "User", FieldNames: []string{"id", "name", "friends"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "GetUserFriends", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Build maps for easier assertion + argumentMap := make(map[string]*graphqlmetricsv1.ArgumentUsageInfo) + for _, arg := range argumentUsageInfo { + key := strings.Join(arg.Path, ".") + argumentMap[key] = arg + } + + inputMap := make(map[string]*graphqlmetricsv1.InputUsageInfo) + for _, input := range inputUsageInfo { + key := strings.Join(input.Path, ".") + inputMap[key] = input + } + + // Verify nested argument with input object type + require.Contains(t, argumentMap, "friends.filter", "Should track nested argument with input type") + filterArg := argumentMap["friends.filter"] + assert.Equal(t, "User", filterArg.TypeName) + assert.Equal(t, "FriendFilter", filterArg.NamedType) + assert.False(t, filterArg.IsNull) + + // Verify nested argument with scalar type + 
require.Contains(t, argumentMap, "friends.limit", "Should track nested argument with scalar type") + limitArg := argumentMap["friends.limit"] + assert.Equal(t, "User", limitArg.TypeName) + assert.Equal(t, "Int", limitArg.NamedType) + assert.False(t, limitArg.IsNull) + + // Verify implicit null for missing offset argument + require.Contains(t, argumentMap, "friends.offset", "Should track implicit null for nested argument") + offsetArg := argumentMap["friends.offset"] + assert.Equal(t, "User", offsetArg.TypeName) + assert.Equal(t, "Int", offsetArg.NamedType) + assert.True(t, offsetArg.IsNull, "offset was not provided, should be implicitly null") + + // Verify input usage for the filter input object + require.Contains(t, inputMap, "FriendFilter", "Should track FriendFilter input type") + assert.Equal(t, "FriendFilter", inputMap["FriendFilter"].NamedType) + assert.False(t, inputMap["FriendFilter"].IsNull) + + // Verify input fields + require.Contains(t, inputMap, "FriendFilter.minAge", "Should track FriendFilter.minAge field") + assert.Equal(t, "Int", inputMap["FriendFilter.minAge"].NamedType) + assert.Equal(t, "FriendFilter", inputMap["FriendFilter.minAge"].TypeName) + assert.False(t, inputMap["FriendFilter.minAge"].IsNull) + + require.Contains(t, inputMap, "FriendFilter.maxAge", "Should track FriendFilter.maxAge field") + assert.Equal(t, "Int", inputMap["FriendFilter.maxAge"].NamedType) + assert.Equal(t, "FriendFilter", inputMap["FriendFilter.maxAge"].TypeName) + assert.False(t, inputMap["FriendFilter.maxAge"].IsNull) + }) + + t.Run("nested arguments with null input object", func(t *testing.T) { + operation := ` + query GetUserFriends($userId: ID!, $friendFilter: FriendFilter) { + user(id: $userId) { + id + friends(filter: $friendFilter) { + id + name + } + } + } + ` + + variables := `{ + "userId": "123", + "friendFilter": null + }` + + def, rep := astparser.ParseGraphqlDocumentString(schema) + require.False(t, rep.HasErrors()) + op, rep := 
astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + dsCfg, err := plan.NewDataSourceConfiguration[any]( + "main-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"user"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "User", FieldNames: []string{"id", "name", "friends"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{dsCfg}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "GetUserFriends", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Build map for argument assertion + argumentMap := make(map[string]*graphqlmetricsv1.ArgumentUsageInfo) + for _, arg := range argumentUsageInfo { + key := strings.Join(arg.Path, ".") + argumentMap[key] = arg + } + + // Verify nested argument with null input object + require.Contains(t, argumentMap, "friends.filter", "Should track nested argument even when null") + filterArg := argumentMap["friends.filter"] + assert.Equal(t, "User", filterArg.TypeName) + assert.Equal(t, "FriendFilter", filterArg.NamedType) + assert.True(t, filterArg.IsNull, "filter variable is 
explicitly null") + + // Verify input usage tracks the null FriendFilter + var friendFilterUsage *graphqlmetricsv1.InputUsageInfo + for _, input := range inputUsageInfo { + if input.NamedType == "FriendFilter" && len(input.Path) == 1 { + friendFilterUsage = input + break + } + } + require.NotNil(t, friendFilterUsage, "Should track FriendFilter input type even when null") + assert.Equal(t, "FriendFilter", friendFilterUsage.NamedType) + assert.True(t, friendFilterUsage.IsNull, "FriendFilter should be tracked as null") + }) + + t.Run("nested arguments across multiple subgraphs", func(t *testing.T) { + // Enhanced schema with more types that span multiple subgraphs + multiSubgraphSchema := ` + schema { + query: Query + } + + type Query { + user(id: ID!): User + product(id: ID!): Product + order(id: ID!): Order + } + + type User { + id: ID! + name: String! + friends(limit: Int, filter: UserFilter): [User!]! + orders(status: OrderStatus, limit: Int): [Order!]! + } + + type Product { + id: ID! + name: String! + reviews(filter: ReviewFilter!, limit: Int): [Review!]! + } + + type Review { + id: ID! + rating: Int! + author: User + comments(first: Int, sortBy: String): [ReviewComment!]! + } + + type ReviewComment { + id: ID! + text: String! + } + + type Order { + id: ID! + status: OrderStatus! + items(category: String): [OrderItem!]! + customer: User + } + + type OrderItem { + id: ID! + product: Product + quantity: Int! 
+ } + + input UserFilter { + minAge: Int + verified: Boolean + } + + input ReviewFilter { + minRating: Int + verified: Boolean + } + + enum OrderStatus { + PENDING + SHIPPED + DELIVERED + } + ` + + operation := ` + query GetUserDataAcrossSubgraphs($userId: ID!, $userFilter: UserFilter, $reviewFilter: ReviewFilter!, $orderStatus: OrderStatus) { + user(id: $userId) { + id + name + friends(limit: 10, filter: $userFilter) { + id + name + } + orders(status: $orderStatus, limit: 5) { + id + status + items(category: "electronics") { + id + quantity + product { + id + name + reviews(filter: $reviewFilter, limit: 3) { + id + rating + comments(first: 5, sortBy: "date") { + id + text + } + } + } + } + } + } + } + ` + + variables := `{ + "userId": "user-123", + "userFilter": { + "minAge": 18, + "verified": true + }, + "reviewFilter": { + "minRating": 4, + "verified": true + }, + "orderStatus": "SHIPPED" + }` + + def, rep := astparser.ParseGraphqlDocumentString(multiSubgraphSchema) + require.False(t, rep.HasErrors()) + op, rep := astparser.ParseGraphqlDocumentString(operation) + require.False(t, rep.HasErrors()) + err := asttransform.MergeDefinitionWithBaseSchema(&def) + require.NoError(t, err) + + report := &operationreport.Report{} + norm := astnormalization.NewNormalizer(true, true) + norm.NormalizeOperation(&op, &def, report) + require.False(t, report.HasErrors()) + + valid := astvalidation.DefaultOperationValidator() + valid.Validate(&op, &def, report) + require.False(t, report.HasErrors()) + + // Create THREE subgraphs - users, products, and orders come from different sources + usersSubgraph, err := plan.NewDataSourceConfiguration[any]( + "users-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"user"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "User", FieldNames: []string{"id", "name", "friends", "orders"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + 
productsSubgraph, err := plan.NewDataSourceConfiguration[any]( + "products-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"product"}}, + {TypeName: "Product", FieldNames: []string{"id", "name", "reviews"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Product", FieldNames: []string{"id", "name", "reviews"}}, + {TypeName: "Review", FieldNames: []string{"id", "rating", "author", "comments"}}, + {TypeName: "ReviewComment", FieldNames: []string{"id", "text"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + ordersSubgraph, err := plan.NewDataSourceConfiguration[any]( + "orders-subgraph", + &FakeFactory[any]{upstreamSchema: &def}, + &plan.DataSourceMetadata{ + RootNodes: []plan.TypeField{ + {TypeName: "Query", FieldNames: []string{"order"}}, + {TypeName: "Order", FieldNames: []string{"id", "status", "items", "customer"}}, + }, + ChildNodes: []plan.TypeField{ + {TypeName: "Order", FieldNames: []string{"id", "status", "items", "customer"}}, + {TypeName: "OrderItem", FieldNames: []string{"id", "product", "quantity"}}, + }, + }, + nil, + ) + require.NoError(t, err) + + planner, err := plan.NewPlanner(plan.Configuration{ + DisableResolveFieldPositions: true, + DataSources: []plan.DataSource{usersSubgraph, productsSubgraph, ordersSubgraph}, + }) + require.NoError(t, err) + + generatedPlan := planner.Plan(&op, &def, "GetUserDataAcrossSubgraphs", report) + require.False(t, report.HasErrors()) + + vars, err := astjson.Parse(variables) + require.NoError(t, err) + + argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + inputUsageInfo, err := GetInputUsageInfo(&op, &def, vars, generatedPlan, nil) + require.NoError(t, err) + + // Build map for argument assertion + argumentMap := make(map[string]*graphqlmetricsv1.ArgumentUsageInfo) + for _, arg := range argumentUsageInfo { + key := strings.Join(arg.Path, ".") + 
argumentMap[key] = arg + } + + // Build map for input assertion + inputMap := make(map[string]*graphqlmetricsv1.InputUsageInfo) + for _, input := range inputUsageInfo { + key := strings.Join(input.Path, ".") + inputMap[key] = input + } + + // ======================================== + // Verify USERS SUBGRAPH arguments + // ======================================== + + // Root level: Query.user(id:) -> users-subgraph + require.Contains(t, argumentMap, "user.id", "Should track Query.user(id:)") + assert.Equal(t, "Query", argumentMap["user.id"].TypeName) + assert.Equal(t, "ID", argumentMap["user.id"].NamedType) + assert.Equal(t, []string{"users-subgraph"}, argumentMap["user.id"].SubgraphIDs, + "Query.user argument should be attributed to users-subgraph") + assert.False(t, argumentMap["user.id"].IsNull) + + // Nested level 1: User.friends(limit:) -> users-subgraph + require.Contains(t, argumentMap, "friends.limit", "Should track User.friends(limit:)") + assert.Equal(t, "User", argumentMap["friends.limit"].TypeName) + assert.Equal(t, "Int", argumentMap["friends.limit"].NamedType) + assert.Equal(t, []string{"users-subgraph"}, argumentMap["friends.limit"].SubgraphIDs, + "User.friends.limit argument should be attributed to users-subgraph") + assert.False(t, argumentMap["friends.limit"].IsNull) + + // Nested level 1: User.friends(filter:) -> users-subgraph (input object type) + require.Contains(t, argumentMap, "friends.filter", "Should track User.friends(filter:)") + assert.Equal(t, "User", argumentMap["friends.filter"].TypeName) + assert.Equal(t, "UserFilter", argumentMap["friends.filter"].NamedType) + assert.Equal(t, []string{"users-subgraph"}, argumentMap["friends.filter"].SubgraphIDs, + "User.friends.filter argument should be attributed to users-subgraph") + assert.False(t, argumentMap["friends.filter"].IsNull) + + // Nested level 1: User.orders(status:) -> users-subgraph + require.Contains(t, argumentMap, "orders.status", "Should track User.orders(status:)") + 
assert.Equal(t, "User", argumentMap["orders.status"].TypeName) + assert.Equal(t, "OrderStatus", argumentMap["orders.status"].NamedType) + assert.Equal(t, []string{"users-subgraph"}, argumentMap["orders.status"].SubgraphIDs, + "User.orders.status argument should be attributed to users-subgraph") + assert.False(t, argumentMap["orders.status"].IsNull) + + // Nested level 1: User.orders(limit:) -> users-subgraph + require.Contains(t, argumentMap, "orders.limit", "Should track User.orders(limit:)") + assert.Equal(t, "User", argumentMap["orders.limit"].TypeName) + assert.Equal(t, "Int", argumentMap["orders.limit"].NamedType) + assert.Equal(t, []string{"users-subgraph"}, argumentMap["orders.limit"].SubgraphIDs, + "User.orders.limit argument should be attributed to users-subgraph") + assert.False(t, argumentMap["orders.limit"].IsNull) + + // ======================================== + // Verify ORDERS SUBGRAPH arguments + // ======================================== + + // Nested level 2: Order.items(category:) -> orders-subgraph + require.Contains(t, argumentMap, "items.category", "Should track Order.items(category:)") + assert.Equal(t, "Order", argumentMap["items.category"].TypeName) + assert.Equal(t, "String", argumentMap["items.category"].NamedType) + assert.Equal(t, []string{"orders-subgraph"}, argumentMap["items.category"].SubgraphIDs, + "Order.items.category argument should be attributed to orders-subgraph") + assert.False(t, argumentMap["items.category"].IsNull) + + // ======================================== + // Verify PRODUCTS SUBGRAPH arguments + // ======================================== + + // Nested level 4: Product.reviews(filter:) -> products-subgraph + require.Contains(t, argumentMap, "reviews.filter", "Should track Product.reviews(filter:)") + assert.Equal(t, "Product", argumentMap["reviews.filter"].TypeName) + assert.Equal(t, "ReviewFilter", argumentMap["reviews.filter"].NamedType) + assert.Equal(t, []string{"products-subgraph"}, 
argumentMap["reviews.filter"].SubgraphIDs, + "Product.reviews.filter argument should be attributed to products-subgraph") + assert.False(t, argumentMap["reviews.filter"].IsNull) + + // Nested level 4: Product.reviews(limit:) -> products-subgraph + require.Contains(t, argumentMap, "reviews.limit", "Should track Product.reviews(limit:)") + assert.Equal(t, "Product", argumentMap["reviews.limit"].TypeName) + assert.Equal(t, "Int", argumentMap["reviews.limit"].NamedType) + assert.Equal(t, []string{"products-subgraph"}, argumentMap["reviews.limit"].SubgraphIDs, + "Product.reviews.limit argument should be attributed to products-subgraph") + assert.False(t, argumentMap["reviews.limit"].IsNull) + + // Nested level 5: Review.comments(first:) -> products-subgraph + require.Contains(t, argumentMap, "comments.first", "Should track Review.comments(first:)") + assert.Equal(t, "Review", argumentMap["comments.first"].TypeName) + assert.Equal(t, "Int", argumentMap["comments.first"].NamedType) + assert.Equal(t, []string{"products-subgraph"}, argumentMap["comments.first"].SubgraphIDs, + "Review.comments.first argument should be attributed to products-subgraph") + assert.False(t, argumentMap["comments.first"].IsNull) + + // Nested level 5: Review.comments(sortBy:) -> products-subgraph + require.Contains(t, argumentMap, "comments.sortBy", "Should track Review.comments(sortBy:)") + assert.Equal(t, "Review", argumentMap["comments.sortBy"].TypeName) + assert.Equal(t, "String", argumentMap["comments.sortBy"].NamedType) + assert.Equal(t, []string{"products-subgraph"}, argumentMap["comments.sortBy"].SubgraphIDs, + "Review.comments.sortBy argument should be attributed to products-subgraph") + assert.False(t, argumentMap["comments.sortBy"].IsNull) + + // ======================================== + // Verify INPUT TYPE subgraph attribution + // ======================================== + + // UserFilter should be attributed to users-subgraph (used by User.friends) + require.Contains(t, inputMap, 
"UserFilter", "Should track UserFilter input type") + assert.Equal(t, "UserFilter", inputMap["UserFilter"].NamedType) + assert.Equal(t, []string{"users-subgraph"}, inputMap["UserFilter"].SubgraphIDs, + "UserFilter should be attributed to users-subgraph") + assert.False(t, inputMap["UserFilter"].IsNull) + + // UserFilter.minAge field + require.Contains(t, inputMap, "UserFilter.minAge", "Should track UserFilter.minAge field") + assert.Equal(t, "Int", inputMap["UserFilter.minAge"].NamedType) + assert.Equal(t, "UserFilter", inputMap["UserFilter.minAge"].TypeName) + assert.Equal(t, []string{"users-subgraph"}, inputMap["UserFilter.minAge"].SubgraphIDs, + "UserFilter.minAge should be attributed to users-subgraph") + + // UserFilter.verified field + require.Contains(t, inputMap, "UserFilter.verified", "Should track UserFilter.verified field") + assert.Equal(t, "Boolean", inputMap["UserFilter.verified"].NamedType) + assert.Equal(t, "UserFilter", inputMap["UserFilter.verified"].TypeName) + assert.Equal(t, []string{"users-subgraph"}, inputMap["UserFilter.verified"].SubgraphIDs, + "UserFilter.verified should be attributed to users-subgraph") + + // ReviewFilter should be attributed to products-subgraph (used by Product.reviews) + require.Contains(t, inputMap, "ReviewFilter", "Should track ReviewFilter input type") + assert.Equal(t, "ReviewFilter", inputMap["ReviewFilter"].NamedType) + assert.Equal(t, []string{"products-subgraph"}, inputMap["ReviewFilter"].SubgraphIDs, + "ReviewFilter should be attributed to products-subgraph") + assert.False(t, inputMap["ReviewFilter"].IsNull) + + // ReviewFilter.minRating field + require.Contains(t, inputMap, "ReviewFilter.minRating", "Should track ReviewFilter.minRating field") + assert.Equal(t, "Int", inputMap["ReviewFilter.minRating"].NamedType) + assert.Equal(t, "ReviewFilter", inputMap["ReviewFilter.minRating"].TypeName) + assert.Equal(t, []string{"products-subgraph"}, inputMap["ReviewFilter.minRating"].SubgraphIDs, + 
"ReviewFilter.minRating should be attributed to products-subgraph") + + // ReviewFilter.verified field + require.Contains(t, inputMap, "ReviewFilter.verified", "Should track ReviewFilter.verified field") + assert.Equal(t, "Boolean", inputMap["ReviewFilter.verified"].NamedType) + assert.Equal(t, "ReviewFilter", inputMap["ReviewFilter.verified"].TypeName) + assert.Equal(t, []string{"products-subgraph"}, inputMap["ReviewFilter.verified"].SubgraphIDs, + "ReviewFilter.verified should be attributed to products-subgraph") + + // ======================================== + // Verify ENUM usage subgraph attribution + // ======================================== + + // OrderStatus enum used by User.orders should be attributed to users-subgraph + var orderStatusUsage *graphqlmetricsv1.InputUsageInfo + for _, input := range inputUsageInfo { + if input.NamedType == "OrderStatus" && len(input.EnumValues) > 0 { + orderStatusUsage = input + break + } + } + require.NotNil(t, orderStatusUsage, "Should track OrderStatus enum usage") + assert.Equal(t, []string{"users-subgraph"}, orderStatusUsage.SubgraphIDs, + "OrderStatus enum should be attributed to users-subgraph (used by User.orders)") + assert.Contains(t, orderStatusUsage.EnumValues, "SHIPPED") + + // ======================================== + // Verify NO CROSS-CONTAMINATION + // ======================================== + + // Ensure users-subgraph arguments don't have products-subgraph or orders-subgraph + for key, arg := range argumentMap { + if arg.TypeName == "User" { + assert.NotContains(t, arg.SubgraphIDs, "products-subgraph", + "User field argument %s should not have products-subgraph", key) + assert.NotContains(t, arg.SubgraphIDs, "orders-subgraph", + "User field argument %s should not have orders-subgraph", key) + } + if arg.TypeName == "Product" || arg.TypeName == "Review" { + assert.NotContains(t, arg.SubgraphIDs, "users-subgraph", + "Product/Review field argument %s should not have users-subgraph", key) + 
assert.NotContains(t, arg.SubgraphIDs, "orders-subgraph",
+				"Product/Review field argument %s should not have orders-subgraph", key)
+		}
+		if arg.TypeName == "Order" || arg.TypeName == "OrderItem" {
+			assert.NotContains(t, arg.SubgraphIDs, "users-subgraph",
+				"Order/OrderItem field argument %s should not have users-subgraph", key)
+			assert.NotContains(t, arg.SubgraphIDs, "products-subgraph",
+				"Order/OrderItem field argument %s should not have products-subgraph", key)
+		}
+	}
+	})
+}
+
+// TestNilVariablesHandling verifies that nil variables are handled gracefully without panicking.
+// This is a defensive test to ensure the API doesn't crash when callers pass nil for variables.
+func TestNilVariablesHandling(t *testing.T) {
+	// Minimal schema: one query field taking an optional input object, so the
+	// nil-variables path exercises both input-type and argument usage tracking.
+	schema := `
+	schema {
+		query: Query
+	}
+
+	type Query {
+		findEmployees(criteria: SearchInput): [Employee!]!
+	}
+
+	type Employee {
+		id: ID!
+	}
+
+	input SearchInput {
+		department: String
+		minAge: Int
+	}
+	`
+
+	// The operation declares $criteria but the test never supplies variables,
+	// which is exactly the condition under scrutiny.
+	operation := `
+	query FindEmployees($criteria: SearchInput) {
+		findEmployees(criteria: $criteria) {
+			id
+		}
+	}
+	`
+
+	def, rep := astparser.ParseGraphqlDocumentString(schema)
+	require.False(t, rep.HasErrors())
+	op, rep := astparser.ParseGraphqlDocumentString(operation)
+	require.False(t, rep.HasErrors())
+	err := asttransform.MergeDefinitionWithBaseSchema(&def)
+	require.NoError(t, err)
+
+	report := &operationreport.Report{}
+	// NOTE(review): NewNormalizer(true, true) — the two flags are presumably
+	// removeFragmentDefinitions/extractVariables; confirm against astnormalization docs.
+	norm := astnormalization.NewNormalizer(true, true)
+	norm.NormalizeOperation(&op, &def, report)
+	require.False(t, report.HasErrors())
+
+	valid := astvalidation.DefaultOperationValidator()
+	valid.Validate(&op, &def, report)
+	require.False(t, report.HasErrors())
+
+	// Single-subgraph data source so usage can be attributed to "employees-subgraph".
+	// FakeFactory is a test helper defined elsewhere in this package; it carries the
+	// upstream schema for the planner.
+	dsCfg, err := plan.NewDataSourceConfiguration(
+		"employees-subgraph",
+		&FakeFactory[any]{upstreamSchema: &def},
+		&plan.DataSourceMetadata{
+			RootNodes: []plan.TypeField{
+				{TypeName: "Query", FieldNames: []string{"findEmployees"}},
+			},
+			ChildNodes: []plan.TypeField{
+				{TypeName: "Employee", FieldNames: []string{"id"}},
+			},
+		},
+		// NOTE(review): last argument intentionally nil here — presumably optional
+		// custom configuration not needed for this test; verify against the
+		// plan.NewDataSourceConfiguration signature.
+		nil,
+	)
+	require.NoError(t, err)
+
+	planner, err := plan.NewPlanner(plan.Configuration{
+		DisableResolveFieldPositions: true,
+		DataSources: []plan.DataSource{dsCfg},
+	})
+	require.NoError(t, err)
+
+	generatedPlan := planner.Plan(&op, &def, "FindEmployees", report)
+	require.False(t, report.HasErrors())
+
+	// Test with nil variables - should not panic
+	t.Run("nil variables for GetInputUsageInfo", func(t *testing.T) {
+		// Third argument is the variables payload — deliberately nil.
+		inputUsageInfo, err := GetInputUsageInfo(&op, &def, nil, generatedPlan, nil)
+		require.NoError(t, err)
+
+		// Should track SearchInput as implicitly null since variable not provided
+		var searchInputUsage *graphqlmetricsv1.InputUsageInfo
+		for _, input := range inputUsageInfo {
+			if input.NamedType == "SearchInput" && len(input.Path) == 1 {
+				searchInputUsage = input
+				break
+			}
+		}
+
+		require.NotNil(t, searchInputUsage, "Should track SearchInput even with nil variables")
+		assert.Equal(t, "SearchInput", searchInputUsage.NamedType)
+		assert.True(t, searchInputUsage.IsNull, "SearchInput should be null when variables is nil")
+		assert.Equal(t, []string{"employees-subgraph"}, searchInputUsage.SubgraphIDs)
+	})
+
+	t.Run("nil variables for GetArgumentUsageInfo", func(t *testing.T) {
+		// Should not panic
+		argumentUsageInfo, err := GetArgumentUsageInfo(&op, &def, nil, generatedPlan, nil)
+		require.NoError(t, err)
+
+		// Should track the criteria argument
+		require.Len(t, argumentUsageInfo, 1)
+		assert.Equal(t, "SearchInput", argumentUsageInfo[0].NamedType)
+		assert.Equal(t, []string{"findEmployees", "criteria"}, argumentUsageInfo[0].Path)
+		// With nil variables, we can't determine if the variable value is null
+		// so IsNull will be false (default behavior when variable can't be resolved)
+		assert.False(t, argumentUsageInfo[0].IsNull)
+	})
+}
+
 func prettyJSON(t *testing.T, v interface{}) string {
 	b, err := json.MarshalIndent(v, "", "  ")
 	require.NoError(t, err)