diff --git a/.github/renovate.json5 b/.github/renovate.json5 index 74840301..4ccbaf68 100644 --- a/.github/renovate.json5 +++ b/.github/renovate.json5 @@ -1,11 +1,19 @@ { - $schema: "https://docs.renovatebot.com/renovate-schema.json", + extends: [ + "github>apollographql/renovate-config-apollo-open-source:default.json5", + "github>Turbo87/renovate-config//rust/updateToolchain", + ], packageRules: [ { enabled: false, matchPackageNames: ["*"], }, ], + // Automating Nix upgrades is currently in beta and opt-in only. + // https://docs.renovatebot.com/modules/manager/nix/ + nix: { + enabled: true, + }, vulnerabilityAlerts: { enabled: true, }, diff --git a/.github/workflows/sync-develop.yml b/.github/workflows/sync-develop.yml index dde7bd03..8e1e7fbe 100644 --- a/.github/workflows/sync-develop.yml +++ b/.github/workflows/sync-develop.yml @@ -48,34 +48,39 @@ jobs: HEAD_BRANCH: ${{ (github.event_name == 'workflow_dispatch' && inputs.head_branch) || 'main' }} BASE_BRANCH: ${{ (github.event_name == 'workflow_dispatch' && inputs.base_branch) || 'develop' }} SOURCE_PR: ${{ (github.event_name == 'pull_request' && github.event.pull_request.number) || inputs.source_pr_number || '' }} + GH_TOKEN: ${{ secrets.GH_PAT }} steps: - uses: actions/checkout@v4 with: fetch-depth: 0 + token: ${{ secrets.GH_PAT }} + + - name: Configure git author + run: | + git config --local user.name "Apollo Bot" + git config --local user.email "svc-apollo-bot-2@apollographql.com" # Generate branch name from PR# when available, otherwise use first 7 commit SHA characters - name: Compute branch/name metadata id: meta run: | - if [ -n "${SOURCE_PR}" ]; then - echo "branch=sync/${HEAD_BRANCH}-into-${BASE_BRANCH}-pr-${SOURCE_PR}" >> $GITHUB_OUTPUT - echo "title=Sync ${HEAD_BRANCH} → ${BASE_BRANCH} (PR #${SOURCE_PR})" >> $GITHUB_OUTPUT - echo "body=Auto-opened to merge \`${HEAD_BRANCH}\` into \`${BASE_BRANCH}\`. Source PR: #${SOURCE_PR}." >> $GITHUB_OUTPUT - else - short_sha=${GITHUB_SHA::7} - echo "branch=sync/${HEAD_BRANCH}-into-${BASE_BRANCH}-${short_sha}" >> $GITHUB_OUTPUT - echo "title=Sync ${HEAD_BRANCH} → ${BASE_BRANCH} (${short_sha})" >> $GITHUB_OUTPUT - echo "body=Auto-opened to merge \`${HEAD_BRANCH}\` into \`${BASE_BRANCH}\` at \`${GITHUB_SHA}\`." >> $GITHUB_OUTPUT - fi + pr=${{ github.event.pull_request.number }} + echo "sync_branch=sync/main-into-develop-pr-${pr}" >> $GITHUB_OUTPUT + echo "title_sync=Sync main → develop (PR #${pr})" >> $GITHUB_OUTPUT + echo "body_sync=Auto-opened after merging \`${{ github.event.pull_request.head.ref }}\` into \`main\`. Source PR: #${pr}." >> $GITHUB_OUTPUT + echo "conflict_branch=conflict/main-into-develop-pr-${pr}" >> $GITHUB_OUTPUT + echo "title_conflict=Sync main → develop (resolve conflicts)" >> $GITHUB_OUTPUT + echo "body_conflict=Opened from a copy of \`main\` so conflicts can be resolved without pushing to a protected branch." >> $GITHUB_OUTPUT # Short-lived sync branch from develop and merge main into it (do NOT rebase) # use +e to stop errors from short-circuiting the script - name: Prepare sync branch id: prep run: | + set -e git fetch origin "${BASE_BRANCH}" "${HEAD_BRANCH}" - git switch -c "${{ steps.meta.outputs.branch }}" "origin/${BASE_BRANCH}" + git switch -c "${{ steps.meta.outputs.sync_branch }}" "origin/${BASE_BRANCH}" set +e git merge --no-ff "origin/${HEAD_BRANCH}" rc=$? 
@@ -83,34 +88,103 @@ jobs: git add -A || true git commit -m "WIP: merge ${HEAD_BRANCH} into ${BASE_BRANCH} via ${{ steps.meta.outputs.branch }}" || true git push origin HEAD + + right=$(git rev-list --count --right-only "origin/${BASE_BRANCH}...HEAD") + echo "merge_status=$rc" >> "$GITHUB_OUTPUT" + echo "sync_right=$right" >> "$GITHUB_OUTPUT" + echo "Merge exit=$rc, sync branch ahead-by=$right" - # Open the PR targeting develop - - name: Open PR to develop - id: syncpr - uses: peter-evans/create-pull-request@v6 - with: - branch: ${{ steps.meta.outputs.branch }} - base: ${{ env.BASE_BRANCH }} - title: ${{ steps.meta.outputs.title }} - body: | - ${{ steps.meta.outputs.body }} + # If no merge conflicts and there are changes, open the PR targeting develop + - name: Open clean PR to develop + id: sync_pr + if: ${{ steps.prep.outputs.merge_status == '0' && steps.prep.outputs.sync_right != '0' }} + run: | + # Avoid duplicate PRs + existing=$(gh pr list --base "${BASE_BRANCH}" --head "${{ steps.meta.outputs.sync_branch }}" --state open --json number --jq '.[0].number' || true) + if [ -n "$existing" ] && [ "$existing" != "null" ]; then + echo "pr_number=$existing" >> "$GITHUB_OUTPUT" + url=$(gh pr view "$existing" --json url --jq .url) + echo "pr_url=$url" >> "$GITHUB_OUTPUT" + exit 0 + fi - Merge status: ${{ steps.prep.outputs.merge_status == '0' && 'clean ✅' || 'conflicts ❗' }} - labels: ${{ steps.prep.outputs.merge_status == '0' && 'back-merge,automation' || 'back-merge,automation,conflicts' }} + gh pr create \ + --base "${BASE_BRANCH}" \ + --head "${{ steps.meta.outputs.sync_branch }}" \ + --title "${{ steps.meta.outputs.title_sync }}" \ + --body "${{ steps.meta.outputs.body_sync }} (created via gh CLI)" \ + --label back-merge \ + --label automation + + # Emit outputs for later steps + gh pr view "${{ steps.meta.outputs.sync_branch }}" \ + --json number,url | jq -r '"pr_number=\(.number)\npr_url=\(.url)"' >> "$GITHUB_OUTPUT" + + # If the merge hit conflicts, open a DIRECT PR: HEAD_BRANCH -> BASE_BRANCH so conflicts can be resolved prior to merge + - name: Open conflict PR + id: conflict_pr + if: ${{ steps.prep.outputs.merge_status != '0' }} + run: | + set -e + git fetch origin "${HEAD_BRANCH}" "${BASE_BRANCH}" + + git switch -c "${{ steps.meta.outputs.conflict_branch }}" "origin/${HEAD_BRANCH}" + git push -u origin HEAD + + # Skip if no diff between conflict branch and base (should be unlikely) + right=$(git rev-list --right-only --count "origin/${BASE_BRANCH}...origin/${{ steps.meta.outputs.conflict_branch }}") + if [ "$right" -eq 0 ]; then + echo "No diff between ${HEAD_BRANCH} and ${BASE_BRANCH}; nothing to open."
+ exit 0 + fi + + # Reuse existing open PR if present + existing=$(gh pr list --base "${BASE_BRANCH}" --head "${{ steps.meta.outputs.conflict_branch }}" --state open --json number --jq '.[0].number' || true) + if [ -n "$existing" ] && [ "$existing" != "null" ]; then + echo "pr_number=$existing" >> "$GITHUB_OUTPUT" + url=$(gh pr view "$existing" --json url --jq .url) + echo "pr_url=$url" >> "$GITHUB_OUTPUT" + exit 0 + fi + + gh pr create \ + --base "${BASE_BRANCH}" \ + --head "${{ steps.meta.outputs.conflict_branch }}" \ + --title "${{ steps.meta.outputs.title_conflict }}" \ + --body "${{ steps.meta.outputs.body_conflict }}" \ + --label back-merge \ + --label automation \ + --label conflicts + + gh pr view "${{ steps.meta.outputs.conflict_branch }}" \ + --json number,url | jq -r '"pr_number=\(.number)\npr_url=\(.url)"' >> "$GITHUB_OUTPUT" # Comment back on the ORIGINAL merged PR with a link to the sync PR - name: Comment on source PR with sync PR link - if: github.event_name == 'pull_request' && steps.syncpr.outputs.pull-request-number != '' + if: ${{ env.SOURCE_PR != '' && (steps.sync_pr.outputs.pr_number != '' || steps.conflict_pr.outputs.pr_number != '') }} uses: actions/github-script@v7 with: script: | + const owner = context.repo.owner; + const repo = context.repo.repo; const issue_number = Number(process.env.SOURCE_PR); - const syncUrl = `${{ toJson(steps.syncpr.outputs['pull-request-url']) }}`.replace(/^"|"$/g, ''); - const body = `Opened sync PR **${process.env.HEAD_BRANCH} → ${process.env.BASE_BRANCH}**: ${syncUrl}`; - await github.rest.issues.createComment({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number, - body, - }); + + const hadConflicts = '${{ steps.prep.outputs.merge_status }}' !== '0'; + const syncUrl = '${{ steps.sync_pr.outputs.pr_url || steps.conflict_pr.outputs.pr_url }}'; + const head = process.env.HEAD_BRANCH; + const base = process.env.BASE_BRANCH; + + const status = hadConflicts ? 'conflicts ❗' : 'clean ✅'; + const note = hadConflicts + ? 'Opened from a copy of main so conflicts can be resolved safely.' + : 'Opened from a sync branch created off develop.'; + + const body = [ + `Opened sync PR **${head} → ${base}**: ${syncUrl}`, + ``, + `Merge status: **${status}**`, + note + ].join('\n'); + + await github.rest.issues.createComment({ owner, repo, issue_number, body }); \ No newline at end of file diff --git a/.github/workflows/verify-changeset.yml b/.github/workflows/verify-changeset.yml index 648de58b..bad4a44e 100644 --- a/.github/workflows/verify-changeset.yml +++ b/.github/workflows/verify-changeset.yml @@ -4,6 +4,8 @@ on: branches-ignore: - main - release/** + - conflict/* + - sync/* paths-ignore: - '.github/**' - '.cargo/**' @@ -17,7 +19,7 @@ on: jobs: verify-changeset: - if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip-changeset') }} + if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip-changeset') && !startsWith(github.head_ref, 'sync/') && !startsWith(github.head_ref, 'conflict/') }} name: Verify runs-on: ubuntu-24.04 permissions: diff --git a/CHANGELOG.md b/CHANGELOG.md index fbae685f..598dafb7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,16 @@ All notable changes to this project will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+# [0.7.3] - 2025-08-25 + +## 🐛 Fixes + +### fix: generate openAI-compatible json schemas for list types - @DaleSeo PR #272 + +The MCP server is generating JSON schemas that don't match OpenAI's function calling specification. It puts `oneOf` at the array level instead of using `items` to define the JSON schemas for the GraphQL list types. While some other LLMs are more flexible about this, it technically violates the [JSON Schema specification](https://json-schema.org/understanding-json-schema/reference/array) that OpenAI strictly follows. + +This PR updates the list type handling logic to move `oneOf` inside `items` for GraphQL list types. + # [0.7.2] - 2025-08-19 ## 🚀 Features diff --git a/Cargo.lock b/Cargo.lock index d9571bdd..83ee7849 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -174,7 +174,7 @@ dependencies = [ [[package]] name = "apollo-mcp-registry" -version = "0.7.2" +version = "0.7.3" dependencies = [ "derive_more", "educe", @@ -202,7 +202,7 @@ dependencies = [ [[package]] name = "apollo-mcp-server" -version = "0.7.2" +version = "0.7.3" dependencies = [ "anyhow", "apollo-compiler", @@ -255,7 +255,7 @@ dependencies = [ [[package]] name = "apollo-schema-index" -version = "0.7.2" +version = "0.7.3" dependencies = [ "apollo-compiler", "enumset", diff --git a/Cargo.toml b/Cargo.toml index ee061227..841c849a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,7 +8,7 @@ members = [ [workspace.package] authors = ["Apollo "] -version = "0.7.2" +version = "0.7.3" [workspace.dependencies] apollo-compiler = "1.27.0" diff --git a/crates/apollo-mcp-server/src/graphql.rs b/crates/apollo-mcp-server/src/graphql.rs index 5bd69833..cab09c62 100644 --- a/crates/apollo-mcp-server/src/graphql.rs +++ b/crates/apollo-mcp-server/src/graphql.rs @@ -187,7 +187,7 @@ mod test { "extensions": { "clientLibrary": { "name":"mcp", - "version":"0.7.2" + "version":"0.7.3" } }, "operationName":"mock_operation" @@ -233,7 +233,7 @@ mod test { }, "clientLibrary": { "name":"mcp", - "version":"0.7.2" + "version":"0.7.3" } }, }) diff --git a/crates/apollo-mcp-server/src/operations.rs b/crates/apollo-mcp-server/src/operations.rs index 736e372f..7cec37e0 100644 --- a/crates/apollo-mcp-server/src/operations.rs +++ b/crates/apollo-mcp-server/src/operations.rs @@ -1044,25 +1044,40 @@ fn type_to_schema( custom_scalar_map, definitions, ); + let items_schema = if list_type.is_non_null() { + inner_type_schema + } else { + schema_factory( + None, + None, + None, + None, + Some(SubschemaValidation { + one_of: Some(vec![ + inner_type_schema, + Schema::Object(SchemaObject { + instance_type: Some(SingleOrVec::Single(Box::new( + InstanceType::Null, + ))), + ..Default::default() + }), + ]), + ..Default::default() + }), + None, + ) + }; + schema_factory( None, Some(InstanceType::Array), None, - list_type.is_non_null().then(|| ArrayValidation { - items: Some(SingleOrVec::Single(Box::new(inner_type_schema.clone()))), - ..Default::default() - }), - (!list_type.is_non_null()).then(|| SubschemaValidation { - one_of: Some(vec![ - inner_type_schema, - Schema::Object(SchemaObject { - instance_type: Some(SingleOrVec::Single(Box::new(InstanceType::Null))), - ..Default::default() - }), - ]), + Some(ArrayValidation { + items: Some(SingleOrVec::Single(Box::new(items_schema))), ..Default::default() }), None, + None, ) } } @@ -1272,7 +1287,7 @@ mod tests { .unwrap() .unwrap(); - insta::assert_debug_snapshot!(operation, @r###" + insta::assert_debug_snapshot!(operation, @r#" Operation { tool: Tool { name: "MutationName", @@ -1304,7 +1319,7 @@ mod tests { }, 
operation_name: "MutationName", } - "###); + "#); } #[test] @@ -1326,7 +1341,7 @@ mod tests { .unwrap() .unwrap(); - insta::assert_debug_snapshot!(operation, @r###" + insta::assert_debug_snapshot!(operation, @r#" Operation { tool: Tool { name: "MutationName", @@ -1358,7 +1373,7 @@ mod tests { }, operation_name: "MutationName", } - "###); + "#); } #[test] @@ -1381,7 +1396,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_debug_snapshot!(tool, @r###" + insta::assert_debug_snapshot!(tool, @r#" Tool { name: "QueryName", description: Some( @@ -1403,7 +1418,7 @@ mod tests { }, ), } - "###); + "#); insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" { "type": "object", @@ -1432,7 +1447,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_debug_snapshot!(tool, @r###" + insta::assert_debug_snapshot!(tool, @r#" Tool { name: "QueryName", description: Some( @@ -1458,8 +1473,8 @@ mod tests { }, ), } - "###); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + "#); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" { "type": "object", "properties": { @@ -1468,7 +1483,7 @@ mod tests { } } } - "###); + "#); } #[test] @@ -1491,7 +1506,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_debug_snapshot!(tool, @r###" + insta::assert_debug_snapshot!(tool, @r#" Tool { name: "QueryName", description: Some( @@ -1520,8 +1535,8 @@ mod tests { }, ), } - "###); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + "#); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" { "type": "object", "required": [ @@ -1533,7 +1548,7 @@ mod tests { } } } - "###); + "#); } #[test] @@ -1556,7 +1571,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_debug_snapshot!(tool, @r###" + insta::assert_debug_snapshot!(tool, @r#" Tool { name: "QueryName", description: Some( @@ -1570,14 +1585,16 @@ mod tests { "properties": Object { "id": Object { "type": String("array"), - "oneOf": Array [ - Object { - "type": String("string"), - }, - Object { - "type": String("null"), - }, - ], + "items": Object { + "oneOf": Array [ + Object { + "type": String("string"), + }, + Object { + "type": String("null"), + }, + ], + }, }, }, }, @@ -1593,8 +1610,8 @@ mod tests { }, ), } - "###); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + "#); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" { "type": "object", "required": [ @@ -1603,18 +1620,20 @@ mod tests { "properties": { "id": { "type": "array", - "oneOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] + "items": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + } } } } - "###); + "#); } #[test] @@ -1637,7 +1656,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_debug_snapshot!(tool, @r###" + insta::assert_debug_snapshot!(tool, @r#" Tool { name: "QueryName", description: Some( @@ -1669,8 +1688,8 @@ mod tests { }, ), } - "###); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + "#); + 
insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" { "type": "object", "required": [ @@ -1685,7 +1704,7 @@ mod tests { } } } - "###); + "#); } #[test] @@ -1708,7 +1727,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_debug_snapshot!(tool, @r###" + insta::assert_debug_snapshot!(tool, @r#" Tool { name: "QueryName", description: Some( @@ -1719,14 +1738,16 @@ mod tests { "properties": Object { "id": Object { "type": String("array"), - "oneOf": Array [ - Object { - "type": String("string"), - }, - Object { - "type": String("null"), - }, - ], + "items": Object { + "oneOf": Array [ + Object { + "type": String("string"), + }, + Object { + "type": String("null"), + }, + ], + }, }, }, }, @@ -1742,25 +1763,27 @@ mod tests { }, ), } - "###); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + "#); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" { "type": "object", "properties": { "id": { "type": "array", - "oneOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] + "items": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + } } } } - "###); + "#); } #[test] @@ -1783,7 +1806,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_debug_snapshot!(tool, @r###" + insta::assert_debug_snapshot!(tool, @r#" Tool { name: "QueryName", description: Some( @@ -1812,8 +1835,8 @@ mod tests { }, ), } - "###); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + "#); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" { "type": "object", "properties": { @@ -1825,7 +1848,7 @@ mod tests { } } } - "###); + "#); } #[test] @@ -1848,7 +1871,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_debug_snapshot!(tool, @r###" + insta::assert_debug_snapshot!(tool, @r#" Tool { name: "QueryName", description: Some( @@ -1859,22 +1882,26 @@ mod tests { "properties": Object { "id": Object { "type": String("array"), - "oneOf": Array [ - Object { - "type": String("array"), - "oneOf": Array [ - Object { - "type": String("string"), - }, - Object { - "type": String("null"), + "items": Object { + "oneOf": Array [ + Object { + "type": String("array"), + "items": Object { + "oneOf": Array [ + Object { + "type": String("string"), + }, + Object { + "type": String("null"), + }, + ], }, - ], - }, - Object { - "type": String("null"), - }, - ], + }, + Object { + "type": String("null"), + }, + ], + }, }, }, }, @@ -1890,33 +1917,37 @@ mod tests { }, ), } - "###); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + "#); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" { "type": "object", "properties": { "id": { "type": "array", - "oneOf": [ - { - "type": "array", - "oneOf": [ - { - "type": "string" - }, - { - "type": "null" + "items": { + "oneOf": [ + { + "type": "array", + "items": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] } - ] - }, - { - "type": "null" - } - ] + }, + { + "type": "null" + } + ] + } } } } - "###); + "#); } #[test] @@ -1939,7 +1970,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_debug_snapshot!(tool, @r###" + insta::assert_debug_snapshot!(tool, @r##" Tool { 
name: "QueryName", description: Some( @@ -1983,7 +2014,7 @@ mod tests { }, ), } - "###); + "##); } #[test] @@ -2006,7 +2037,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_debug_snapshot!(tool, @r###" + insta::assert_debug_snapshot!(tool, @r##" Tool { name: "QueryName", description: Some( @@ -2045,7 +2076,7 @@ mod tests { }, ), } - "###); + "##); } #[test] @@ -2064,7 +2095,7 @@ mod tests { false, false, ); - insta::assert_debug_snapshot!(operation, @r###" + insta::assert_debug_snapshot!(operation, @r#" Err( TooManyOperations { source_path: Some( @@ -2073,7 +2104,7 @@ mod tests { count: 2, }, ) - "###); + "#); } #[test] @@ -2123,7 +2154,7 @@ mod tests { false, false, ); - insta::assert_debug_snapshot!(operation, @r###" + insta::assert_debug_snapshot!(operation, @r#" Err( NoOperations { source_path: Some( @@ -2131,7 +2162,7 @@ mod tests { ), }, ) - "###); + "#); } #[test] @@ -2150,13 +2181,13 @@ mod tests { false, false, ); - insta::assert_debug_snapshot!(operation, @r###" + insta::assert_debug_snapshot!(operation, @r" Err( NoOperations { source_path: None, }, ) - "###); + "); } #[test] @@ -2190,7 +2221,7 @@ mod tests { .ok_or("Expected warning about unknown type in logs".to_string()) }); - insta::assert_debug_snapshot!(tool, @r###" + insta::assert_debug_snapshot!(tool, @r#" Tool { name: "QueryName", description: Some( @@ -2214,7 +2245,7 @@ mod tests { }, ), } - "###); + "#); } #[test] @@ -2248,7 +2279,7 @@ mod tests { .ok_or("Expected warning about custom scalar without map in logs".to_string()) }); - insta::assert_debug_snapshot!(tool, @r###" + insta::assert_debug_snapshot!(tool, @r##" Tool { name: "QueryName", description: Some( @@ -2279,7 +2310,7 @@ mod tests { }, ), } - "###); + "##); } #[test] @@ -2317,7 +2348,7 @@ mod tests { .ok_or("Expected warning about custom scalar missing in logs".to_string()) }); - insta::assert_debug_snapshot!(tool, @r###" + insta::assert_debug_snapshot!(tool, @r##" Tool { name: "QueryName", description: Some( @@ -2348,7 +2379,7 @@ mod tests { }, ), } - "###); + "##); } #[test] @@ -2374,7 +2405,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_debug_snapshot!(tool, @r###" + insta::assert_debug_snapshot!(tool, @r##" Tool { name: "QueryName", description: Some( @@ -2406,7 +2437,7 @@ mod tests { }, ), } - "###); + "##); } #[test] @@ -2557,7 +2588,7 @@ mod tests { insta::assert_snapshot!( operation.tool.description.unwrap(), - @r###" + @r#" Get a list of A The returned value is an array of type `A` --- @@ -2609,7 +2640,7 @@ mod tests { type Z { zzz: Int } - "### + "# ); } @@ -2644,7 +2675,7 @@ mod tests { insta::assert_snapshot!( operation.tool.description.unwrap(), - @r###"Overridden tool #description"### + @"Overridden tool #description" ); } @@ -2677,7 +2708,7 @@ mod tests { insta::assert_snapshot!( operation.tool.description.unwrap(), - @r###"The returned value is optional and has type `String`"### + @"The returned value is optional and has type `String`" ); } @@ -2702,11 +2733,11 @@ mod tests { insta::assert_snapshot!( operation.tool.description.unwrap(), - @r###" - The returned value is optional and has type `String` - --- - The returned value is optional and has type `RealEnum` - "### + @r" + The returned value is optional and has type `String` + --- + The returned value is optional and has type `RealEnum` + " ); } @@ -2731,15 +2762,16 @@ mod tests { insta::assert_snapshot!( operation.tool.description.unwrap(), - @r###" - """the description for the enum""" - enum RealEnum { - """ENUM_VALUE_1 is a 
value""" - ENUM_VALUE_1 - """ENUM_VALUE_2 is a value""" - ENUM_VALUE_2 - } - "### + @r#" + --- + """the description for the enum""" + enum RealEnum { + """ENUM_VALUE_1 is a value""" + ENUM_VALUE_1 + """ENUM_VALUE_2 is a value""" + ENUM_VALUE_2 + } + "# ); } @@ -2764,7 +2796,7 @@ mod tests { insta::assert_snapshot!( operation.tool.description.unwrap(), - @r###""### + @"" ); } @@ -2811,7 +2843,7 @@ mod tests { .unwrap() .unwrap(); - insta::assert_debug_snapshot!(operation.tool, @r###" + insta::assert_debug_snapshot!(operation.tool, @r##" Tool { name: "Test", description: Some( @@ -2854,7 +2886,7 @@ mod tests { }, ), } - "###); + "##); } #[test] @@ -2880,7 +2912,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_debug_snapshot!(tool, @r###" + insta::assert_debug_snapshot!(tool, @r#" Tool { name: "QueryName", description: Some( @@ -2906,7 +2938,7 @@ mod tests { }, ), } - "###); + "#); } #[test] @@ -2930,7 +2962,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" { "type": "object", "properties": { @@ -2940,7 +2972,7 @@ mod tests { } } } - "###); + "#); } #[test] @@ -3000,7 +3032,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" { "type": "object", "properties": { @@ -3014,7 +3046,7 @@ mod tests { } } } - "###); + "#); } #[test] @@ -3076,7 +3108,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" { "type": "object", "properties": { @@ -3086,7 +3118,7 @@ mod tests { } } } - "###); + "#); } #[test] @@ -3109,7 +3141,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" { "type": "object", "properties": { @@ -3119,7 +3151,7 @@ mod tests { } } } - "###); + "#); } #[test] @@ -3142,7 +3174,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" { "type": "object", "properties": { @@ -3152,7 +3184,7 @@ mod tests { } } } - "###); + "#); } #[test] @@ -3175,7 +3207,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" { "type": "object", "properties": { @@ -3189,7 +3221,7 @@ mod tests { } } } - "###); + "#); } #[test] @@ -3240,7 +3272,7 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + 
insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" { "type": "object", "properties": { @@ -3254,7 +3286,7 @@ mod tests { } } } - "###); + "#); } #[test] @@ -3277,11 +3309,233 @@ mod tests { .unwrap(); let tool = Tool::from(operation); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" { "type": "object", "properties": {} } - "###); + "#); + } + + #[test] + fn nullable_list_of_nullable_input_objects() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($objects: [RealInputObject]) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r##" + Tool { + name: "QueryName", + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "objects": Object { + "type": String("array"), + "items": Object { + "oneOf": Array [ + Object { + "$ref": String("#/definitions/RealInputObject"), + }, + Object { + "type": String("null"), + }, + ], + }, + }, + }, + "definitions": Object { + "RealInputObject": Object { + "type": String("object"), + "required": Array [ + String("required"), + ], + "properties": Object { + "optional": Object { + "description": String("optional is a input field that is optional"), + "type": String("string"), + }, + "required": Object { + "description": String("required is a input field that is required"), + "type": String("string"), + }, + }, + }, + }, + }, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + } + "##); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r##" + { + "type": "object", + "properties": { + "objects": { + "type": "array", + "items": { + "oneOf": [ + { + "$ref": "#/definitions/RealInputObject" + }, + { + "type": "null" + } + ] + } + } + }, + "definitions": { + "RealInputObject": { + "type": "object", + "required": [ + "required" + ], + "properties": { + "optional": { + "description": "optional is a input field that is optional", + "type": "string" + }, + "required": { + "description": "required is a input field that is required", + "type": "string" + } + } + } + } + } + "##); + } + + #[test] + fn non_nullable_list_of_non_nullable_input_objects() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($objects: [RealInputObject!]!) 
{ id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r##" + Tool { + name: "QueryName", + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "required": Array [ + String("objects"), + ], + "properties": Object { + "objects": Object { + "type": String("array"), + "items": Object { + "$ref": String("#/definitions/RealInputObject"), + }, + }, + }, + "definitions": Object { + "RealInputObject": Object { + "type": String("object"), + "required": Array [ + String("required"), + ], + "properties": Object { + "optional": Object { + "description": String("optional is a input field that is optional"), + "type": String("string"), + }, + "required": Object { + "description": String("required is a input field that is required"), + "type": String("string"), + }, + }, + }, + }, + }, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + } + "##); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r##" + { + "type": "object", + "required": [ + "objects" + ], + "properties": { + "objects": { + "type": "array", + "items": { + "$ref": "#/definitions/RealInputObject" + } + } + }, + "definitions": { + "RealInputObject": { + "type": "object", + "required": [ + "required" + ], + "properties": { + "optional": { + "description": "optional is a input field that is optional", + "type": "string" + }, + "required": { + "description": "required is a input field that is required", + "type": "string" + } + } + } + } + } + "##); } } diff --git a/docs/source/install.mdx b/docs/source/install.mdx index 86a1c281..537b2352 100644 --- a/docs/source/install.mdx +++ b/docs/source/install.mdx @@ -26,14 +26,14 @@ To download a **specific version** of Apollo MCP Server (recommended for CI envi ```bash # Note the `v` prefixing the version number -docker image pull ghcr.io/apollographql/apollo-mcp-server:v0.7.2 +docker image pull ghcr.io/apollographql/apollo-mcp-server:v0.7.3 ``` To download a specific version of Apollo MCP Server that is a release candidate: ```bash # Note the `v` prefixing the version number and the `-rc` suffix -docker image pull ghcr.io/apollographql/apollo-mcp-server:v0.7.2-rc.1 +docker image pull ghcr.io/apollographql/apollo-mcp-server:v0.7.3-rc.1 ``` @@ -65,7 +65,7 @@ To install or upgrade to a **specific version** of Apollo MCP Server (recommende ```bash # Note the `v` prefixing the version number -curl -sSL https://mcp.apollo.dev/download/nix/v0.7.2 | sh +curl -sSL https://mcp.apollo.dev/download/nix/v0.7.3 | sh ``` If your machine doesn't have the `curl` command, you can get the latest version from the [`curl` downloads page](https://curl.se/download.html). 
@@ -82,5 +82,5 @@ To install or upgrade to a **specific version** of Apollo MCP Server (recommende ```bash # Note the `v` prefixing the version number -iwr 'https://mcp.apollo.dev/download/win/v0.7.2' | iex +iwr 'https://mcp.apollo.dev/download/win/v0.7.3' | iex ``` diff --git a/scripts/nix/install.sh b/scripts/nix/install.sh index 3c7b7000..18b3c860 100755 --- a/scripts/nix/install.sh +++ b/scripts/nix/install.sh @@ -14,7 +14,7 @@ BINARY_DOWNLOAD_PREFIX="${APOLLO_MCP_SERVER_BINARY_DOWNLOAD_PREFIX:="https://git # Apollo MCP Server version defined in apollo-mcp-server's Cargo.toml # Note: Change this line manually during the release steps. -PACKAGE_VERSION="v0.7.2" +PACKAGE_VERSION="v0.7.3" download_binary_and_run_installer() { downloader --check diff --git a/scripts/windows/install.ps1 b/scripts/windows/install.ps1 index c3bf24c5..a05c0351 100644 --- a/scripts/windows/install.ps1 +++ b/scripts/windows/install.ps1 @@ -8,7 +8,7 @@ # Apollo MCP Server version defined in apollo-mcp-server's Cargo.toml # Note: Change this line manually during the release steps. -$package_version = 'v0.7.2' +$package_version = 'v0.7.3' function Install-Binary($apollo_mcp_server_install_args) { $old_erroractionpreference = $ErrorActionPreference
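The list-type schema change described in the CHANGELOG entry above, sketched from the updated `operations.rs` snapshots: for a nullable list of nullable strings (`[String]`), `oneOf` now sits under `items` rather than alongside `"type": "array"`. The `before`/`after` labels are illustrative only and not part of the generated schema.

```json
{
  "before": { "type": "array", "oneOf": [{ "type": "string" }, { "type": "null" }] },
  "after": { "type": "array", "items": { "oneOf": [{ "type": "string" }, { "type": "null" }] } }
}
```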