diff --git a/.github/renovate.json5 b/.github/renovate.json5 index 4ccbaf68..165aee89 100644 --- a/.github/renovate.json5 +++ b/.github/renovate.json5 @@ -1,22 +1,49 @@ { + // Allow for intellisense in editors + $schema: "https://docs.renovatebot.com/renovate-schema.json", + + // List of rules to apply extends: [ + // Recommended best practices from renovate itself + // See: https://docs.renovatebot.com/upgrade-best-practices/#whats-in-the-configbest-practices-preset + "config:best-practices", + + // Apply our own internal best practices + // See: https://github.com/apollographql/apollo-mcp-server/commits/main/.github/renovate.json5 "github>apollographql/renovate-config-apollo-open-source:default.json5", + + // Update to the latest rust stable version as it releases. + // See: https://github.com/Turbo87/renovate-config/blob/master/rust/updateToolchain.json "github>Turbo87/renovate-config//rust/updateToolchain", ], + + // Globally disable all automatic update PRs from renovate packageRules: [ { enabled: false, matchPackageNames: ["*"], }, ], + // Automating Nix upgrades is currently in beta and opt-in only. // https://docs.renovatebot.com/modules/manager/nix/ nix: { enabled: true, }, + + // Globally enable vulnerability alerts + // + // Note: This needs extra configuration at the repository level, which is described in the link + // below. + // + // See: https://docs.renovatebot.com/configuration-options/#vulnerabilityalerts vulnerabilityAlerts: { enabled: true, }, + + // Disable automatically updating lock files to latest versions once a week. 
+ // + // See: https://docs.renovatebot.com/configuration-options/#lockfilemaintenance lockFileMaintenance: { enabled: false, }, diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6ba78966..fb42f641 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,7 +28,7 @@ jobs: actions: write contents: read steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: ref: ${{ github.event.pull_request.head.sha }} - uses: nixbuild/nix-quick-install-action@v30 @@ -55,7 +55,7 @@ jobs: actions: write contents: read steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: ref: ${{ github.event.pull_request.head.sha }} - uses: nixbuild/nix-quick-install-action@v30 @@ -81,7 +81,7 @@ jobs: actions: write contents: read steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: ref: ${{ github.event.pull_request.head.sha }} - uses: nixbuild/nix-quick-install-action@v30 @@ -107,7 +107,7 @@ jobs: actions: write contents: read steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: ref: ${{ github.event.pull_request.head.sha }} - uses: nixbuild/nix-quick-install-action@v30 @@ -124,3 +124,22 @@ jobs: gc-max-store-size: 5G - name: Run Tests run: 'nix develop --command bash -c "cargo test"' + + coverage: + name: Run Coverage + runs-on: ubuntu-24.04 + permissions: + contents: read + steps: + - uses: actions/checkout@v5 + with: + ref: ${{ github.event.pull_request.head.sha }} + - uses: taiki-e/install-action@cargo-llvm-cov + - name: Generate code coverage + run: cargo llvm-cov --all-features --workspace --codecov --output-path codecov.json + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v5 + with: + token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos + files: codecov.json + fail_ci_if_error: true diff --git a/.github/workflows/prep-release.yml b/.github/workflows/prep-release.yml index cb992199..e0ded069 100644 --- a/.github/workflows/prep-release.yml +++ 
b/.github/workflows/prep-release.yml @@ -47,7 +47,7 @@ jobs: GH_TOKEN: ${{ secrets.GH_PAT }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 diff --git a/.github/workflows/release-bins.yml b/.github/workflows/release-bins.yml index 3f808715..c73063b1 100644 --- a/.github/workflows/release-bins.yml +++ b/.github/workflows/release-bins.yml @@ -42,7 +42,7 @@ jobs: attestations: write id-token: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: ref: ${{ github.ref }} - uses: nixbuild/nix-quick-install-action@v30 diff --git a/.github/workflows/release-container.yml b/.github/workflows/release-container.yml index 79d5dd33..64bde965 100644 --- a/.github/workflows/release-container.yml +++ b/.github/workflows/release-container.yml @@ -32,7 +32,7 @@ jobs: attestations: write id-token: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: ref: ${{ github.ref }} diff --git a/.github/workflows/sync-develop.yml b/.github/workflows/sync-develop.yml index a1e1be28..79cbc72f 100644 --- a/.github/workflows/sync-develop.yml +++ b/.github/workflows/sync-develop.yml @@ -51,7 +51,7 @@ jobs: GH_TOKEN: ${{ secrets.GH_PAT }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 token: ${{ secrets.GH_PAT }} diff --git a/.github/workflows/verify-changeset.yml b/.github/workflows/verify-changeset.yml index bad4a44e..2bac53f0 100644 --- a/.github/workflows/verify-changeset.yml +++ b/.github/workflows/verify-changeset.yml @@ -1,6 +1,7 @@ name: Verify Changeset on: pull_request: + types: [opened, reopened, synchronize, ready_for_review] branches-ignore: - main - release/** @@ -19,7 +20,7 @@ on: jobs: verify-changeset: - if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip-changeset') && !startsWith(github.head_ref, 'sync/') && !startsWith(github.head_ref, 'conflict/') }} + if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip-changeset') && 
!startsWith(github.head_ref, 'sync/') && !startsWith(github.head_ref, 'conflict/') && !github.event.pull_request.draft }} name: Verify runs-on: ubuntu-24.04 permissions: diff --git a/CHANGELOG.md b/CHANGELOG.md index 87d061bc..2a67ebda 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,34 @@ All notable changes to this project will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +# [0.8.0] - 2025-09-12 + +## ๐Ÿš€ Features + +### feat: Configuration for disabling authorization token passthrough - @swcollard PR #336 + +A new optional new MCP Server configuration parameter, `transport.auth.disable_auth_token_passthrough`, which is `false` by default, that when true, will no longer pass through validated Auth tokens to the GraphQL API. + +## ๐Ÿ›  Maintenance + +### Configure Codecov with coverage targets - @DaleSeo PR #337 + +This PR adds `codecov.yml` to set up Codecov with specific coverage targets and quality standards. It helps define clear expectations for code quality. It also includes some documentation about code coverage in `CONTRIBUTING.md` and adds the Codecov badge to `README.md`. + +### Implement Test Coverage Measurement and Reporting - @DaleSeo PR #335 + +This PR adds the bare minimum for code coverage reporting using [cargo-llvm-cov](https://crates.io/crates/cargo-llvm-cov) and integrates with [Codecov](https://www.codecov.io/). It adds a new `coverage` job to the CI workflow that generates and uploads coverage reporting in parallel with existing tests. The setup mirrors that of Router, except it uses `nextest` instead of the built-in test runner and CircleCI instead of GitHub Actions. + +### chore: update RMCP dependency ([328](https://github.com/apollographql/apollo-mcp-server/issues/328)) + +Update the RMCP dependency to the latest version, pulling in newer specification changes. 
+ +### ci: Pin stable rust version ([Issue #287](https://github.com/apollographql/apollo-mcp-server/issues/287)) + +Pins the stable version of Rust to the current latest version to ensure backwards compatibility with future versions. + + + # [0.7.5] - 2025-09-03 ## ๐Ÿ› Fixes diff --git a/CHANGELOG_SECTION.md b/CHANGELOG_SECTION.md new file mode 100644 index 00000000..139ce9fe --- /dev/null +++ b/CHANGELOG_SECTION.md @@ -0,0 +1,26 @@ +# [0.8.0] - 2025-09-12 + +## ๐Ÿš€ Features + +### feat: Configuration for disabling authorization token passthrough - @swcollard PR #336 + +A new optional new MCP Server configuration parameter, `transport.auth.disable_auth_token_passthrough`, which is `false` by default, that when true, will no longer pass through validated Auth tokens to the GraphQL API. + +## ๐Ÿ›  Maintenance + +### Configure Codecov with coverage targets - @DaleSeo PR #337 + +This PR adds `codecov.yml` to set up Codecov with specific coverage targets and quality standards. It helps define clear expectations for code quality. It also includes some documentation about code coverage in `CONTRIBUTING.md` and adds the Codecov badge to `README.md`. + +### Implement Test Coverage Measurement and Reporting - @DaleSeo PR #335 + +This PR adds the bare minimum for code coverage reporting using [cargo-llvm-cov](https://crates.io/crates/cargo-llvm-cov) and integrates with [Codecov](https://www.codecov.io/). It adds a new `coverage` job to the CI workflow that generates and uploads coverage reporting in parallel with existing tests. The setup mirrors that of Router, except it uses `nextest` instead of the built-in test runner and CircleCI instead of GitHub Actions. + +### chore: update RMCP dependency ([328](https://github.com/apollographql/apollo-mcp-server/issues/328)) + +Update the RMCP dependency to the latest version, pulling in newer specification changes. 
+ +### ci: Pin stable rust version ([Issue #287](https://github.com/apollographql/apollo-mcp-server/issues/287)) + +Pins the stable version of Rust to the current latest version to ensure backwards compatibility with future versions. + diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c25c3623..cb3a2152 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -57,4 +57,21 @@ Itโ€™s important that every piece of code in Apollo packages is reviewed by at l 2. Simplicity. Is this the simplest way to achieve the intended goal? If there are too many files, redundant functions, or complex lines of code, suggest a simpler way to do the same thing. In particular, avoid implementing an overly general solution when a simple, small, and pragmatic fix will do. 3. Testing. Please make sure that the tests ensure that the code wonโ€™t break when other stuff change around it. The error messages in the test should help identify what is broken exactly and how. The tests should test every edge case if possible. Please make sure you get as much coverage as possible. 4. No unnecessary or unrelated changes. PRs shouldnโ€™t come with random formatting changes, especially in unrelated parts of the code. If there is some refactoring that needs to be done, it should be in a separate PR from a bug fix or feature, if possible. -5. Please run `cargo test`, `cargo clippy`, and `cargo fmt` prior to creating a PR. \ No newline at end of file +5. Please run `cargo test`, `cargo clippy`, and `cargo fmt` prior to creating a PR. + +### Code Coverage + +Apollo MCP Server uses comprehensive code coverage reporting to ensure code quality and test effectiveness. +The project uses [cargo-llvm-cov](https://crates.io/crates/cargo-llvm-cov) for generating code coverage reports and [Codecov](https://www.codecov.io/) for coverage analysis and reporting. Coverage is automatically generated and reported on every pull request through GitHub Actions. 
+ +#### Coverage Targets + +The project maintains the following coverage targets, configured in `codecov.yml`: + +- **Project Coverage**: Automatically maintained - should increase overall coverage on each PR +- **Patch Coverage**: 80% - requires 80% coverage on all new/modified code + +These targets help ensure that: + +- The overall codebase coverage doesn't decrease over time +- New code is well-tested before being merged diff --git a/Cargo.lock b/Cargo.lock index a129845a..5da0ac42 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -26,6 +26,7 @@ dependencies = [ "cfg-if", "getrandom 0.3.3", "once_cell", + "serde", "version_check", "zerocopy", ] @@ -174,7 +175,7 @@ dependencies = [ [[package]] name = "apollo-mcp-registry" -version = "0.7.5" +version = "0.8.0" dependencies = [ "derive_more", "educe", @@ -202,7 +203,7 @@ dependencies = [ [[package]] name = "apollo-mcp-server" -version = "0.7.5" +version = "0.8.0" dependencies = [ "anyhow", "apollo-compiler", @@ -220,6 +221,7 @@ dependencies = [ "http", "humantime-serde", "insta", + "jsonschema", "jsonwebtoken", "jwks", "lz-str", @@ -228,7 +230,7 @@ dependencies = [ "reqwest", "rmcp", "rstest", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "thiserror 2.0.14", @@ -255,7 +257,7 @@ dependencies = [ [[package]] name = "apollo-schema-index" -version = "0.7.5" +version = "0.8.0" dependencies = [ "apollo-compiler", "enumset", @@ -437,6 +439,21 @@ version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" +[[package]] +name = "bit-set" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" + [[package]] name = "bitflags" version = "1.3.2" @@ -492,6 +509,12 @@ dependencies = [ "syn 2.0.105", ] +[[package]] +name = "borrow-or-share" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3eeab4423108c5d7c744f4d234de88d18d636100093ae04caf4825134b9c3a32" + [[package]] name = "bstr" version = "1.12.0" @@ -622,7 +645,7 @@ version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -796,7 +819,6 @@ dependencies = [ "ident_case", "proc-macro2", "quote", - "strsim", "syn 2.0.105", ] @@ -937,6 +959,15 @@ version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" +[[package]] +name = "email_address" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e079f19b08ca6239f47f8ba8509c11cf3ea30095831f7fed61441475edd8c449" +dependencies = [ + "serde", +] + [[package]] name = "encode_unicode" version = "1.0.0" @@ -1000,6 +1031,17 @@ dependencies = [ "windows-sys 0.60.2", ] +[[package]] +name = "fancy-regex" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf04c5ec15464ace8355a7b440a33aece288993475556d461154d7a62ad9947c" +dependencies = [ + "bit-set", + "regex-automata", + "regex-syntax", +] + [[package]] name = "fastdivide" version = "0.4.2" @@ -1044,6 +1086,17 @@ dependencies = [ "miniz_oxide", ] +[[package]] +name = "fluent-uri" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1918b65d96df47d3591bed19c5cca17e3fa5d0707318e4b5ef2eae01764df7e5" +dependencies = [ + "borrow-or-share", + "ref-cast", + "serde", +] + [[package]] name = "fnv" 
version = "1.0.7" @@ -1080,6 +1133,16 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fraction" +version = "0.15.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f158e3ff0a1b334408dc9fb811cd99b446986f4d8b741bb08f9df1604085ae7" +dependencies = [ + "lazy_static", + "num", +] + [[package]] name = "fs4" version = "0.8.4" @@ -1754,7 +1817,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" dependencies = [ "hermit-abi", "libc", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -1811,6 +1874,33 @@ dependencies = [ "thiserror 1.0.69", ] +[[package]] +name = "jsonschema" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d46662859bc5f60a145b75f4632fbadc84e829e45df6c5de74cfc8e05acb96b5" +dependencies = [ + "ahash", + "base64", + "bytecount", + "email_address", + "fancy-regex", + "fraction", + "idna", + "itoa", + "num-cmp", + "num-traits", + "once_cell", + "percent-encoding", + "referencing", + "regex", + "regex-syntax", + "reqwest", + "serde", + "serde_json", + "uuid-simd", +] + [[package]] name = "jsonwebtoken" version = "9.3.1" @@ -2139,6 +2229,20 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "num" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" +dependencies = [ + "num-bigint", + "num-complex", + "num-integer", + "num-iter", + "num-rational", + "num-traits", +] + [[package]] name = "num-bigint" version = "0.4.6" @@ -2149,6 +2253,21 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-cmp" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63335b2e2c34fae2fb0aa2cecfd9f0832a1e24b3b32ecec612c3426d46dc8aaa" + +[[package]] +name = "num-complex" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495" +dependencies = [ + "num-traits", +] + [[package]] name = "num-conv" version = "0.1.0" @@ -2164,6 +2283,28 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" +dependencies = [ + "num-bigint", + "num-integer", + "num-traits", +] + [[package]] name = "num-traits" version = "0.2.19" @@ -2274,6 +2415,12 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "outref" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a80800c0488c3a21695ea981a54918fbb37abf04f4d0720c453632255e2ff0e" + [[package]] name = "ownedbytes" version = "0.9.0" @@ -2644,6 +2791,20 @@ dependencies = [ "syn 2.0.105", ] +[[package]] +name = "referencing" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e9c261f7ce75418b3beadfb3f0eb1299fe8eb9640deba45ffa2cb783098697d" +dependencies = [ + "ahash", + "fluent-uri", + "once_cell", + "parking_lot", + "percent-encoding", + "serde_json", +] + [[package]] name = "regex" version = "1.11.1" @@ -2688,6 +2849,7 @@ dependencies = [ "async-compression", "base64", "bytes", + "futures-channel", "futures-core", "futures-util", "http", @@ -2734,9 +2896,9 @@ dependencies = [ [[package]] name = "rmcp" -version = "0.2.1" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37f2048a81a7ff7e8ef6bc5abced70c3d9114c8f03d85d7aaaafd9fd04f12e9e" +checksum = "41ab0892f4938752b34ae47cb53910b1b0921e55e77ddb6e44df666cab17939f" dependencies = 
[ "axum", "base64", @@ -2750,7 +2912,7 @@ dependencies = [ "pin-project-lite", "rand 0.9.2", "rmcp-macros", - "schemars 0.8.22", + "schemars", "serde", "serde_json", "sse-stream", @@ -2765,11 +2927,11 @@ dependencies = [ [[package]] name = "rmcp-macros" -version = "0.2.1" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72398e694b9f6dbb5de960cf158c8699e6a1854cb5bbaac7de0646b2005763c4" +checksum = "1827cd98dab34cade0513243c6fe0351f0f0b2c9d6825460bcf45b42804bdda0" dependencies = [ - "darling 0.20.11", + "darling 0.21.2", "proc-macro2", "quote", "serde_json", @@ -2865,7 +3027,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys 0.4.15", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -2920,45 +3082,21 @@ dependencies = [ "windows-sys 0.59.0", ] -[[package]] -name = "schemars" -version = "0.8.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615" -dependencies = [ - "chrono", - "dyn-clone", - "schemars_derive 0.8.22", - "serde", - "serde_json", -] - [[package]] name = "schemars" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" dependencies = [ + "chrono", "dyn-clone", "ref-cast", - "schemars_derive 1.0.4", + "schemars_derive", "serde", "serde_json", "url", ] -[[package]] -name = "schemars_derive" -version = "0.8.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d" -dependencies = [ - "proc-macro2", - "quote", - "serde_derive_internals", - "syn 2.0.105", -] - [[package]] name = "schemars_derive" version = "1.0.4" @@ -3458,7 +3596,7 @@ dependencies = [ "getrandom 0.3.3", "once_cell", "rustix 1.0.8", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -3949,6 +4087,17 @@ dependencies = [ 
"wasm-bindgen", ] +[[package]] +name = "uuid-simd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b082222b4f6619906941c17eb2297fff4c2fb96cb60164170522942a200bd8" +dependencies = [ + "outref", + "uuid", + "vsimd", +] + [[package]] name = "valuable" version = "0.1.1" @@ -3967,6 +4116,12 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "vsimd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" + [[package]] name = "walkdir" version = "2.5.0" @@ -4104,7 +4259,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 54fe14fd..c3c09c07 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,7 +8,11 @@ members = [ [workspace.package] authors = ["Apollo "] -version = "0.7.5" +edition = "2024" +license-file = "LICENSE" +repository = "https://github.com/apollographql/apollo-mcp-server" +rust-version = "1.89.0" +version = "0.8.0" [workspace.dependencies] apollo-compiler = "1.27.0" diff --git a/README.md b/README.md index 21718f02..196e5561 100644 --- a/README.md +++ b/README.md @@ -7,6 +7,7 @@ ![release binaries workflow status](https://img.shields.io/github/actions/workflow/status/apollographql/apollo-mcp-server/release-bins.yml?label=release%20binaries) ![release container workflow status](https://img.shields.io/github/actions/workflow/status/apollographql/apollo-mcp-server/release-container.yml?label=release%20container) ![license](https://img.shields.io/github/license/apollographql/apollo-mcp-server) 
+[![codecov](https://codecov.io/github/apollographql/apollo-mcp-server/graph/badge.svg?token=6NHuvZQ8ak)](https://codecov.io/github/apollographql/apollo-mcp-server) # Apollo MCP Server diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 00000000..eb8e5e8a --- /dev/null +++ b/codecov.yml @@ -0,0 +1,10 @@ +coverage: + status: + project: + default: + # Should increase overall coverage on each PR + target: auto + patch: + default: + # Require 80% coverage on all new/modified code + target: 80% diff --git a/crates/apollo-mcp-registry/Cargo.toml b/crates/apollo-mcp-registry/Cargo.toml index bf77a385..6a6c8565 100644 --- a/crates/apollo-mcp-registry/Cargo.toml +++ b/crates/apollo-mcp-registry/Cargo.toml @@ -1,10 +1,12 @@ [package] name = "apollo-mcp-registry" -version.workspace = true -edition = "2024" authors.workspace = true -license-file = "../LICENSE" -repository = "https://github.com/apollographql/apollo-mcp-server" +edition.workspace = true +license-file.workspace = true +repository.workspace = true +rust-version.workspace = true +version.workspace = true + description = "Registry providing schema and operations to the MCP Server" [dependencies] diff --git a/crates/apollo-mcp-registry/src/uplink.rs b/crates/apollo-mcp-registry/src/uplink.rs index 2e52e589..015fc172 100644 --- a/crates/apollo-mcp-registry/src/uplink.rs +++ b/crates/apollo-mcp-registry/src/uplink.rs @@ -402,12 +402,11 @@ where .send() .await .inspect_err(|e| { - if let Some(hyper_err) = e.source() { - if let Some(os_err) = hyper_err.source() { - if os_err.to_string().contains("tcp connect error: Cannot assign requested address (os error 99)") { - tracing::warn!("If your MCP server is executing within a kubernetes pod, this failure may be caused by istio-proxy injection. 
See https://github.com/apollographql/router/issues/3533 for more details about how to solve this"); - } - } + if let Some(hyper_err) = e.source() && + let Some(os_err) = hyper_err.source() && + os_err.to_string().contains("tcp connect error: Cannot assign requested address (os error 99)") + { + tracing::warn!("If your MCP server is executing within a kubernetes pod, this failure may be caused by istio-proxy injection. See https://github.com/apollographql/router/issues/3533 for more details about how to solve this"); } })?; tracing::debug!("uplink response {:?}", res); diff --git a/crates/apollo-mcp-registry/src/uplink/schema.rs b/crates/apollo-mcp-registry/src/uplink/schema.rs index b9fc9d4e..bd3c83a1 100644 --- a/crates/apollo-mcp-registry/src/uplink/schema.rs +++ b/crates/apollo-mcp-registry/src/uplink/schema.rs @@ -181,12 +181,6 @@ impl SchemaSource { } } -#[derive(thiserror::Error, Debug)] -enum FetcherError { - #[error("failed to build http client")] - InitializationError(#[from] reqwest::Error), -} - // Encapsulates fetching the schema from the first viable url. // It will try each url in order until it finds one that works. 
#[allow(clippy::unwrap_used)] // TODO - existing unwrap from router code diff --git a/crates/apollo-mcp-registry/src/uplink/schema/event.rs b/crates/apollo-mcp-registry/src/uplink/schema/event.rs index c987a946..1c132295 100644 --- a/crates/apollo-mcp-registry/src/uplink/schema/event.rs +++ b/crates/apollo-mcp-registry/src/uplink/schema/event.rs @@ -1,6 +1,7 @@ -use crate::uplink::schema::SchemaState; +use super::SchemaState; use std::fmt::Debug; use std::fmt::Formatter; +use std::fmt::Result; /// Schema events pub enum Event { @@ -12,7 +13,7 @@ pub enum Event { } impl Debug for Event { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + fn fmt(&self, f: &mut Formatter) -> Result { match self { Event::UpdateSchema(_) => { write!(f, "UpdateSchema()") @@ -23,3 +24,28 @@ impl Debug for Event { } } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_debug_event_no_more_schema() { + let event = Event::NoMoreSchema; + let output = format!("{:?}", event); + assert_eq!(output, "NoMoreSchema"); + } + + #[test] + fn test_debug_redacts_update_schema() { + let event = Event::UpdateSchema(SchemaState { + sdl: "type Query { hello: String }".to_string(), + launch_id: Some("test-launch-123".to_string()), + }); + + let output = format!("{:?}", event); + assert_eq!(output, "UpdateSchema()"); + assert!(!output.contains("type Query")); + assert!(!output.contains("test-launch-123")); + } +} diff --git a/crates/apollo-mcp-server/Cargo.toml b/crates/apollo-mcp-server/Cargo.toml index 0c99746d..3fe245b7 100644 --- a/crates/apollo-mcp-server/Cargo.toml +++ b/crates/apollo-mcp-server/Cargo.toml @@ -1,9 +1,12 @@ [package] name = "apollo-mcp-server" -version.workspace = true authors.workspace = true -edition = "2024" -license-file = "../LICENSE" +edition.workspace = true +license-file.workspace = true +repository.workspace = true +rust-version.workspace = true +version.workspace = true + default-run = "apollo-mcp-server" [dependencies] @@ -21,12 +24,13 @@ 
futures.workspace = true headers = "0.4.1" http = "1.3.1" humantime-serde = "1.1.1" +jsonschema = "0.33.0" jsonwebtoken = "9" jwks = "0.4.0" lz-str = "0.2.1" regex = "1.11.1" reqwest.workspace = true -rmcp = { version = "0.2", features = [ +rmcp = { version = "0.6", features = [ "server", "transport-io", "transport-sse-server", diff --git a/crates/apollo-mcp-server/src/auth.rs b/crates/apollo-mcp-server/src/auth.rs index fc1f4bd6..1c802828 100644 --- a/crates/apollo-mcp-server/src/auth.rs +++ b/crates/apollo-mcp-server/src/auth.rs @@ -46,6 +46,10 @@ pub struct Config { /// Supported OAuth scopes by this resource server pub scopes: Vec, + + /// Whether to disable the auth token passthrough to upstream API + #[serde(default)] + pub disable_auth_token_passthrough: bool, } impl Config { diff --git a/crates/apollo-mcp-server/src/custom_scalar_map.rs b/crates/apollo-mcp-server/src/custom_scalar_map.rs index 69cd820a..746e1b23 100644 --- a/crates/apollo-mcp-server/src/custom_scalar_map.rs +++ b/crates/apollo-mcp-server/src/custom_scalar_map.rs @@ -1,8 +1,6 @@ use crate::errors::ServerError; -use rmcp::{ - schemars::schema::{Schema, SchemaObject, SingleOrVec}, - serde_json, -}; +use rmcp::serde_json; +use schemars::Schema; use std::{collections::HashMap, path::PathBuf, str::FromStr}; impl FromStr for CustomScalarMap { @@ -14,26 +12,23 @@ impl FromStr for CustomScalarMap { serde_json::from_str(string_custom_scalar_file) .map_err(ServerError::CustomScalarConfig)?; - // Validate each of the values in the map and coerce into schemars::schema::SchemaObject + // Try to parse each as a schema let custom_scalar_map = parsed_custom_scalar_file .into_iter() .map(|(key, value)| { - let value_string = value.to_string(); - // The only way I could find to do this was to reparse it. 
- let schema: SchemaObject = serde_json::from_str(value_string.as_str()) - .map_err(ServerError::CustomScalarConfig)?; - - if has_invalid_schema(&Schema::Object(schema.clone())) { - Err(ServerError::CustomScalarJsonSchema(value)) - } else { - Ok((key, schema)) + // The schemars crate does not enforce schema validation anymore, so we use jsonschema + // to ensure that the supplied schema is valid. + if let Err(e) = jsonschema::meta::validate(&value) { + return Err(ServerError::CustomScalarJsonSchema(e.to_string())); } + + Schema::try_from(value.clone()) + .map(|schema| (key, schema)) + .map_err(|e| ServerError::CustomScalarJsonSchema(e.to_string())) }) .collect::>()?; - // panic!("hello2! {:?}", parsed_custom_scalar_file); - - Ok::<_, ServerError>(CustomScalarMap(custom_scalar_map)) + Ok(CustomScalarMap(custom_scalar_map)) } } @@ -49,44 +44,19 @@ impl TryFrom<&PathBuf> for CustomScalarMap { } #[derive(Debug, Clone)] -pub struct CustomScalarMap(HashMap); +pub struct CustomScalarMap(HashMap); impl CustomScalarMap { - pub fn get(&self, key: &str) -> Option<&SchemaObject> { + pub fn get(&self, key: &str) -> Option<&Schema> { self.0.get(key) } } -// Unknown keys will be put into "extensions" in the schema object, check for those and consider those invalid -fn has_invalid_schema(schema: &Schema) -> bool { - match schema { - Schema::Object(schema_object) => { - !schema_object.extensions.is_empty() - || schema_object - .object - .as_ref() - .is_some_and(|object| object.properties.values().any(has_invalid_schema)) - || schema_object.array.as_ref().is_some_and(|object| { - object.items.as_ref().is_some_and(|items| match items { - SingleOrVec::Single(item) => has_invalid_schema(item), - SingleOrVec::Vec(items) => items.iter().any(has_invalid_schema), - }) - }) - } - Schema::Bool(_) => false, - } -} - #[cfg(test)] mod tests { - use std::{ - collections::{BTreeMap, HashMap}, - str::FromStr, - }; + use std::{collections::HashMap, str::FromStr}; - use rmcp::schemars::schema::{ - 
InstanceType, ObjectValidation, Schema, SchemaObject, SingleOrVec, - }; + use schemars::json_schema; use crate::custom_scalar_map::CustomScalarMap; @@ -103,7 +73,8 @@ mod tests { #[test] fn only_spaces() { - let result = CustomScalarMap::from_str(" ").err().unwrap(); + let result = + CustomScalarMap::from_str(" ").expect_err("empty space should be valid schema"); insta::assert_debug_snapshot!(result, @r#" CustomScalarConfig( @@ -128,20 +99,17 @@ mod tests { let result = CustomScalarMap::from_str( r###"{ "custom": { - "test": true + "type": "bool" } }"###, ) - .err() - .unwrap(); + .expect_err("schema should have been invalid"); - insta::assert_debug_snapshot!(result, @r#" + insta::assert_debug_snapshot!(result, @r###" CustomScalarJsonSchema( - Object { - "test": Bool(true), - }, + "\"bool\" is not valid under any of the schemas listed in the 'anyOf' keyword", ) - "#) + "###) } #[test] @@ -152,25 +120,17 @@ mod tests { "type": "object", "properties": { "test": { - "test": true + "type": "obbbject" } } } }"###, ) - .err() - .unwrap(); + .expect_err("schema should have been invalid"); insta::assert_debug_snapshot!(result, @r#" CustomScalarJsonSchema( - Object { - "type": String("object"), - "properties": Object { - "test": Object { - "test": Bool(true), - }, - }, - }, + "\"obbbject\" is not valid under any of the schemas listed in the 'anyOf' keyword", ) "#) } @@ -196,31 +156,23 @@ mod tests { let expected_data = HashMap::from_iter([ ( "simple".to_string(), - SchemaObject { - instance_type: Some(SingleOrVec::Single(Box::new(InstanceType::String))), - ..Default::default() - }, + json_schema!({ + "type": "string", + }), ), ( "complex".to_string(), - SchemaObject { - instance_type: Some(SingleOrVec::Single(Box::new(InstanceType::Object))), - object: Some(Box::new(ObjectValidation { - properties: BTreeMap::from_iter([( - "name".to_string(), - Schema::Object(SchemaObject { - instance_type: Some(SingleOrVec::Single(Box::new( - InstanceType::String, - ))), - 
..Default::default() - }), - )]), - ..Default::default() - })), - ..Default::default() - }, + json_schema!({ + "type": "object", + "properties": { + "name": { + "type": "string" + } + } + }), ), ]); + assert_eq!(result, expected_data); } } diff --git a/crates/apollo-mcp-server/src/errors.rs b/crates/apollo-mcp-server/src/errors.rs index e19dc152..caaa7f53 100644 --- a/crates/apollo-mcp-server/src/errors.rs +++ b/crates/apollo-mcp-server/src/errors.rs @@ -54,7 +54,7 @@ pub enum ServerError { GraphQLDocumentSchema(Box>), #[error("Federation error in GraphQL schema: {0}")] - Federation(FederationError), + Federation(Box), #[error("Invalid JSON: {0}")] Json(#[from] serde_json::Error), @@ -78,7 +78,7 @@ pub enum ServerError { CustomScalarConfig(serde_json::Error), #[error("invalid json schema: {0}")] - CustomScalarJsonSchema(serde_json::Value), + CustomScalarJsonSchema(String), #[error("Missing environment variable: {0}")] EnvironmentVariable(String), @@ -93,7 +93,7 @@ pub enum ServerError { StartupError(#[from] JoinError), #[error("Failed to initialize MCP server")] - McpInitializeError(#[from] rmcp::service::ServerInitializeError), + McpInitializeError(#[from] Box), #[error(transparent)] UrlParseError(ParseError), diff --git a/crates/apollo-mcp-server/src/explorer.rs b/crates/apollo-mcp-server/src/explorer.rs index d422e798..e0cbfe79 100644 --- a/crates/apollo-mcp-server/src/explorer.rs +++ b/crates/apollo-mcp-server/src/explorer.rs @@ -82,8 +82,10 @@ impl Explorer { let url = self.create_explorer_url(input)?; debug!(?url, input=?pretty, "Created URL to open operation in Apollo Explorer"); Ok(CallToolResult { - content: vec![Content::text(url)], + content: vec![Content::text(url.clone())], + meta: None, is_error: None, + structured_content: Some(Value::Array(vec![url.into()])), }) } } diff --git a/crates/apollo-mcp-server/src/graphql.rs b/crates/apollo-mcp-server/src/graphql.rs index 7d09b782..d47e61e2 100644 --- a/crates/apollo-mcp-server/src/graphql.rs +++ 
b/crates/apollo-mcp-server/src/graphql.rs @@ -107,6 +107,8 @@ pub trait Executable { .filter(|value| !matches!(value, Value::Null)) .is_none(), ), + meta: None, + structured_content: Some(json), }) } } diff --git a/crates/apollo-mcp-server/src/introspection/tools/introspect.rs b/crates/apollo-mcp-server/src/introspection/tools/introspect.rs index e7878f1d..436c89e3 100644 --- a/crates/apollo-mcp-server/src/introspection/tools/introspect.rs +++ b/crates/apollo-mcp-server/src/introspection/tools/introspect.rs @@ -73,6 +73,8 @@ impl Introspect { return Ok(CallToolResult { content: vec![], is_error: None, + meta: None, + structured_content: None, }); } } @@ -99,6 +101,9 @@ impl Introspect { .map(Content::text) .collect(), is_error: None, + meta: None, + // The content being returned is a raw string, so no need to create structured content for it + structured_content: None, }) } diff --git a/crates/apollo-mcp-server/src/introspection/tools/search.rs b/crates/apollo-mcp-server/src/introspection/tools/search.rs index 8ab6e808..3e146b63 100644 --- a/crates/apollo-mcp-server/src/introspection/tools/search.rs +++ b/crates/apollo-mcp-server/src/introspection/tools/search.rs @@ -167,6 +167,10 @@ impl Search { .map(Content::text) .collect(), is_error: None, + meta: None, + + // Note: The returned content is treated as text, so no need to structure its output + structured_content: None, }) } } diff --git a/crates/apollo-mcp-server/src/introspection/tools/validate.rs b/crates/apollo-mcp-server/src/introspection/tools/validate.rs index 17a66051..e104cc92 100644 --- a/crates/apollo-mcp-server/src/introspection/tools/validate.rs +++ b/crates/apollo-mcp-server/src/introspection/tools/validate.rs @@ -70,6 +70,11 @@ impl Validate { Ok(CallToolResult { content: vec![Content::text("Operation is valid")], is_error: None, + meta: None, + + // Note: We don't really return any meaningful content to the client here, so we can leave the + // structured content as none. 
+ structured_content: None, }) } } diff --git a/crates/apollo-mcp-server/src/main.rs b/crates/apollo-mcp-server/src/main.rs index ae5102e6..3f3d8738 100644 --- a/crates/apollo-mcp-server/src/main.rs +++ b/crates/apollo-mcp-server/src/main.rs @@ -109,6 +109,8 @@ async fn main() -> anyhow::Result<()> { .then(|| config.graphos.graph_ref()) .transpose()?; + let transport = config.transport.clone(); + Ok(Server::builder() .transport(config.transport) .schema_source(schema_source) @@ -125,6 +127,15 @@ async fn main() -> anyhow::Result<()> { .mutation_mode(config.overrides.mutation_mode) .disable_type_description(config.overrides.disable_type_description) .disable_schema_description(config.overrides.disable_schema_description) + .disable_auth_token_passthrough(match transport { + apollo_mcp_server::server::Transport::Stdio => false, + apollo_mcp_server::server::Transport::SSE { auth, .. } => auth + .map(|a| a.disable_auth_token_passthrough) + .unwrap_or(false), + apollo_mcp_server::server::Transport::StreamableHttp { auth, .. 
} => auth + .map(|a| a.disable_auth_token_passthrough) + .unwrap_or(false), + }) .custom_scalar_map( config .custom_scalars diff --git a/crates/apollo-mcp-server/src/operations.rs b/crates/apollo-mcp-server/src/operations.rs index 7cec37e0..169c5379 100644 --- a/crates/apollo-mcp-server/src/operations.rs +++ b/crates/apollo-mcp-server/src/operations.rs @@ -1,3541 +1,15 @@ -use crate::custom_scalar_map::CustomScalarMap; -use crate::errors::{McpError, OperationError}; -use crate::event::Event; -use crate::graphql::{self, OperationDetails}; -use crate::schema_tree_shake::{DepthLimit, SchemaTreeShaker}; -use apollo_compiler::ast::{Document, OperationType, Selection}; -use apollo_compiler::schema::ExtendedType; -use apollo_compiler::validation::Valid; -use apollo_compiler::{ - Name, Node, Schema as GraphqlSchema, - ast::{Definition, OperationDefinition, Type}, - parser::Parser, -}; -use apollo_mcp_registry::files; -use apollo_mcp_registry::platform_api::operation_collections::collection_poller::{ - CollectionSource, OperationData, -}; -use apollo_mcp_registry::platform_api::operation_collections::error::CollectionError; -use apollo_mcp_registry::platform_api::operation_collections::event::CollectionEvent; -use apollo_mcp_registry::uplink::persisted_queries::ManifestSource; -use apollo_mcp_registry::uplink::persisted_queries::event::Event as ManifestEvent; -use futures::{Stream, StreamExt}; -use regex::Regex; -use reqwest::header::{HeaderMap, HeaderName, HeaderValue}; -use rmcp::model::{ErrorCode, ToolAnnotations}; -use rmcp::schemars::Map; -use rmcp::{ - model::Tool, - schemars::schema::{ - ArrayValidation, InstanceType, Metadata, ObjectValidation, RootSchema, Schema, - SchemaObject, SingleOrVec, SubschemaValidation, - }, - serde_json::{self, Value}, -}; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use std::collections::HashMap; -use std::fs; -use std::path::PathBuf; -use std::str::FromStr; -use std::sync::{Arc, Mutex}; -use tracing::{debug, info, 
warn}; - -const OPERATION_DOCUMENT_EXTENSION: &str = "graphql"; - -/// The source of the operations exposed as MCP tools -#[derive(Clone)] -pub enum OperationSource { - /// GraphQL document files - Files(Vec), - - /// Persisted Query manifest - Manifest(ManifestSource), - - /// Operation collection - Collection(CollectionSource), - - /// No operations provided - None, -} - -impl OperationSource { - pub async fn into_stream(self) -> impl Stream { - match self { - OperationSource::Files(paths) => Self::stream_file_changes(paths).boxed(), - OperationSource::Manifest(manifest_source) => manifest_source - .into_stream() - .await - .map(|event| { - let ManifestEvent::UpdateManifest(operations) = event; - Event::OperationsUpdated( - operations.into_iter().map(RawOperation::from).collect(), - ) - }) - .boxed(), - OperationSource::Collection(collection_source) => collection_source - .into_stream() - .map(|event| match event { - CollectionEvent::UpdateOperationCollection(operations) => { - match operations - .iter() - .map(RawOperation::try_from) - .collect::, _>>() - { - Ok(operations) => Event::OperationsUpdated(operations), - Err(e) => Event::CollectionError(e), - } - } - CollectionEvent::CollectionError(error) => Event::CollectionError(error), - }) - .boxed(), - OperationSource::None => { - futures::stream::once(async { Event::OperationsUpdated(vec![]) }).boxed() - } - } - } - - fn stream_file_changes(paths: Vec) -> impl Stream { - let path_count = paths.len(); - let state = Arc::new(Mutex::new(HashMap::>::new())); - futures::stream::select_all(paths.into_iter().map(|path| { - let state = Arc::clone(&state); - files::watch(path.as_ref()) - .filter_map(move |_| { - let path = path.clone(); - let state = Arc::clone(&state); - async move { - let mut operations = Vec::new(); - if path.is_dir() { - // Handle a directory - if let Ok(entries) = fs::read_dir(&path) { - for entry in entries.flatten() { - let entry_path = entry.path(); - if entry_path.extension().and_then(|e| 
e.to_str()) - == Some(OPERATION_DOCUMENT_EXTENSION) - { - match fs::read_to_string(&entry_path) { - Ok(content) => { - // Be forgiving of empty files in the directory case. - // It likely means a new file was created in an editor, - // but the operation hasn't been written yet. - if !content.trim().is_empty() { - operations.push(RawOperation::from(( - content, - entry_path.to_str().map(|s| s.to_string()), - ))); - } - } - Err(e) => { - return Some(Event::OperationError( - e, - path.to_str().map(|s| s.to_string()), - )); - } - } - } - } - } - } else { - // Handle a single file - match fs::read_to_string(&path) { - Ok(content) => { - if !content.trim().is_empty() { - operations.push(RawOperation::from(( - content, - path.to_str().map(|s| s.to_string()), - ))); - } else { - warn!(?path, "Empty operation file"); - } - } - Err(e) => { - return Some(Event::OperationError( - e, - path.to_str().map(|s| s.to_string()), - )); - } - } - } - match state.lock() { - Ok(mut state) => { - state.insert(path.clone(), operations); - // All paths send an initial event on startup. To avoid repeated - // operation events on startup, wait until all paths have been - // loaded, then send a single event with the operations for all - // paths. 
- if state.len() == path_count { - Some(Event::OperationsUpdated( - state.values().flatten().cloned().collect::>(), - )) - } else { - None - } - } - Err(_) => Some(Event::OperationError( - std::io::Error::other("State mutex poisoned"), - path.to_str().map(|s| s.to_string()), - )), - } - } - }) - .boxed() - })) - .boxed() - } -} - -impl From for OperationSource { - fn from(manifest_source: ManifestSource) -> Self { - OperationSource::Manifest(manifest_source) - } -} - -impl From> for OperationSource { - fn from(paths: Vec) -> Self { - OperationSource::Files(paths) - } -} - -#[derive(Clone, Default, Debug, Deserialize, Serialize, PartialEq, Copy, JsonSchema)] -#[serde(rename_all = "snake_case")] -pub enum MutationMode { - /// Don't allow any mutations - #[default] - None, - /// Allow explicit mutations, but don't allow the LLM to build them - Explicit, - /// Allow the LLM to build mutations - All, -} - -#[derive(Debug, Clone)] -pub struct RawOperation { - source_text: String, - persisted_query_id: Option, - headers: Option>, - variables: Option>, - source_path: Option, -} - -// Custom Serialize implementation for RawOperation -// This is needed because reqwest HeaderMap/HeaderValue/HeaderName don't derive Serialize -impl serde::Serialize for RawOperation { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut state = serializer.serialize_struct("RawOperation", 4)?; - state.serialize_field("source_text", &self.source_text)?; - if let Some(ref id) = self.persisted_query_id { - state.serialize_field("persisted_query_id", id)?; - } - if let Some(ref variables) = self.variables { - state.serialize_field("variables", variables)?; - } - if let Some(ref headers) = self.headers { - state.serialize_field( - "headers", - headers - .iter() - .map(|(name, value)| { - format!("{}: {}", name, value.to_str().unwrap_or_default()) - }) - .collect::>() - .join("\n") - .as_str(), - )?; - } - if let Some(ref path) 
= self.source_path { - state.serialize_field("source_path", path)?; - } - - state.end() - } -} - -impl From<(String, Option)> for RawOperation { - fn from((source_text, source_path): (String, Option)) -> Self { - Self { - persisted_query_id: None, - source_text, - headers: None, - variables: None, - source_path, - } - } -} - -impl From<(String, String)> for RawOperation { - fn from((persisted_query_id, source_text): (String, String)) -> Self { - Self { - persisted_query_id: Some(persisted_query_id), - source_text, - headers: None, - variables: None, - source_path: None, - } - } -} - -impl TryFrom<&OperationData> for RawOperation { - type Error = CollectionError; - - fn try_from(operation_data: &OperationData) -> Result { - let variables = if let Some(variables) = operation_data.variables.as_ref() { - if variables.trim().is_empty() { - Some(HashMap::new()) - } else { - Some( - serde_json::from_str::>(variables) - .map_err(|_| CollectionError::InvalidVariables(variables.clone()))?, - ) - } - } else { - None - }; - - let headers = if let Some(headers) = operation_data.headers.as_ref() { - let mut header_map = HeaderMap::new(); - for header in headers { - header_map.insert( - HeaderName::from_str(&header.0).map_err(CollectionError::HeaderName)?, - HeaderValue::from_str(&header.1).map_err(CollectionError::HeaderValue)?, - ); - } - Some(header_map) - } else { - None - }; - - Ok(Self { - persisted_query_id: None, - source_text: operation_data.source_text.clone(), - headers, - variables, - source_path: None, - }) - } -} - -impl RawOperation { - pub(crate) fn into_operation( - self, - schema: &Valid, - custom_scalars: Option<&CustomScalarMap>, - mutation_mode: MutationMode, - disable_type_description: bool, - disable_schema_description: bool, - ) -> Result, OperationError> { - Operation::from_document( - self, - schema, - custom_scalars, - mutation_mode, - disable_type_description, - disable_schema_description, - ) - } -} - -#[derive(Debug, Clone, Serialize)] -pub struct 
Operation { - tool: Tool, - inner: RawOperation, - operation_name: String, -} - -impl AsRef for Operation { - fn as_ref(&self) -> &Tool { - &self.tool - } -} - -impl From for Tool { - fn from(value: Operation) -> Tool { - value.tool - } -} - -impl Operation { - pub(crate) fn into_inner(self) -> RawOperation { - self.inner - } -} - -#[allow(clippy::type_complexity)] -pub fn operation_defs( - source_text: &str, - allow_mutations: bool, - source_path: Option, -) -> Result, Option)>, OperationError> { - let source_path_clone = source_path.clone(); - let document = Parser::new() - .parse_ast( - source_text, - source_path_clone.unwrap_or_else(|| "operation.graphql".to_string()), - ) - .map_err(|e| OperationError::GraphQLDocument(Box::new(e)))?; - let mut last_offset: Option = Some(0); - let mut operation_defs = document.definitions.clone().into_iter().filter_map(|def| { - let description = match def.location() { - Some(source_span) => { - let description = last_offset - .map(|start_offset| &source_text[start_offset..source_span.offset()]); - last_offset = Some(source_span.end_offset()); - description - } - None => { - last_offset = None; - None - } - }; - - match def { - Definition::OperationDefinition(operation_def) => { - Some((operation_def, description)) - } - Definition::FragmentDefinition(_) => None, - _ => { - eprintln!("Schema definitions were passed in, but only operations and fragments are allowed"); - None - } - } - }); - - let (operation, comments) = match (operation_defs.next(), operation_defs.next()) { - (None, _) => { - return Err(OperationError::NoOperations { source_path }); - } - (_, Some(_)) => { - return Err(OperationError::TooManyOperations { - source_path, - count: 2 + operation_defs.count(), - }); - } - (Some(op), None) => op, - }; - - match operation.operation_type { - OperationType::Subscription => { - debug!( - "Skipping subscription operation {}", - operation_name(&operation, source_path)? 
- ); - return Ok(None); - } - OperationType::Mutation => { - if !allow_mutations { - warn!( - "Skipping mutation operation {}", - operation_name(&operation, source_path)? - ); - return Ok(None); - } - } - OperationType::Query => {} - } - - Ok(Some((document, operation, comments.map(|c| c.to_string())))) -} - -pub fn extract_and_format_comments(comments: Option) -> Option { - comments.and_then(|comments| { - let content = Regex::new(r"(\n|^)(\s*,*)*#") - .ok()? - .replace_all(comments.as_str(), "$1"); - let trimmed = content.trim(); - - if trimmed.is_empty() { - None - } else { - Some(trimmed.to_string()) - } - }) -} - -pub fn find_opening_parens_offset( - source_text: &str, - operation_definition: &Node, -) -> Option { - let regex = match Regex::new(r"(?m)^\s*\(") { - Ok(regex) => regex, - Err(_) => return None, - }; - - operation_definition - .name - .as_ref() - .and_then(|n| n.location()) - .map(|span| { - regex - .find(source_text[span.end_offset()..].as_ref()) - .map(|m| m.start() + m.len() + span.end_offset()) - .unwrap_or(0) - }) -} - -pub fn variable_description_overrides( - source_text: &str, - operation_definition: &Node, -) -> HashMap { - let mut argument_overrides_map: HashMap = HashMap::new(); - let mut last_offset = find_opening_parens_offset(source_text, operation_definition); - operation_definition - .variables - .iter() - .for_each(|v| match v.location() { - Some(source_span) => { - let comment = last_offset - .map(|start_offset| &source_text[start_offset..source_span.offset()]); - - if let Some(description) = comment.filter(|d| !d.is_empty() && d.contains('#')) { - if let Some(description) = - extract_and_format_comments(Some(description.to_string())) - { - argument_overrides_map.insert(v.name.to_string(), description); - } - } - - last_offset = Some(source_span.end_offset()); - } - None => { - last_offset = None; - } - }); - - argument_overrides_map -} - -impl Operation { - pub fn from_document( - raw_operation: RawOperation, - graphql_schema: 
&GraphqlSchema, - custom_scalar_map: Option<&CustomScalarMap>, - mutation_mode: MutationMode, - disable_type_description: bool, - disable_schema_description: bool, - ) -> Result, OperationError> { - if let Some((document, operation, comments)) = operation_defs( - &raw_operation.source_text, - mutation_mode != MutationMode::None, - raw_operation.source_path.clone(), - )? { - let operation_name = match operation_name(&operation, raw_operation.source_path.clone()) - { - Ok(name) => name, - Err(OperationError::MissingName { - source_path, - operation, - }) => { - if let Some(path) = source_path { - warn!("Skipping unnamed operation in {path}: {operation}"); - } else { - warn!("Skipping unnamed operation: {operation}"); - } - return Ok(None); - } - Err(e) => return Err(e), - }; - let variable_description_overrides = - variable_description_overrides(&raw_operation.source_text, &operation); - let mut tree_shaker = SchemaTreeShaker::new(graphql_schema); - tree_shaker.retain_operation(&operation, &document, DepthLimit::Unlimited); - - let description = Self::tool_description( - comments, - &mut tree_shaker, - graphql_schema, - &operation, - disable_type_description, - disable_schema_description, - ); - - let mut object = serde_json::to_value(get_json_schema( - &operation, - tree_shaker.argument_descriptions(), - &variable_description_overrides, - graphql_schema, - custom_scalar_map, - raw_operation.variables.as_ref(), - ))?; - - // make sure that the properties field exists since schemas::ObjectValidation is - // configured to skip empty maps (in the case where there are no input args) - ensure_properties_exists(&mut object); - - let Value::Object(schema) = object else { - return Err(OperationError::Internal( - "Schemars should have returned an object".to_string(), - )); - }; - - let tool: Tool = Tool::new(operation_name.clone(), description, schema).annotate( - ToolAnnotations::new() - .read_only(operation.operation_type != OperationType::Mutation), - ); - let 
character_count = tool_character_length(&tool); - match character_count { - Ok(length) => info!( - "Tool {} loaded with a character count of {}. Estimated tokens: {}", - operation_name, - length, - length / 4 // We don't know the tokenization algorithm, so we just use 4 characters per token as a rough estimate. https://docs.anthropic.com/en/docs/resources/glossary#tokens - ), - Err(_) => info!( - "Tool {} loaded with an unknown character count", - operation_name - ), - } - Ok(Some(Operation { - tool, - inner: raw_operation, - operation_name, - })) - } else { - Ok(None) - } - } - - /// Generate a description for an operation based on documentation in the schema - fn tool_description( - comments: Option, - tree_shaker: &mut SchemaTreeShaker, - graphql_schema: &GraphqlSchema, - operation_def: &Node, - disable_type_description: bool, - disable_schema_description: bool, - ) -> String { - let comment_description = extract_and_format_comments(comments); - - match comment_description { - Some(description) => description, - None => { - // Add the tree-shaken types to the end of the tool description - let mut lines = vec![]; - if !disable_type_description { - let descriptions = operation_def - .selection_set - .iter() - .filter_map(|selection| { - match selection { - Selection::Field(field) => { - let field_name = field.name.to_string(); - let operation_type = operation_def.operation_type; - if let Some(root_name) = - graphql_schema.root_operation(operation_type) - { - // Find the root field referenced by the operation - let root = graphql_schema.get_object(root_name)?; - let field_definition = root - .fields - .iter() - .find(|(name, _)| { - let name = name.to_string(); - name == field_name - }) - .map(|(_, field_definition)| { - field_definition.node.clone() - }); - - // Add the root field description to the tool description - let field_description = field_definition - .clone() - .and_then(|field| field.description.clone()) - .map(|node| node.to_string()); - - // Add 
information about the return type - let ty = field_definition.map(|field| field.ty.clone()); - let type_description = - ty.as_ref().map(Self::type_description); - - Some( - vec![field_description, type_description] - .into_iter() - .flatten() - .collect::>() - .join("\n"), - ) - } else { - None - } - } - _ => None, - } - }) - .collect::>() - .join("\n---\n"); - - // Add the tree-shaken types to the end of the tool description - - lines.push(descriptions); - } - if !disable_schema_description { - let shaken_schema = - tree_shaker.shaken().unwrap_or_else(|schema| schema.partial); - - let mut types = shaken_schema - .types - .iter() - .filter(|(_name, extended_type)| { - !extended_type.is_built_in() - && matches!( - extended_type, - ExtendedType::Object(_) - | ExtendedType::Scalar(_) - | ExtendedType::Enum(_) - | ExtendedType::Interface(_) - | ExtendedType::Union(_) - ) - && graphql_schema - .root_operation(operation_def.operation_type) - .is_none_or(|op_name| extended_type.name() != op_name) - && graphql_schema - .root_operation(OperationType::Query) - .is_none_or(|op_name| extended_type.name() != op_name) - }) - .peekable(); - if types.peek().is_some() { - lines.push(String::from("---")); - } - - for ty in types { - lines.push(ty.1.serialize().to_string()); - } - } - lines.join("\n") - } - } - } - - fn type_description(ty: &Type) -> String { - let type_name = ty.inner_named_type(); - let mut lines = vec![]; - let optional = if ty.is_non_null() { - "" - } else { - "is optional and " - }; - let array = if ty.is_list() { - "is an array of type" - } else { - "has type" - }; - lines.push(format!( - "The returned value {optional}{array} `{type_name}`" - )); - - lines.join("\n") - } -} - -fn ensure_properties_exists(json_object: &mut Value) { - if let Some(obj_type) = json_object.get("type") { - if obj_type == "object" { - if let Some(obj_map) = json_object.as_object_mut() { - let props = obj_map - .entry("properties") - .or_insert_with(|| 
Value::Object(serde_json::Map::new())); - if !props.is_object() { - *props = Value::Object(serde_json::Map::new()); - } - } - } - } -} - -pub fn operation_name( - operation: &Node, - source_path: Option, -) -> Result { - Ok(operation - .name - .as_ref() - .ok_or_else(|| OperationError::MissingName { - source_path, - operation: operation.serialize().no_indent().to_string(), - })? - .to_string()) -} - -fn tool_character_length(tool: &Tool) -> Result { - let tool_schema_string = serde_json::to_string_pretty(&serde_json::json!(tool.input_schema))?; - Ok(tool.name.len() - + tool.description.as_ref().map(|d| d.len()).unwrap_or(0) - + tool_schema_string.len()) -} - -fn get_json_schema( - operation: &Node, - schema_argument_descriptions: &HashMap>, - argument_descriptions_overrides: &HashMap, - graphql_schema: &GraphqlSchema, - custom_scalar_map: Option<&CustomScalarMap>, - variable_overrides: Option<&HashMap>, -) -> RootSchema { - let mut obj = ObjectValidation::default(); - let mut definitions = Map::new(); - - operation.variables.iter().for_each(|variable| { - let variable_name = variable.name.to_string(); - if !variable_overrides - .map(|o| o.contains_key(&variable_name)) - .unwrap_or_default() - { - // use overridden description if there is one, otherwise use the schema description - let description: Option = - match argument_descriptions_overrides.get(&variable_name) { - Some(description) => Some(description.clone()), - None => schema_argument_descriptions - .get(&variable_name) - .filter(|d| !d.is_empty()) - .map(|d| d.join("#")), - }; - - let schema = type_to_schema( - description, - variable.ty.as_ref(), - graphql_schema, - custom_scalar_map, - &mut definitions, - ); - obj.properties.insert(variable_name.clone(), schema); - if variable.ty.is_non_null() { - obj.required.insert(variable_name); - } - } - }); - - RootSchema { - schema: SchemaObject { - instance_type: Some(SingleOrVec::Single(Box::new(InstanceType::Object))), - object: Some(Box::new(obj)), - 
..Default::default() - }, - definitions, - ..Default::default() - } -} - -fn schema_factory( - description: Option, - instance_type: Option, - object_validation: Option, - array_validation: Option, - subschema_validation: Option, - enum_values: Option>, -) -> Schema { - Schema::Object(SchemaObject { - instance_type: instance_type - .map(|instance_type| SingleOrVec::Single(Box::new(instance_type))), - object: object_validation.map(Box::new), - array: array_validation.map(Box::new), - subschemas: subschema_validation.map(Box::new), - enum_values, - metadata: Some(Box::new(Metadata { - description, - ..Default::default() - })), - ..Default::default() - }) -} - -fn input_object_description(name: &Name, graphql_schema: &GraphqlSchema) -> Option { - if let Some(input_object) = graphql_schema.get_input_object(name) { - input_object.description.as_ref().map(|d| d.to_string()) - } else if let Some(scalar) = graphql_schema.get_scalar(name) { - scalar.description.as_ref().map(|d| d.to_string()) - } else if let Some(enum_type) = graphql_schema.get_enum(name) { - let values = enum_type - .values - .iter() - .map(|(name, value)| { - format!( - "{}: {}", - name, - value - .description - .as_ref() - .map(|d| d.to_string()) - .unwrap_or_default() - ) - }) - .collect::>() - .join("\n"); - Some(format!( - "{}\n\nValues:\n{}", - enum_type - .description - .as_ref() - .map(|d| d.to_string()) - .unwrap_or_default(), - values - )) - } else { - None - } -} - -fn type_to_schema( - description: Option, - variable_type: &Type, - graphql_schema: &GraphqlSchema, - custom_scalar_map: Option<&CustomScalarMap>, - definitions: &mut Map, -) -> Schema { - match variable_type { - Type::NonNullNamed(named) | Type::Named(named) => match named.as_str() { - "String" | "ID" => schema_factory( - description, - Some(InstanceType::String), - None, - None, - None, - None, - ), - "Int" | "Float" => schema_factory( - description, - Some(InstanceType::Number), - None, - None, - None, - None, - ), - "Boolean" => 
schema_factory( - description, - Some(InstanceType::Boolean), - None, - None, - None, - None, - ), - _ => { - if let Some(input_type) = graphql_schema.get_input_object(named) { - if !definitions.contains_key(named.as_str()) { - definitions - .insert(named.to_string(), Schema::Object(SchemaObject::default())); // Insert temporary value into map so any recursive references will not try to also create it. - let mut obj = ObjectValidation::default(); - - input_type.fields.iter().for_each(|(name, field)| { - let description = field.description.as_ref().map(|n| n.to_string()); - obj.properties.insert( - name.to_string(), - type_to_schema( - description, - field.ty.as_ref(), - graphql_schema, - custom_scalar_map, - definitions, - ), - ); - - if field.is_required() { - obj.required.insert(name.to_string()); - } - }); - - definitions.insert( - named.to_string(), - schema_factory( - input_object_description(named, graphql_schema), - Some(InstanceType::Object), - Some(obj), - None, - None, - None, - ), - ); - } - - Schema::Object(SchemaObject { - metadata: Some(Box::new(Metadata { - description, - ..Default::default() - })), - reference: Some(format!("#/definitions/{named}")), - ..Default::default() - }) - } else if graphql_schema.get_scalar(named).is_some() { - if !definitions.contains_key(named.as_str()) { - let default_description = input_object_description(named, graphql_schema); - if let Some(custom_scalar_map) = custom_scalar_map { - if let Some(custom_scalar_schema_object) = - custom_scalar_map.get(named.as_str()) - { - let mut custom_schema = custom_scalar_schema_object.clone(); - let mut meta = *custom_schema.metadata.unwrap_or_default(); - // If description isn't included in custom schema, inject the one from the schema - if meta.description.is_none() { - meta.description = default_description; - } - custom_schema.metadata = Some(Box::new(meta)); - definitions - .insert(named.to_string(), Schema::Object(custom_schema)); - } else { - warn!(name=?named, "custom scalar 
missing from custom_scalar_map"); - definitions.insert( - named.to_string(), - schema_factory( - default_description, - None, - None, - None, - None, - None, - ), - ); - } - } else { - warn!(name=?named, "custom scalars aren't currently supported without a custom_scalar_map"); - definitions.insert( - named.to_string(), - schema_factory(default_description, None, None, None, None, None), - ); - } - } - Schema::Object(SchemaObject { - metadata: Some(Box::new(Metadata { - description, - ..Default::default() - })), - reference: Some(format!("#/definitions/{named}")), - ..Default::default() - }) - } else if let Some(enum_type) = graphql_schema.get_enum(named) { - if !definitions.contains_key(named.as_str()) { - definitions.insert( - named.to_string(), - schema_factory( - input_object_description(named, graphql_schema), - Some(InstanceType::String), - None, - None, - None, - Some( - enum_type - .values - .iter() - .map(|(_name, value)| serde_json::json!(value.value)) - .collect(), - ), - ), - ); - } - Schema::Object(SchemaObject { - metadata: Some(Box::new(Metadata { - description, - ..Default::default() - })), - reference: Some(format!("#/definitions/{named}")), - ..Default::default() - }) - } else { - warn!(name=?named, "Type not found in schema"); - schema_factory(None, None, None, None, None, None) - } - } - }, - Type::NonNullList(list_type) | Type::List(list_type) => { - let inner_type_schema = type_to_schema( - description, - list_type, - graphql_schema, - custom_scalar_map, - definitions, - ); - let items_schema = if list_type.is_non_null() { - inner_type_schema - } else { - schema_factory( - None, - None, - None, - None, - Some(SubschemaValidation { - one_of: Some(vec![ - inner_type_schema, - Schema::Object(SchemaObject { - instance_type: Some(SingleOrVec::Single(Box::new( - InstanceType::Null, - ))), - ..Default::default() - }), - ]), - ..Default::default() - }), - None, - ) - }; - - schema_factory( - None, - Some(InstanceType::Array), - None, - 
Some(ArrayValidation { - items: Some(SingleOrVec::Single(Box::new(items_schema))), - ..Default::default() - }), - None, - None, - ) - } - } -} - -impl graphql::Executable for Operation { - fn persisted_query_id(&self) -> Option { - // TODO: id was being overridden, should we be returning? Should this be behind a flag? self.inner.persisted_query_id.clone() - None - } - - fn operation(&self, _input: Value) -> Result { - Ok(OperationDetails { - query: self.inner.source_text.clone(), - operation_name: Some(self.operation_name.clone()), - }) - } - - fn variables(&self, input_variables: Value) -> Result { - if let Some(raw_variables) = self.inner.variables.as_ref() { - let mut variables = match input_variables { - Value::Null => Ok(serde_json::Map::new()), - Value::Object(obj) => Ok(obj.clone()), - _ => Err(McpError::new( - ErrorCode::INVALID_PARAMS, - "Invalid input".to_string(), - None, - )), - }?; - - raw_variables.iter().try_for_each(|(key, value)| { - if variables.contains_key(key) { - Err(McpError::new( - ErrorCode::INVALID_PARAMS, - "No such parameter: {key}", - None, - )) - } else { - variables.insert(key.clone(), value.clone()); - Ok(()) - } - })?; - - Ok(Value::Object(variables)) - } else { - Ok(input_variables) - } - } - - fn headers(&self, default_headers: &HeaderMap) -> HeaderMap { - match self.inner.headers.as_ref() { - None => default_headers.clone(), - Some(raw_headers) if default_headers.is_empty() => raw_headers.clone(), - Some(raw_headers) => { - let mut headers = default_headers.clone(); - raw_headers.iter().for_each(|(key, value)| { - if headers.contains_key(key) { - tracing::debug!( - "Header {} has a default value, overwriting with operation value", - key - ); - } - headers.insert(key, value.clone()); - }); - headers - } - } - } -} - -#[cfg(test)] -mod tests { - use crate::graphql::Executable; - use apollo_compiler::{Schema, parser::Parser, validation::Valid}; - use rmcp::serde_json::Value; - use rmcp::{model::Tool, serde_json}; - use 
std::{collections::HashMap, str::FromStr, sync::LazyLock}; - use tracing_test::traced_test; - - use crate::{ - custom_scalar_map::CustomScalarMap, - operations::{MutationMode, Operation, RawOperation}, - }; - - // Example schema for tests - static SCHEMA: LazyLock> = LazyLock::new(|| { - Schema::parse( - r#" - type Query { - id: String - enum: RealEnum - customQuery(""" id description """ id: ID!, """ a flag """ flag: Boolean): OutputType - testOp: OpResponse - } - type Mutation {id: String } - - """ - RealCustomScalar exists - """ - scalar RealCustomScalar - input RealInputObject { - """ - optional is a input field that is optional - """ - optional: String - - """ - required is a input field that is required - """ - required: String! - } - - type OpResponse { - id: String - } - - """ - the description for the enum - """ - enum RealEnum { - """ - ENUM_VALUE_1 is a value - """ - ENUM_VALUE_1 - - """ - ENUM_VALUE_2 is a value - """ - ENUM_VALUE_2 - } - - """ - custom output type - """ - type OutputType { - id: ID! 
- } - "#, - "operation.graphql", - ) - .expect("schema should parse") - .validate() - .expect("schema should be valid") - }); - - #[test] - fn subscriptions() { - assert!( - Operation::from_document( - RawOperation { - source_text: "subscription SubscriptionName { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .is_none() - ); - } - - #[test] - fn mutation_mode_none() { - assert!( - Operation::from_document( - RawOperation { - source_text: "mutation MutationName { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .ok() - .unwrap() - .is_none() - ); - } - - #[test] - fn mutation_mode_explicit() { - let operation = Operation::from_document( - RawOperation { - source_text: "mutation MutationName { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::Explicit, - false, - false, - ) - .unwrap() - .unwrap(); - - insta::assert_debug_snapshot!(operation, @r#" - Operation { - tool: Tool { - name: "MutationName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object {}, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - false, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - }, - inner: RawOperation { - source_text: "mutation MutationName { id }", - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - operation_name: "MutationName", - } - "#); - } - - #[test] - fn mutation_mode_all() { - let operation = Operation::from_document( - RawOperation { - source_text: "mutation MutationName { id }".to_string(), - 
persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::All, - false, - false, - ) - .unwrap() - .unwrap(); - - insta::assert_debug_snapshot!(operation, @r#" - Operation { - tool: Tool { - name: "MutationName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object {}, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - false, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - }, - inner: RawOperation { - source_text: "mutation MutationName { id }", - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - operation_name: "MutationName", - } - "#); - } - - #[test] - fn no_variables() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object {}, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": {} - } - "#); - } - - #[test] - fn nullable_named_type() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: ID) { id }".to_string(), - persisted_query_id: None, - headers: None, 
- variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "id": Object { - "type": String("string"), - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "id": { - "type": "string" - } - } - } - "#); - } - - #[test] - fn non_nullable_named_type() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: ID!) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "required": Array [ - String("id"), - ], - "properties": Object { - "id": Object { - "type": String("string"), - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "required": [ - "id" - ], - "properties": { - "id": { - "type": "string" - } - } - } - "#); - } - - 
#[test] - fn non_nullable_list_of_nullable_named_type() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: [ID]!) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "required": Array [ - String("id"), - ], - "properties": Object { - "id": Object { - "type": String("array"), - "items": Object { - "oneOf": Array [ - Object { - "type": String("string"), - }, - Object { - "type": String("null"), - }, - ], - }, - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "required": [ - "id" - ], - "properties": { - "id": { - "type": "array", - "items": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - } - } - } - } - "#); - } - - #[test] - fn non_nullable_list_of_non_nullable_named_type() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: [ID!]!) 
{ id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "required": Array [ - String("id"), - ], - "properties": Object { - "id": Object { - "type": String("array"), - "items": Object { - "type": String("string"), - }, - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "required": [ - "id" - ], - "properties": { - "id": { - "type": "array", - "items": { - "type": "string" - } - } - } - } - "#); - } - - #[test] - fn nullable_list_of_nullable_named_type() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: [ID]) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "id": Object { - "type": String("array"), - "items": Object { - "oneOf": Array [ - Object { - "type": String("string"), - }, - Object { - "type": String("null"), - }, - ], - }, - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - 
destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "id": { - "type": "array", - "items": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - } - } - } - } - "#); - } - - #[test] - fn nullable_list_of_non_nullable_named_type() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: [ID!]) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "id": Object { - "type": String("array"), - "items": Object { - "type": String("string"), - }, - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "id": { - "type": "array", - "items": { - "type": "string" - } - } - } - } - "#); - } - - #[test] - fn nullable_list_of_nullable_lists_of_nullable_named_types() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: [[ID]]) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - 
insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "id": Object { - "type": String("array"), - "items": Object { - "oneOf": Array [ - Object { - "type": String("array"), - "items": Object { - "oneOf": Array [ - Object { - "type": String("string"), - }, - Object { - "type": String("null"), - }, - ], - }, - }, - Object { - "type": String("null"), - }, - ], - }, - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "id": { - "type": "array", - "items": { - "oneOf": [ - { - "type": "array", - "items": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - } - }, - { - "type": "null" - } - ] - } - } - } - } - "#); - } - - #[test] - fn nullable_input_object() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: RealInputObject) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r##" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "id": Object { - "$ref": String("#/definitions/RealInputObject"), - }, - }, - "definitions": Object { - "RealInputObject": Object { - "type": String("object"), - "required": Array [ - String("required"), - ], - "properties": Object { - "optional": 
Object { - "description": String("optional is a input field that is optional"), - "type": String("string"), - }, - "required": Object { - "description": String("required is a input field that is required"), - "type": String("string"), - }, - }, - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "##); - } - - #[test] - fn non_nullable_enum() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: RealEnum!) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r##" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "required": Array [ - String("id"), - ], - "properties": Object { - "id": Object { - "$ref": String("#/definitions/RealEnum"), - }, - }, - "definitions": Object { - "RealEnum": Object { - "description": String("the description for the enum\n\nValues:\nENUM_VALUE_1: ENUM_VALUE_1 is a value\nENUM_VALUE_2: ENUM_VALUE_2 is a value"), - "type": String("string"), - "enum": Array [ - String("ENUM_VALUE_1"), - String("ENUM_VALUE_2"), - ], - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "##); - } - - #[test] - fn multiple_operations_should_error() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName { id } query QueryName { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: Some("operation.graphql".to_string()), 
- }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ); - insta::assert_debug_snapshot!(operation, @r#" - Err( - TooManyOperations { - source_path: Some( - "operation.graphql", - ), - count: 2, - }, - ) - "#); - } - - #[test] - #[traced_test] - fn unnamed_operations_should_be_skipped() { - let operation = Operation::from_document( - RawOperation { - source_text: "query { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: Some("operation.graphql".to_string()), - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ); - assert!(operation.unwrap().is_none()); - - logs_assert(|lines: &[&str]| { - lines - .iter() - .filter(|line| line.contains("WARN")) - .any(|line| { - line.contains("Skipping unnamed operation in operation.graphql: { id }") - }) - .then_some(()) - .ok_or("Expected warning about unnamed operation in logs".to_string()) - }); - } - - #[test] - fn no_operations_should_error() { - let operation = Operation::from_document( - RawOperation { - source_text: "fragment Test on Query { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: Some("operation.graphql".to_string()), - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ); - insta::assert_debug_snapshot!(operation, @r#" - Err( - NoOperations { - source_path: Some( - "operation.graphql", - ), - }, - ) - "#); - } - - #[test] - fn schema_should_error() { - let operation = Operation::from_document( - RawOperation { - source_text: "type Query { id: String }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ); - insta::assert_debug_snapshot!(operation, @r" - Err( - NoOperations { - source_path: None, - }, - ) - "); - } - - #[test] - #[traced_test] - fn unknown_type_should_be_any() { - let operation = Operation::from_document( - RawOperation { - source_text: "query 
QueryName($id: FakeType) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - // Verify that a warning was logged - logs_assert(|lines: &[&str]| { - lines - .iter() - .filter(|line| line.contains("WARN")) - .any(|line| line.contains("Type not found in schema name=\"FakeType\"")) - .then_some(()) - .ok_or("Expected warning about unknown type in logs".to_string()) - }); - - insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "id": Object {}, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - } - - #[test] - #[traced_test] - fn custom_scalar_without_map_should_be_any() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: RealCustomScalar) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - // Verify that a warning was logged - logs_assert(|lines: &[&str]| { - lines - .iter() - .filter(|line| line.contains("WARN")) - .any(|line| line.contains("custom scalars aren't currently supported without a custom_scalar_map name=\"RealCustomScalar\"")) - .then_some(()) - .ok_or("Expected warning about custom scalar without map in logs".to_string()) - }); - - insta::assert_debug_snapshot!(tool, @r##" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": 
String("object"), - "properties": Object { - "id": Object { - "$ref": String("#/definitions/RealCustomScalar"), - }, - }, - "definitions": Object { - "RealCustomScalar": Object { - "description": String("RealCustomScalar exists"), - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "##); - } - - #[test] - #[traced_test] - fn custom_scalar_with_map_but_not_found_should_error() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: RealCustomScalar) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - Some(&CustomScalarMap::from_str("{}").unwrap()), - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - // Verify that a warning was logged - logs_assert(|lines: &[&str]| { - lines - .iter() - .filter(|line| line.contains("WARN")) - .any(|line| { - line.contains( - "custom scalar missing from custom_scalar_map name=\"RealCustomScalar\"", - ) - }) - .then_some(()) - .ok_or("Expected warning about custom scalar missing in logs".to_string()) - }); - - insta::assert_debug_snapshot!(tool, @r##" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "id": Object { - "$ref": String("#/definitions/RealCustomScalar"), - }, - }, - "definitions": Object { - "RealCustomScalar": Object { - "description": String("RealCustomScalar exists"), - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "##); - } - - #[test] - fn custom_scalar_with_map() { - let custom_scalar_map = - 
CustomScalarMap::from_str("{ \"RealCustomScalar\": { \"type\": \"string\" }}"); - - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: RealCustomScalar) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - custom_scalar_map.ok().as_ref(), - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r##" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "id": Object { - "$ref": String("#/definitions/RealCustomScalar"), - }, - }, - "definitions": Object { - "RealCustomScalar": Object { - "description": String("RealCustomScalar exists"), - "type": String("string"), - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "##); - } - - #[test] - fn test_tool_description() { - const SCHEMA: &str = r#" - type Query { - """ - Get a list of A - """ - a(input: String!): [A]! 
- - """ - Get a B - """ - b: B - - """ - Get a Z - """ - z: Z - } - - """ - A - """ - type A { - c: String - d: D - } - - """ - B - """ - type B { - d: D - u: U - } - - """ - D - """ - type D { - e: E - f: String - g: String - } - - """ - E - """ - enum E { - """ - one - """ - ONE - """ - two - """ - TWO - } - - """ - F - """ - scalar F - - """ - U - """ - union U = M | W - - """ - M - """ - type M { - m: Int - } - - """ - W - """ - type W { - w: Int - } - - """ - Z - """ - type Z { - z: Int - zz: Int - zzz: Int - } - "#; - - let document = Parser::new().parse_ast(SCHEMA, "schema.graphql").unwrap(); - let schema = document.to_schema().unwrap(); - - let operation = Operation::from_document( - RawOperation { - source_text: r###" - query GetABZ($state: String!) { - a(input: $input) { - d { - e - } - } - b { - d { - ...JustF - } - u { - ... on M { - m - } - ... on W { - w - } - } - } - z { - ...JustZZZ - } - } - - fragment JustF on D { - f - } - - fragment JustZZZ on Z { - zzz - } - "### - .to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &schema, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - - insta::assert_snapshot!( - operation.tool.description.unwrap(), - @r#" - Get a list of A - The returned value is an array of type `A` - --- - Get a B - The returned value is optional and has type `B` - --- - Get a Z - The returned value is optional and has type `Z` - --- - """A""" - type A { - d: D - } - - """B""" - type B { - d: D - u: U - } - - """D""" - type D { - e: E - f: String - } - - """E""" - enum E { - """one""" - ONE - """two""" - TWO - } - - """U""" - union U = M | W - - """M""" - type M { - m: Int - } - - """W""" - type W { - w: Int - } - - """Z""" - type Z { - zzz: Int - } - "# - ); - } - - #[test] - fn tool_comment_description() { - let operation = Operation::from_document( - RawOperation { - source_text: r###" - # Overridden tool #description - query GetABZ($state: String!) 
{ - b { - d { - f - } - } - } - "### - .to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - - insta::assert_snapshot!( - operation.tool.description.unwrap(), - @"Overridden tool #description" - ); - } - - #[test] - fn tool_empty_comment_description() { - let operation = Operation::from_document( - RawOperation { - source_text: r###" - # - - # - query GetABZ($state: String!) { - id - } - "### - .to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - - insta::assert_snapshot!( - operation.tool.description.unwrap(), - @"The returned value is optional and has type `String`" - ); - } - - #[test] - fn no_schema_description() { - let operation = Operation::from_document( - RawOperation { - source_text: r###"query GetABZ($state: String!) { id enum }"###.to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - true, - ) - .unwrap() - .unwrap(); - - insta::assert_snapshot!( - operation.tool.description.unwrap(), - @r" - The returned value is optional and has type `String` - --- - The returned value is optional and has type `RealEnum` - " - ); - } - - #[test] - fn no_type_description() { - let operation = Operation::from_document( - RawOperation { - source_text: r###"query GetABZ($state: String!) 
{ id enum }"###.to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - true, - false, - ) - .unwrap() - .unwrap(); - - insta::assert_snapshot!( - operation.tool.description.unwrap(), - @r#" - --- - """the description for the enum""" - enum RealEnum { - """ENUM_VALUE_1 is a value""" - ENUM_VALUE_1 - """ENUM_VALUE_2 is a value""" - ENUM_VALUE_2 - } - "# - ); - } - - #[test] - fn no_type_description_or_schema_description() { - let operation = Operation::from_document( - RawOperation { - source_text: r###"query GetABZ($state: String!) { id enum }"###.to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - true, - true, - ) - .unwrap() - .unwrap(); - - insta::assert_snapshot!( - operation.tool.description.unwrap(), - @"" - ); - } - - #[test] - fn recursive_inputs() { - let operation = Operation::from_document( - RawOperation { - source_text: r###"query Test($filter: Filter){ - field(filter: $filter) { - id - } - }"### - .to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &Schema::parse( - r#" - """the filter input""" - input Filter { - """the filter.field field""" - field: String - """the filter.filter field""" - filter: Filter - } - type Query { - """the Query.field field""" - field( - """the filter argument""" - filter: Filter - ): String - } - "#, - "operation.graphql", - ) - .unwrap(), - None, - MutationMode::None, - true, - true, - ) - .unwrap() - .unwrap(); - - insta::assert_debug_snapshot!(operation.tool, @r##" - Tool { - name: "Test", - description: Some( - "", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "filter": Object { - "description": String("the filter argument"), - "$ref": String("#/definitions/Filter"), - }, - }, - "definitions": Object { - "Filter": Object { - "description": String("the 
filter input"), - "type": String("object"), - "properties": Object { - "field": Object { - "description": String("the filter.field field"), - "type": String("string"), - }, - "filter": Object { - "description": String("the filter.filter field"), - "$ref": String("#/definitions/Filter"), - }, - }, - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "##); - } - - #[test] - fn with_variable_overrides() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: ID, $name: String) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: Some(HashMap::from([( - "id".to_string(), - serde_json::Value::String("v".to_string()), - )])), - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "name": Object { - "type": String("string"), - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - } - - #[test] - fn input_schema_includes_variable_descriptions() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($idArg: ID) { customQuery(id: $idArg) { id } }" - .to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - 
insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "idArg": { - "description": "id description", - "type": "string" - } - } - } - "#); - } - - #[test] - fn input_schema_includes_joined_variable_descriptions_if_multiple() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($idArg: ID, $flag: Boolean) { customQuery(id: $idArg, flag: $flag) { id @skip(if: $flag) } }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "flag": { - "description": "Skipped when true.#a flag", - "type": "boolean" - }, - "idArg": { - "description": "id description", - "type": "string" - } - } - } - "#); - } - - #[test] - fn input_schema_includes_directive_variable_descriptions() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($idArg: ID, $skipArg: Boolean) { customQuery(id: $idArg) { id @skip(if: $skipArg) } }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "idArg": { - "description": "id description", - "type": "string" - }, - "skipArg": { - "description": "Skipped when true.", - "type": "boolean" - } - } - } - "#); - } - - #[test] - fn test_operation_name_with_named_query() { - let source_text = "query GetUser($id: ID!) 
{ user(id: $id) { name email } }"; - let raw_op = RawOperation { - source_text: source_text.to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }; - let operation = - Operation::from_document(raw_op, &SCHEMA, None, MutationMode::None, false, false) - .unwrap() - .unwrap(); - - let op_details = operation.operation(Value::Null).unwrap(); - assert_eq!(op_details.operation_name, Some(String::from("GetUser"))); - } - - #[test] - fn test_operation_name_with_named_mutation() { - let source_text = - "mutation CreateUser($input: UserInput!) { createUser(input: $input) { id name } }"; - let raw_op = RawOperation { - source_text: source_text.to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }; - let operation = - Operation::from_document(raw_op, &SCHEMA, None, MutationMode::Explicit, false, false) - .unwrap() - .unwrap(); - - let op_details = operation.operation(Value::Null).unwrap(); - assert_eq!(op_details.operation_name, Some(String::from("CreateUser"))); - } - - #[test] - fn operation_variable_comments_override_schema_descriptions() { - let operation = Operation::from_document( - RawOperation { - source_text: "# operation description\nquery QueryName(# id comment override\n$idArg: ID) { customQuery(id: $idArg) { id } }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "idArg": { - "description": "id comment override", - "type": "string" - } - } - } - "#); - } - - #[test] - fn operation_variable_comment_override_supports_multiline_comments() { - let operation = Operation::from_document( - RawOperation { - source_text: "# operation description\nquery 
QueryName(# id comment override\n # multi-line comment \n$idArg: ID) { customQuery(id: $idArg) { id } }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "idArg": { - "description": "id comment override\n multi-line comment", - "type": "string" - } - } - } - "#); - } - - #[test] - fn comment_with_parens_has_comments_extracted_correctly() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName # a comment (with parens)\n(# id comment override\n # multi-line comment \n$idArg: ID) { customQuery(id: $idArg) { id } }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "idArg": { - "description": "id comment override\n multi-line comment", - "type": "string" - } - } - } - "#); - } - - #[test] - fn multiline_comment_with_odd_spacing_and_parens_has_comments_extracted_correctly() { - let operation = Operation::from_document( - RawOperation { - source_text: "# operation comment\n\nquery QueryName # a comment \n# extra space\n\n\n# blank lines (with parens)\n\n# another (paren)\n(# id comment override\n # multi-line comment \n$idArg: ID\n, \n# a flag\n$flag: Boolean) { customQuery(id: $idArg, skip: $flag) { id } }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - 
.unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "flag": { - "description": "a flag", - "type": "boolean" - }, - "idArg": { - "description": "id comment override\n multi-line comment", - "type": "string" - } - } - } - "#); - } - - #[test] - fn operation_with_no_variables_is_handled_properly() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName { customQuery(id: \"123\") { id } }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": {} - } - "#); - } - - #[test] - fn commas_between_variables_are_ignored() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName(# id arg\n $idArg: ID,,\n,,\n # a flag\n $flag: Boolean, ,,) { customQuery(id: $idArg, flag: $flag) { id } }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "flag": { - "description": "a flag", - "type": "boolean" - }, - "idArg": { - "description": "id arg", - "type": "string" - } - } - } - "#); - } - - #[test] - fn input_schema_include_properties_field_even_when_operation_has_no_input_args() { - let operation = Operation::from_document( - RawOperation { - source_text: "query TestOp { testOp { id } }".to_string(), - 
persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": {} - } - "#); - } - - #[test] - fn nullable_list_of_nullable_input_objects() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($objects: [RealInputObject]) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r##" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "objects": Object { - "type": String("array"), - "items": Object { - "oneOf": Array [ - Object { - "$ref": String("#/definitions/RealInputObject"), - }, - Object { - "type": String("null"), - }, - ], - }, - }, - }, - "definitions": Object { - "RealInputObject": Object { - "type": String("object"), - "required": Array [ - String("required"), - ], - "properties": Object { - "optional": Object { - "description": String("optional is a input field that is optional"), - "type": String("string"), - }, - "required": Object { - "description": String("required is a input field that is required"), - "type": String("string"), - }, - }, - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "##); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r##" - { - "type": 
"object", - "properties": { - "objects": { - "type": "array", - "items": { - "oneOf": [ - { - "$ref": "#/definitions/RealInputObject" - }, - { - "type": "null" - } - ] - } - } - }, - "definitions": { - "RealInputObject": { - "type": "object", - "required": [ - "required" - ], - "properties": { - "optional": { - "description": "optional is a input field that is optional", - "type": "string" - }, - "required": { - "description": "required is a input field that is required", - "type": "string" - } - } - } - } - } - "##); - } - - #[test] - fn non_nullable_list_of_non_nullable_input_objects() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($objects: [RealInputObject!]!) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r##" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "required": Array [ - String("objects"), - ], - "properties": Object { - "objects": Object { - "type": String("array"), - "items": Object { - "$ref": String("#/definitions/RealInputObject"), - }, - }, - }, - "definitions": Object { - "RealInputObject": Object { - "type": String("object"), - "required": Array [ - String("required"), - ], - "properties": Object { - "optional": Object { - "description": String("optional is a input field that is optional"), - "type": String("string"), - }, - "required": Object { - "description": String("required is a input field that is required"), - "type": String("string"), - }, - }, - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "##); - 
insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r##" - { - "type": "object", - "required": [ - "objects" - ], - "properties": { - "objects": { - "type": "array", - "items": { - "$ref": "#/definitions/RealInputObject" - } - } - }, - "definitions": { - "RealInputObject": { - "type": "object", - "required": [ - "required" - ], - "properties": { - "optional": { - "description": "optional is a input field that is optional", - "type": "string" - }, - "required": { - "description": "required is a input field that is required", - "type": "string" - } - } - } - } - } - "##); - } -} +//! Operations +//! +//! This module includes transformation utilities that convert GraphQL operations +//! into MCP tools. + +mod mutation_mode; +mod operation; +mod operation_source; +mod raw_operation; +mod schema_walker; + +pub use mutation_mode::MutationMode; +pub use operation::{Operation, operation_defs, operation_name}; +pub use operation_source::OperationSource; +pub use raw_operation::RawOperation; diff --git a/crates/apollo-mcp-server/src/operations/mutation_mode.rs b/crates/apollo-mcp-server/src/operations/mutation_mode.rs new file mode 100644 index 00000000..9c7692d7 --- /dev/null +++ b/crates/apollo-mcp-server/src/operations/mutation_mode.rs @@ -0,0 +1,14 @@ +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Default, Debug, Deserialize, Serialize, PartialEq, Copy, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum MutationMode { + /// Don't allow any mutations + #[default] + None, + /// Allow explicit mutations, but don't allow the LLM to build them + Explicit, + /// Allow the LLM to build mutations + All, +} diff --git a/crates/apollo-mcp-server/src/operations/operation.rs b/crates/apollo-mcp-server/src/operations/operation.rs new file mode 100644 index 00000000..6a0ac72e --- /dev/null +++ b/crates/apollo-mcp-server/src/operations/operation.rs @@ -0,0 +1,3059 @@ +use 
std::collections::HashMap; + +use apollo_compiler::{ + Node, Schema as GraphqlSchema, + ast::{Definition, Document, OperationDefinition, OperationType, Selection, Type}, + parser::Parser, + schema::ExtendedType, +}; +use http::{HeaderMap, HeaderValue}; +use regex::Regex; +use rmcp::model::{ErrorCode, Tool, ToolAnnotations}; +use schemars::{Schema, json_schema}; +use serde::Serialize; +use serde_json::{Map, Value}; +use tracing::{debug, info, warn}; + +use crate::{ + custom_scalar_map::CustomScalarMap, + errors::{McpError, OperationError}, + graphql::{self, OperationDetails}, + schema_tree_shake::{DepthLimit, SchemaTreeShaker}, +}; + +use super::{MutationMode, RawOperation, schema_walker}; + +/// A valid GraphQL operation +#[derive(Debug, Clone, Serialize)] +pub struct Operation { + tool: Tool, + inner: RawOperation, + operation_name: String, +} + +impl AsRef for Operation { + fn as_ref(&self) -> &Tool { + &self.tool + } +} + +impl From for Tool { + fn from(value: Operation) -> Tool { + value.tool + } +} + +impl Operation { + pub(crate) fn into_inner(self) -> RawOperation { + self.inner + } + + pub fn from_document( + raw_operation: RawOperation, + graphql_schema: &GraphqlSchema, + custom_scalar_map: Option<&CustomScalarMap>, + mutation_mode: MutationMode, + disable_type_description: bool, + disable_schema_description: bool, + ) -> Result, OperationError> { + if let Some((document, operation, comments)) = operation_defs( + &raw_operation.source_text, + mutation_mode != MutationMode::None, + raw_operation.source_path.clone(), + )? 
{ + let operation_name = match operation_name(&operation, raw_operation.source_path.clone()) + { + Ok(name) => name, + Err(OperationError::MissingName { + source_path, + operation, + }) => { + if let Some(path) = source_path { + warn!("Skipping unnamed operation in {path}: {operation}"); + } else { + warn!("Skipping unnamed operation: {operation}"); + } + return Ok(None); + } + Err(e) => return Err(e), + }; + let variable_description_overrides = + variable_description_overrides(&raw_operation.source_text, &operation); + let mut tree_shaker = SchemaTreeShaker::new(graphql_schema); + tree_shaker.retain_operation(&operation, &document, DepthLimit::Unlimited); + + let description = Self::tool_description( + comments, + &mut tree_shaker, + graphql_schema, + &operation, + disable_type_description, + disable_schema_description, + ); + + let mut object = serde_json::to_value(get_json_schema( + &operation, + tree_shaker.argument_descriptions(), + &variable_description_overrides, + graphql_schema, + custom_scalar_map, + raw_operation.variables.as_ref(), + ))?; + + // make sure that the properties field exists since schemas::ObjectValidation is + // configured to skip empty maps (in the case where there are no input args) + ensure_properties_exists(&mut object); + + let Value::Object(schema) = object else { + return Err(OperationError::Internal( + "Schemars should have returned an object".to_string(), + )); + }; + + let tool: Tool = Tool::new(operation_name.clone(), description, schema).annotate( + ToolAnnotations::new() + .read_only(operation.operation_type != OperationType::Mutation), + ); + let character_count = tool_character_length(&tool); + match character_count { + Ok(length) => info!( + "Tool {} loaded with a character count of {}. Estimated tokens: {}", + operation_name, + length, + length / 4 // We don't know the tokenization algorithm, so we just use 4 characters per token as a rough estimate. 
https://docs.anthropic.com/en/docs/resources/glossary#tokens + ), + Err(_) => info!( + "Tool {} loaded with an unknown character count", + operation_name + ), + } + Ok(Some(Operation { + tool, + inner: raw_operation, + operation_name, + })) + } else { + Ok(None) + } + } + + /// Generate a description for an operation based on documentation in the schema + fn tool_description( + comments: Option, + tree_shaker: &mut SchemaTreeShaker, + graphql_schema: &GraphqlSchema, + operation_def: &Node, + disable_type_description: bool, + disable_schema_description: bool, + ) -> String { + let comment_description = extract_and_format_comments(comments); + + match comment_description { + Some(description) => description, + None => { + // Add the tree-shaken types to the end of the tool description + let mut lines = vec![]; + if !disable_type_description { + let descriptions = operation_def + .selection_set + .iter() + .filter_map(|selection| { + match selection { + Selection::Field(field) => { + let field_name = field.name.to_string(); + let operation_type = operation_def.operation_type; + if let Some(root_name) = + graphql_schema.root_operation(operation_type) + { + // Find the root field referenced by the operation + let root = graphql_schema.get_object(root_name)?; + let field_definition = root + .fields + .iter() + .find(|(name, _)| { + let name = name.to_string(); + name == field_name + }) + .map(|(_, field_definition)| { + field_definition.node.clone() + }); + + // Add the root field description to the tool description + let field_description = field_definition + .clone() + .and_then(|field| field.description.clone()) + .map(|node| node.to_string()); + + // Add information about the return type + let ty = field_definition.map(|field| field.ty.clone()); + let type_description = + ty.as_ref().map(Self::type_description); + + Some( + vec![field_description, type_description] + .into_iter() + .flatten() + .collect::>() + .join("\n"), + ) + } else { + None + } + } + _ => None, + 
} + }) + .collect::>() + .join("\n---\n"); + + // Add the tree-shaken types to the end of the tool description + + lines.push(descriptions); + } + if !disable_schema_description { + let shaken_schema = + tree_shaker.shaken().unwrap_or_else(|schema| schema.partial); + + let mut types = shaken_schema + .types + .iter() + .filter(|(_name, extended_type)| { + !extended_type.is_built_in() + && matches!( + extended_type, + ExtendedType::Object(_) + | ExtendedType::Scalar(_) + | ExtendedType::Enum(_) + | ExtendedType::Interface(_) + | ExtendedType::Union(_) + ) + && graphql_schema + .root_operation(operation_def.operation_type) + .is_none_or(|op_name| extended_type.name() != op_name) + && graphql_schema + .root_operation(OperationType::Query) + .is_none_or(|op_name| extended_type.name() != op_name) + }) + .peekable(); + if types.peek().is_some() { + lines.push(String::from("---")); + } + + for ty in types { + lines.push(ty.1.serialize().to_string()); + } + } + lines.join("\n") + } + } + } + + fn type_description(ty: &Type) -> String { + let type_name = ty.inner_named_type(); + let mut lines = vec![]; + let optional = if ty.is_non_null() { + "" + } else { + "is optional and " + }; + let array = if ty.is_list() { + "is an array of type" + } else { + "has type" + }; + lines.push(format!( + "The returned value {optional}{array} `{type_name}`" + )); + + lines.join("\n") + } +} + +impl graphql::Executable for Operation { + fn persisted_query_id(&self) -> Option { + // TODO: id was being overridden, should we be returning? Should this be behind a flag? 
self.inner.persisted_query_id.clone() + None + } + + fn operation(&self, _input: Value) -> Result { + Ok(OperationDetails { + query: self.inner.source_text.clone(), + operation_name: Some(self.operation_name.clone()), + }) + } + + fn variables(&self, input_variables: Value) -> Result { + if let Some(raw_variables) = self.inner.variables.as_ref() { + let mut variables = match input_variables { + Value::Null => Ok(serde_json::Map::new()), + Value::Object(obj) => Ok(obj.clone()), + _ => Err(McpError::new( + ErrorCode::INVALID_PARAMS, + "Invalid input".to_string(), + None, + )), + }?; + + raw_variables.iter().try_for_each(|(key, value)| { + if variables.contains_key(key) { + Err(McpError::new( + ErrorCode::INVALID_PARAMS, + "No such parameter: {key}", + None, + )) + } else { + variables.insert(key.clone(), value.clone()); + Ok(()) + } + })?; + + Ok(Value::Object(variables)) + } else { + Ok(input_variables) + } + } + + fn headers(&self, default_headers: &HeaderMap) -> HeaderMap { + match self.inner.headers.as_ref() { + None => default_headers.clone(), + Some(raw_headers) if default_headers.is_empty() => raw_headers.clone(), + Some(raw_headers) => { + let mut headers = default_headers.clone(); + raw_headers.iter().for_each(|(key, value)| { + if headers.contains_key(key) { + tracing::debug!( + "Header {} has a default value, overwriting with operation value", + key + ); + } + headers.insert(key, value.clone()); + }); + headers + } + } + } +} + +#[allow(clippy::type_complexity)] +pub fn operation_defs( + source_text: &str, + allow_mutations: bool, + source_path: Option, +) -> Result, Option)>, OperationError> { + let source_path_clone = source_path.clone(); + let document = Parser::new() + .parse_ast( + source_text, + source_path_clone.unwrap_or_else(|| "operation.graphql".to_string()), + ) + .map_err(|e| OperationError::GraphQLDocument(Box::new(e)))?; + let mut last_offset: Option = Some(0); + let mut operation_defs = 
document.definitions.clone().into_iter().filter_map(|def| { + let description = match def.location() { + Some(source_span) => { + let description = last_offset + .map(|start_offset| &source_text[start_offset..source_span.offset()]); + last_offset = Some(source_span.end_offset()); + description + } + None => { + last_offset = None; + None + } + }; + + match def { + Definition::OperationDefinition(operation_def) => { + Some((operation_def, description)) + } + Definition::FragmentDefinition(_) => None, + _ => { + eprintln!("Schema definitions were passed in, but only operations and fragments are allowed"); + None + } + } + }); + + let (operation, comments) = match (operation_defs.next(), operation_defs.next()) { + (None, _) => { + return Err(OperationError::NoOperations { source_path }); + } + (_, Some(_)) => { + return Err(OperationError::TooManyOperations { + source_path, + count: 2 + operation_defs.count(), + }); + } + (Some(op), None) => op, + }; + + match operation.operation_type { + OperationType::Subscription => { + debug!( + "Skipping subscription operation {}", + operation_name(&operation, source_path)? + ); + return Ok(None); + } + OperationType::Mutation => { + if !allow_mutations { + warn!( + "Skipping mutation operation {}", + operation_name(&operation, source_path)? + ); + return Ok(None); + } + } + OperationType::Query => {} + } + + Ok(Some((document, operation, comments.map(|c| c.to_string())))) +} + +pub fn operation_name( + operation: &Node, + source_path: Option, +) -> Result { + Ok(operation + .name + .as_ref() + .ok_or_else(|| OperationError::MissingName { + source_path, + operation: operation.serialize().no_indent().to_string(), + })? 
+ .to_string()) +} + +pub fn variable_description_overrides( + source_text: &str, + operation_definition: &Node, +) -> HashMap { + let mut argument_overrides_map: HashMap = HashMap::new(); + let mut last_offset = find_opening_parens_offset(source_text, operation_definition); + operation_definition + .variables + .iter() + .for_each(|v| match v.location() { + Some(source_span) => { + let comment = last_offset + .map(|start_offset| &source_text[start_offset..source_span.offset()]); + + if let Some(description) = comment.filter(|d| !d.is_empty() && d.contains('#')) + && let Some(description) = + extract_and_format_comments(Some(description.to_string())) + { + argument_overrides_map.insert(v.name.to_string(), description); + } + + last_offset = Some(source_span.end_offset()); + } + None => { + last_offset = None; + } + }); + + argument_overrides_map +} + +pub fn find_opening_parens_offset( + source_text: &str, + operation_definition: &Node, +) -> Option { + let regex = match Regex::new(r"(?m)^\s*\(") { + Ok(regex) => regex, + Err(_) => return None, + }; + + operation_definition + .name + .as_ref() + .and_then(|n| n.location()) + .map(|span| { + regex + .find(source_text[span.end_offset()..].as_ref()) + .map(|m| m.start() + m.len() + span.end_offset()) + .unwrap_or(0) + }) +} + +pub fn extract_and_format_comments(comments: Option) -> Option { + comments.and_then(|comments| { + let content = Regex::new(r"(\n|^)(\s*,*)*#") + .ok()? 
+ .replace_all(comments.as_str(), "$1"); + let trimmed = content.trim(); + + if trimmed.is_empty() { + None + } else { + Some(trimmed.to_string()) + } + }) +} + +fn ensure_properties_exists(json_object: &mut Value) { + if let Some(obj_type) = json_object.get("type") + && obj_type == "object" + && let Some(obj_map) = json_object.as_object_mut() + { + let props = obj_map + .entry("properties") + .or_insert_with(|| Value::Object(serde_json::Map::new())); + if !props.is_object() { + *props = Value::Object(serde_json::Map::new()); + } + } +} + +fn tool_character_length(tool: &Tool) -> Result { + let tool_schema_string = serde_json::to_string_pretty(&serde_json::json!(tool.input_schema))?; + Ok(tool.name.len() + + tool.description.as_ref().map(|d| d.len()).unwrap_or(0) + + tool_schema_string.len()) +} + +fn get_json_schema( + operation: &Node, + schema_argument_descriptions: &HashMap>, + argument_descriptions_overrides: &HashMap, + graphql_schema: &GraphqlSchema, + custom_scalar_map: Option<&CustomScalarMap>, + variable_overrides: Option<&HashMap>, +) -> Schema { + // Default initialize the schema with the bare minimum needed to be a valid object + let mut schema = json_schema!({"type": "object", "properties": {}}); + let mut definitions = Map::new(); + + // TODO: Can this be unwrapped to use `schema_walker::walk` instead? This functionality is doubled + // in some cases. 
+ operation.variables.iter().for_each(|variable| { + let variable_name = variable.name.to_string(); + if !variable_overrides + .map(|o| o.contains_key(&variable_name)) + .unwrap_or_default() + { + // use overridden description if there is one, otherwise use the schema description + let description = argument_descriptions_overrides + .get(&variable_name) + .cloned() + .or_else(|| { + schema_argument_descriptions + .get(&variable_name) + .filter(|d| !d.is_empty()) + .map(|d| d.join("#")) + }); + + let nested = schema_walker::type_to_schema( + variable.ty.as_ref(), + graphql_schema, + &mut definitions, + custom_scalar_map, + description, + ); + schema + .ensure_object() + .entry("properties") + .or_insert(Value::Object(Default::default())) + .as_object_mut() + .get_or_insert(&mut Map::default()) + .insert(variable_name.clone(), nested.into()); + + if variable.ty.is_non_null() { + schema + .ensure_object() + .entry("required") + .or_insert(serde_json::Value::Array(Vec::new())) + .as_array_mut() + .get_or_insert(&mut Vec::default()) + .push(variable_name.into()); + } + } + }); + + // Add the definitions to the overall schema if needed + if !definitions.is_empty() { + schema + .ensure_object() + .insert("definitions".to_string(), definitions.into()); + } + + schema +} + +#[cfg(test)] +mod tests { + use std::{collections::HashMap, str::FromStr as _, sync::LazyLock}; + + use apollo_compiler::{Schema, parser::Parser, validation::Valid}; + use rmcp::model::Tool; + use serde_json::Value; + use tracing_test::traced_test; + + use crate::{ + custom_scalar_map::CustomScalarMap, + graphql::Executable as _, + operations::{MutationMode, Operation, RawOperation}, + }; + + // Example schema for tests + static SCHEMA: LazyLock> = LazyLock::new(|| { + Schema::parse( + r#" + type Query { + id: String + enum: RealEnum + customQuery(""" id description """ id: ID!, """ a flag """ flag: Boolean): OutputType + testOp: OpResponse + } + type Mutation {id: String } + + """ + RealCustomScalar 
exists + """ + scalar RealCustomScalar + input RealInputObject { + """ + optional is a input field that is optional + """ + optional: String + + """ + required is a input field that is required + """ + required: String! + } + + type OpResponse { + id: String + } + + """ + the description for the enum + """ + enum RealEnum { + """ + ENUM_VALUE_1 is a value + """ + ENUM_VALUE_1 + + """ + ENUM_VALUE_2 is a value + """ + ENUM_VALUE_2 + } + + """ + custom output type + """ + type OutputType { + id: ID! + } + "#, + "operation.graphql", + ) + .expect("schema should parse") + .validate() + .expect("schema should be valid") + }); + + /// Serializes the input to JSON, sorting the object keys + macro_rules! to_sorted_json { + ($json:expr) => {{ + let mut j = serde_json::json!($json); + j.sort_all_objects(); + + j + }}; + } + + #[test] + fn nullable_named_type() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: ID) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "type": String("string"), + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r#" + { + "properties": { + "id": { + "type": "string" + } + }, + "type": "object" + } + "#); + } + + #[test] + fn 
non_nullable_named_type() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: ID!) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "type": String("string"), + }, + }, + "required": Array [ + String("id"), + ], + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + { + "type": "object", + "properties": { + "id": { + "type": "string" + } + }, + "required": [ + "id" + ] + } + "###); + } + + #[test] + fn non_nullable_list_of_nullable_named_type() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: [ID]!) 
{ id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "type": String("array"), + "items": Object { + "oneOf": Array [ + Object { + "type": String("string"), + }, + Object { + "type": String("null"), + }, + ], + }, + }, + }, + "required": Array [ + String("id"), + ], + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + { + "type": "object", + "properties": { + "id": { + "type": "array", + "items": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + } + } + }, + "required": [ + "id" + ] + } + "###); + } + + #[test] + fn non_nullable_list_of_non_nullable_named_type() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: [ID!]!) 
{ id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "type": String("array"), + "items": Object { + "type": String("string"), + }, + }, + }, + "required": Array [ + String("id"), + ], + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + { + "type": "object", + "properties": { + "id": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "id" + ] + } + "###); + } + + #[test] + fn nullable_list_of_nullable_named_type() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: [ID]) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "type": String("array"), + "items": Object { + "oneOf": Array [ + Object { + "type": String("string"), + }, + Object { + "type": String("null"), + }, + ], + }, + }, + }, + }, + output_schema: None, 
+ annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" + { + "type": "object", + "properties": { + "id": { + "type": "array", + "items": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + } + } + } + } + "#); + } + + #[test] + fn nullable_list_of_non_nullable_named_type() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: [ID!]) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "type": String("array"), + "items": Object { + "type": String("string"), + }, + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" + { + "type": "object", + "properties": { + "id": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + "#); + } + + #[test] + fn nullable_list_of_nullable_lists_of_nullable_named_types() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: [[ID]]) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + 
source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "type": String("array"), + "items": Object { + "oneOf": Array [ + Object { + "type": String("array"), + "items": Object { + "oneOf": Array [ + Object { + "type": String("string"), + }, + Object { + "type": String("null"), + }, + ], + }, + }, + Object { + "type": String("null"), + }, + ], + }, + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" + { + "type": "object", + "properties": { + "id": { + "type": "array", + "items": { + "oneOf": [ + { + "type": "array", + "items": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + } + }, + { + "type": "null" + } + ] + } + } + } + } + "#); + } + + #[test] + fn nullable_input_object() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: RealInputObject) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "$ref": 
String("#/definitions/RealInputObject"), + }, + }, + "definitions": Object { + "RealInputObject": Object { + "type": String("object"), + "properties": Object { + "optional": Object { + "description": String("optional is a input field that is optional"), + "type": String("string"), + }, + "required": Object { + "description": String("required is a input field that is required"), + "type": String("string"), + }, + }, + "required": Array [ + String("required"), + ], + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + } + + #[test] + fn non_nullable_enum() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: RealEnum!) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "$ref": String("#/definitions/RealEnum"), + }, + }, + "required": Array [ + String("id"), + ], + "definitions": Object { + "RealEnum": Object { + "description": String("the description for the enum\n\nValues:\nENUM_VALUE_1: ENUM_VALUE_1 is a value\nENUM_VALUE_2: ENUM_VALUE_2 is a value"), + "type": String("string"), + "enum": Array [ + String("ENUM_VALUE_1"), + String("ENUM_VALUE_2"), + ], + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + 
"###); + } + + #[test] + fn multiple_operations_should_error() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName { id } query QueryName { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: Some("operation.graphql".to_string()), + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ); + insta::assert_debug_snapshot!(operation, @r#" + Err( + TooManyOperations { + source_path: Some( + "operation.graphql", + ), + count: 2, + }, + ) + "#); + } + + #[test] + #[traced_test] + fn unnamed_operations_should_be_skipped() { + let operation = Operation::from_document( + RawOperation { + source_text: "query { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: Some("operation.graphql".to_string()), + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ); + assert!(operation.unwrap().is_none()); + + logs_assert(|lines: &[&str]| { + lines + .iter() + .filter(|line| line.contains("WARN")) + .any(|line| { + line.contains("Skipping unnamed operation in operation.graphql: { id }") + }) + .then_some(()) + .ok_or("Expected warning about unnamed operation in logs".to_string()) + }); + } + + #[test] + fn no_operations_should_error() { + let operation = Operation::from_document( + RawOperation { + source_text: "fragment Test on Query { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: Some("operation.graphql".to_string()), + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ); + insta::assert_debug_snapshot!(operation, @r#" + Err( + NoOperations { + source_path: Some( + "operation.graphql", + ), + }, + ) + "#); + } + + #[test] + fn schema_should_error() { + let operation = Operation::from_document( + RawOperation { + source_text: "type Query { id: String }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, 
+ None, + MutationMode::None, + false, + false, + ); + insta::assert_debug_snapshot!(operation, @r" + Err( + NoOperations { + source_path: None, + }, + ) + "); + } + + #[test] + #[traced_test] + fn unknown_type_should_be_any() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: FakeType) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + // Verify that a warning was logged + logs_assert(|lines: &[&str]| { + lines + .iter() + .filter(|line| line.contains("WARN")) + .any(|line| line.contains("Type not found in schema name=\"FakeType\"")) + .then_some(()) + .ok_or("Expected warning about unknown type in logs".to_string()) + }); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object {}, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + } + + #[test] + #[traced_test] + fn custom_scalar_without_map_should_be_any() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: RealCustomScalar) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + // Verify that a warning was logged + logs_assert(|lines: &[&str]| { + lines + .iter() + .filter(|line| line.contains("WARN")) + .any(|line| line.contains("custom scalars aren't currently 
supported without a custom_scalar_map name=\"RealCustomScalar\"")) + .then_some(()) + .ok_or("Expected warning about custom scalar without map in logs".to_string()) + }); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "$ref": String("#/definitions/RealCustomScalar"), + }, + }, + "definitions": Object { + "RealCustomScalar": Object { + "description": String("RealCustomScalar exists"), + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + } + + #[test] + #[traced_test] + fn custom_scalar_with_map_but_not_found_should_error() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: RealCustomScalar) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + Some(&CustomScalarMap::from_str("{}").unwrap()), + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + // Verify that a warning was logged + logs_assert(|lines: &[&str]| { + lines + .iter() + .filter(|line| line.contains("WARN")) + .any(|line| { + line.contains( + "custom scalar missing from custom_scalar_map name=\"RealCustomScalar\"", + ) + }) + .then_some(()) + .ok_or("Expected warning about custom scalar missing in logs".to_string()) + }); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "$ref": 
String("#/definitions/RealCustomScalar"), + }, + }, + "definitions": Object { + "RealCustomScalar": Object { + "description": String("RealCustomScalar exists"), + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + } + + #[test] + fn custom_scalar_with_map() { + let custom_scalar_map = + CustomScalarMap::from_str("{ \"RealCustomScalar\": { \"type\": \"string\" }}"); + + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: RealCustomScalar) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + custom_scalar_map.ok().as_ref(), + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "$ref": String("#/definitions/RealCustomScalar"), + }, + }, + "definitions": Object { + "RealCustomScalar": Object { + "description": String("RealCustomScalar exists"), + "type": String("string"), + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + } + + #[test] + fn test_tool_description() { + const SCHEMA: &str = r#" + type Query { + """ + Get a list of A + """ + a(input: String!): [A]! 
+ + """ + Get a B + """ + b: B + + """ + Get a Z + """ + z: Z + } + + """ + A + """ + type A { + c: String + d: D + } + + """ + B + """ + type B { + d: D + u: U + } + + """ + D + """ + type D { + e: E + f: String + g: String + } + + """ + E + """ + enum E { + """ + one + """ + ONE + """ + two + """ + TWO + } + + """ + F + """ + scalar F + + """ + U + """ + union U = M | W + + """ + M + """ + type M { + m: Int + } + + """ + W + """ + type W { + w: Int + } + + """ + Z + """ + type Z { + z: Int + zz: Int + zzz: Int + } + "#; + + let document = Parser::new().parse_ast(SCHEMA, "schema.graphql").unwrap(); + let schema = document.to_schema().unwrap(); + + let operation = Operation::from_document( + RawOperation { + source_text: r###" + query GetABZ($state: String!) { + a(input: $input) { + d { + e + } + } + b { + d { + ...JustF + } + u { + ... on M { + m + } + ... on W { + w + } + } + } + z { + ...JustZZZ + } + } + + fragment JustF on D { + f + } + + fragment JustZZZ on Z { + zzz + } + "### + .to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &schema, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + + insta::assert_snapshot!( + operation.tool.description.unwrap(), + @r#" + Get a list of A + The returned value is an array of type `A` + --- + Get a B + The returned value is optional and has type `B` + --- + Get a Z + The returned value is optional and has type `Z` + --- + """A""" + type A { + d: D + } + + """B""" + type B { + d: D + u: U + } + + """D""" + type D { + e: E + f: String + } + + """E""" + enum E { + """one""" + ONE + """two""" + TWO + } + + """U""" + union U = M | W + + """M""" + type M { + m: Int + } + + """W""" + type W { + w: Int + } + + """Z""" + type Z { + zzz: Int + } + "# + ); + } + + #[test] + fn tool_comment_description() { + let operation = Operation::from_document( + RawOperation { + source_text: r###" + # Overridden tool #description + query GetABZ($state: String!) 
{ + b { + d { + f + } + } + } + "### + .to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + + insta::assert_snapshot!( + operation.tool.description.unwrap(), + @"Overridden tool #description" + ); + } + + #[test] + fn tool_empty_comment_description() { + let operation = Operation::from_document( + RawOperation { + source_text: r###" + # + + # + query GetABZ($state: String!) { + id + } + "### + .to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + + insta::assert_snapshot!( + operation.tool.description.unwrap(), + @"The returned value is optional and has type `String`" + ); + } + + #[test] + fn no_schema_description() { + let operation = Operation::from_document( + RawOperation { + source_text: r###"query GetABZ($state: String!) { id enum }"###.to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + true, + ) + .unwrap() + .unwrap(); + + insta::assert_snapshot!( + operation.tool.description.unwrap(), + @r" + The returned value is optional and has type `String` + --- + The returned value is optional and has type `RealEnum` + " + ); + } + + #[test] + fn no_type_description() { + let operation = Operation::from_document( + RawOperation { + source_text: r###"query GetABZ($state: String!) 
{ id enum }"###.to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + true, + false, + ) + .unwrap() + .unwrap(); + + insta::assert_snapshot!( + operation.tool.description.unwrap(), + @r#" + --- + """the description for the enum""" + enum RealEnum { + """ENUM_VALUE_1 is a value""" + ENUM_VALUE_1 + """ENUM_VALUE_2 is a value""" + ENUM_VALUE_2 + } + "# + ); + } + + #[test] + fn no_type_description_or_schema_description() { + let operation = Operation::from_document( + RawOperation { + source_text: r###"query GetABZ($state: String!) { id enum }"###.to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + true, + true, + ) + .unwrap() + .unwrap(); + + insta::assert_snapshot!( + operation.tool.description.unwrap(), + @"" + ); + } + + #[test] + fn recursive_inputs() { + let operation = Operation::from_document( + RawOperation { + source_text: r###"query Test($filter: Filter){ + field(filter: $filter) { + id + } + }"### + .to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &Schema::parse( + r#" + """the filter input""" + input Filter { + """the filter.field field""" + field: String + """the filter.filter field""" + filter: Filter + } + type Query { + """the Query.field field""" + field( + """the filter argument""" + filter: Filter + ): String + } + "#, + "operation.graphql", + ) + .unwrap(), + None, + MutationMode::None, + true, + true, + ) + .unwrap() + .unwrap(); + + insta::assert_debug_snapshot!(operation.tool, @r###" + Tool { + name: "Test", + title: None, + description: Some( + "", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "filter": Object { + "description": String("the filter argument"), + "$ref": String("#/definitions/Filter"), + }, + }, + "definitions": Object { + "Filter": Object { + 
"description": String("the filter input"), + "type": String("object"), + "properties": Object { + "field": Object { + "description": String("the filter.field field"), + "type": String("string"), + }, + "filter": Object { + "description": String("the filter.filter field"), + "$ref": String("#/definitions/Filter"), + }, + }, + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + } + + #[test] + fn with_variable_overrides() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: ID, $name: String) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: Some(HashMap::from([( + "id".to_string(), + serde_json::Value::String("v".to_string()), + )])), + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "name": Object { + "type": String("string"), + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + } + + #[test] + fn input_schema_includes_variable_descriptions() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($idArg: ID) { customQuery(id: $idArg) { id } }" + .to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + 
.unwrap(); + let tool = Tool::from(operation); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "properties": { + "idArg": { + "description": "id description", + "type": "string" + } + }, + "type": "object" + } + "###); + } + + #[test] + fn input_schema_includes_joined_variable_descriptions_if_multiple() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($idArg: ID, $flag: Boolean) { customQuery(id: $idArg, flag: $flag) { id @skip(if: $flag) } }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "properties": { + "flag": { + "description": "Skipped when true.#a flag", + "type": "boolean" + }, + "idArg": { + "description": "id description", + "type": "string" + } + }, + "type": "object" + } + "###); + } + + #[test] + fn input_schema_includes_directive_variable_descriptions() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($idArg: ID, $skipArg: Boolean) { customQuery(id: $idArg) { id @skip(if: $skipArg) } }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" + { + "type": "object", + "properties": { + "idArg": { + "description": "id description", + "type": "string" + }, + "skipArg": { + "description": "Skipped when true.", + "type": "boolean" + } + } + } + "#); + } + + #[test] + fn 
test_operation_name_with_named_query() { + let source_text = "query GetUser($id: ID!) { user(id: $id) { name email } }"; + let raw_op = RawOperation { + source_text: source_text.to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }; + let operation = + Operation::from_document(raw_op, &SCHEMA, None, MutationMode::None, false, false) + .unwrap() + .unwrap(); + + let op_details = operation.operation(Value::Null).unwrap(); + assert_eq!(op_details.operation_name, Some(String::from("GetUser"))); + } + + #[test] + fn test_operation_name_with_named_mutation() { + let source_text = + "mutation CreateUser($input: UserInput!) { createUser(input: $input) { id name } }"; + let raw_op = RawOperation { + source_text: source_text.to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }; + let operation = + Operation::from_document(raw_op, &SCHEMA, None, MutationMode::Explicit, false, false) + .unwrap() + .unwrap(); + + let op_details = operation.operation(Value::Null).unwrap(); + assert_eq!(op_details.operation_name, Some(String::from("CreateUser"))); + } + + #[test] + fn operation_variable_comments_override_schema_descriptions() { + let operation = Operation::from_document( + RawOperation { + source_text: "# operation description\nquery QueryName(# id comment override\n$idArg: ID) { customQuery(id: $idArg) { id } }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "properties": { + "idArg": { + "description": "id comment override", + "type": "string" + } + }, + "type": "object" + } + "###); + } + + #[test] + fn operation_variable_comment_override_supports_multiline_comments() { + 
let operation = Operation::from_document( + RawOperation { + source_text: "# operation description\nquery QueryName(# id comment override\n # multi-line comment \n$idArg: ID) { customQuery(id: $idArg) { id } }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "properties": { + "idArg": { + "description": "id comment override\n multi-line comment", + "type": "string" + } + }, + "type": "object" + } + "###); + } + + #[test] + fn comment_with_parens_has_comments_extracted_correctly() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName # a comment (with parens)\n(# id comment override\n # multi-line comment \n$idArg: ID) { customQuery(id: $idArg) { id } }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "properties": { + "idArg": { + "description": "id comment override\n multi-line comment", + "type": "string" + } + }, + "type": "object" + } + "###); + } + + #[test] + fn multiline_comment_with_odd_spacing_and_parens_has_comments_extracted_correctly() { + let operation = Operation::from_document( + RawOperation { + source_text: "# operation comment\n\nquery QueryName # a comment \n# extra space\n\n\n# blank lines (with parens)\n\n# another (paren)\n(# id comment override\n # multi-line comment \n$idArg: ID\n, \n# a flag\n$flag: Boolean) { customQuery(id: $idArg, skip: $flag) { id } }".to_string(), + 
persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "properties": { + "flag": { + "description": "a flag", + "type": "boolean" + }, + "idArg": { + "description": "id comment override\n multi-line comment", + "type": "string" + } + }, + "type": "object" + } + "###); + } + + #[test] + fn operation_with_no_variables_is_handled_properly() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName { customQuery(id: \"123\") { id } }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "properties": {}, + "type": "object" + } + "###); + } + + #[test] + fn commas_between_variables_are_ignored() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName(# id arg\n $idArg: ID,,\n,,\n # a flag\n $flag: Boolean, ,,) { customQuery(id: $idArg, flag: $flag) { id } }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "properties": { + "flag": { + "description": "a flag", + "type": "boolean" + }, + "idArg": { + "description": "id arg", + "type": "string" + } + }, + "type": "object" + } + "###); + } + + 
#[test] + fn input_schema_include_properties_field_even_when_operation_has_no_input_args() { + let operation = Operation::from_document( + RawOperation { + source_text: "query TestOp { testOp { id } }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r#" + { + "properties": {}, + "type": "object" + } + "#); + } + + #[test] + fn nullable_list_of_nullable_input_objects() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($objects: [RealInputObject]) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "objects": Object { + "type": String("array"), + "items": Object { + "oneOf": Array [ + Object { + "$ref": String("#/definitions/RealInputObject"), + }, + Object { + "type": String("null"), + }, + ], + }, + }, + }, + "definitions": Object { + "RealInputObject": Object { + "type": String("object"), + "properties": Object { + "optional": Object { + "description": String("optional is a input field that is optional"), + "type": String("string"), + }, + "required": Object { + "description": String("required is a input field that is required"), + "type": String("string"), + }, + }, + "required": Array [ + String("required"), + ], + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + 
title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "definitions": { + "RealInputObject": { + "properties": { + "optional": { + "description": "optional is a input field that is optional", + "type": "string" + }, + "required": { + "description": "required is a input field that is required", + "type": "string" + } + }, + "required": [ + "required" + ], + "type": "object" + } + }, + "properties": { + "objects": { + "items": { + "oneOf": [ + { + "$ref": "#/definitions/RealInputObject" + }, + { + "type": "null" + } + ] + }, + "type": "array" + } + }, + "type": "object" + } + "###); + } + + #[test] + fn non_nullable_list_of_non_nullable_input_objects() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($objects: [RealInputObject!]!) 
{ id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "objects": Object { + "type": String("array"), + "items": Object { + "$ref": String("#/definitions/RealInputObject"), + }, + }, + }, + "required": Array [ + String("objects"), + ], + "definitions": Object { + "RealInputObject": Object { + "type": String("object"), + "properties": Object { + "optional": Object { + "description": String("optional is a input field that is optional"), + "type": String("string"), + }, + "required": Object { + "description": String("required is a input field that is required"), + "type": String("string"), + }, + }, + "required": Array [ + String("required"), + ], + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "definitions": { + "RealInputObject": { + "properties": { + "optional": { + "description": "optional is a input field that is optional", + "type": "string" + }, + "required": { + "description": "required is a input field that is required", + "type": "string" + } + }, + "required": [ + "required" + ], + "type": "object" + } + }, + "properties": { + "objects": { + "items": { + "$ref": "#/definitions/RealInputObject" + }, + "type": "array" + } + }, + "required": [ + "objects" + ], + "type": "object" + } + "###); + } + + #[test] + fn 
subscriptions() { + assert!( + Operation::from_document( + RawOperation { + source_text: "subscription SubscriptionName { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .is_none() + ); + } + + #[test] + fn mutation_mode_none() { + assert!( + Operation::from_document( + RawOperation { + source_text: "mutation MutationName { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .ok() + .unwrap() + .is_none() + ); + } + + #[test] + fn mutation_mode_explicit() { + let operation = Operation::from_document( + RawOperation { + source_text: "mutation MutationName { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::Explicit, + false, + false, + ) + .unwrap() + .unwrap(); + + insta::assert_debug_snapshot!(operation, @r###" + Operation { + tool: Tool { + name: "MutationName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object {}, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + false, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + }, + inner: RawOperation { + source_text: "mutation MutationName { id }", + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + operation_name: "MutationName", + } + "###); + } + + #[test] + fn mutation_mode_all() { + let operation = Operation::from_document( + RawOperation { + source_text: "mutation MutationName { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + 
&SCHEMA, + None, + MutationMode::All, + false, + false, + ) + .unwrap() + .unwrap(); + + insta::assert_debug_snapshot!(operation, @r###" + Operation { + tool: Tool { + name: "MutationName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object {}, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + false, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + }, + inner: RawOperation { + source_text: "mutation MutationName { id }", + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + operation_name: "MutationName", + } + "###); + } + + #[test] + fn no_variables() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object {}, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" + { + "type": "object", + "properties": {} + } + "#); + } +} diff --git a/crates/apollo-mcp-server/src/operations/operation_source.rs b/crates/apollo-mcp-server/src/operations/operation_source.rs new file mode 100644 index 00000000..d34b2a9b 
--- /dev/null +++ b/crates/apollo-mcp-server/src/operations/operation_source.rs @@ -0,0 +1,176 @@ +use std::{ + collections::HashMap, + fs, + path::PathBuf, + sync::{Arc, Mutex}, +}; + +use apollo_mcp_registry::{ + files, + platform_api::operation_collections::{ + collection_poller::CollectionSource, event::CollectionEvent, + }, + uplink::persisted_queries::{ManifestSource, event::Event as ManifestEvent}, +}; +use futures::{Stream, StreamExt as _}; +use tracing::warn; + +use crate::event::Event; + +use super::RawOperation; + +const OPERATION_DOCUMENT_EXTENSION: &str = "graphql"; + +/// The source of the operations exposed as MCP tools +#[derive(Clone)] +pub enum OperationSource { + /// GraphQL document files + Files(Vec), + + /// Persisted Query manifest + Manifest(ManifestSource), + + /// Operation collection + Collection(CollectionSource), + + /// No operations provided + None, +} + +impl OperationSource { + pub async fn into_stream(self) -> impl Stream { + match self { + OperationSource::Files(paths) => Self::stream_file_changes(paths).boxed(), + OperationSource::Manifest(manifest_source) => manifest_source + .into_stream() + .await + .map(|event| { + let ManifestEvent::UpdateManifest(operations) = event; + Event::OperationsUpdated( + operations.into_iter().map(RawOperation::from).collect(), + ) + }) + .boxed(), + OperationSource::Collection(collection_source) => collection_source + .into_stream() + .map(|event| match event { + CollectionEvent::UpdateOperationCollection(operations) => { + match operations + .iter() + .map(RawOperation::try_from) + .collect::, _>>() + { + Ok(operations) => Event::OperationsUpdated(operations), + Err(e) => Event::CollectionError(e), + } + } + CollectionEvent::CollectionError(error) => Event::CollectionError(error), + }) + .boxed(), + OperationSource::None => { + futures::stream::once(async { Event::OperationsUpdated(vec![]) }).boxed() + } + } + } + + fn stream_file_changes(paths: Vec) -> impl Stream { + let path_count = 
paths.len(); + let state = Arc::new(Mutex::new(HashMap::>::new())); + futures::stream::select_all(paths.into_iter().map(|path| { + let state = Arc::clone(&state); + files::watch(path.as_ref()) + .filter_map(move |_| { + let path = path.clone(); + let state = Arc::clone(&state); + async move { + let mut operations = Vec::new(); + if path.is_dir() { + // Handle a directory + if let Ok(entries) = fs::read_dir(&path) { + for entry in entries.flatten() { + let entry_path = entry.path(); + if entry_path.extension().and_then(|e| e.to_str()) + == Some(OPERATION_DOCUMENT_EXTENSION) + { + match fs::read_to_string(&entry_path) { + Ok(content) => { + // Be forgiving of empty files in the directory case. + // It likely means a new file was created in an editor, + // but the operation hasn't been written yet. + if !content.trim().is_empty() { + operations.push(RawOperation::from(( + content, + entry_path.to_str().map(|s| s.to_string()), + ))); + } + } + Err(e) => { + return Some(Event::OperationError( + e, + path.to_str().map(|s| s.to_string()), + )); + } + } + } + } + } + } else { + // Handle a single file + match fs::read_to_string(&path) { + Ok(content) => { + if !content.trim().is_empty() { + operations.push(RawOperation::from(( + content, + path.to_str().map(|s| s.to_string()), + ))); + } else { + warn!(?path, "Empty operation file"); + } + } + Err(e) => { + return Some(Event::OperationError( + e, + path.to_str().map(|s| s.to_string()), + )); + } + } + } + match state.lock() { + Ok(mut state) => { + state.insert(path.clone(), operations); + // All paths send an initial event on startup. To avoid repeated + // operation events on startup, wait until all paths have been + // loaded, then send a single event with the operations for all + // paths. 
+ if state.len() == path_count { + Some(Event::OperationsUpdated( + state.values().flatten().cloned().collect::>(), + )) + } else { + None + } + } + Err(_) => Some(Event::OperationError( + std::io::Error::other("State mutex poisoned"), + path.to_str().map(|s| s.to_string()), + )), + } + } + }) + .boxed() + })) + .boxed() + } +} + +impl From for OperationSource { + fn from(manifest_source: ManifestSource) -> Self { + OperationSource::Manifest(manifest_source) + } +} + +impl From> for OperationSource { + fn from(paths: Vec) -> Self { + OperationSource::Files(paths) + } +} diff --git a/crates/apollo-mcp-server/src/operations/raw_operation.rs b/crates/apollo-mcp-server/src/operations/raw_operation.rs new file mode 100644 index 00000000..c850319c --- /dev/null +++ b/crates/apollo-mcp-server/src/operations/raw_operation.rs @@ -0,0 +1,144 @@ +use std::{collections::HashMap, str::FromStr as _}; + +use apollo_compiler::validation::Valid; +use apollo_mcp_registry::platform_api::operation_collections::{ + collection_poller::OperationData, error::CollectionError, +}; +use http::{HeaderMap, HeaderName, HeaderValue}; +use serde_json::Value; + +use crate::{custom_scalar_map::CustomScalarMap, errors::OperationError}; + +use super::{MutationMode, operation::Operation}; + +#[derive(Debug, Clone)] +pub struct RawOperation { + pub(super) source_text: String, + pub(super) persisted_query_id: Option, + pub(super) headers: Option>, + pub(super) variables: Option>, + pub(super) source_path: Option, +} + +impl RawOperation { + pub(crate) fn into_operation( + self, + schema: &Valid, + custom_scalars: Option<&CustomScalarMap>, + mutation_mode: MutationMode, + disable_type_description: bool, + disable_schema_description: bool, + ) -> Result, OperationError> { + Operation::from_document( + self, + schema, + custom_scalars, + mutation_mode, + disable_type_description, + disable_schema_description, + ) + } +} + +impl From<(String, Option)> for RawOperation { + fn from((source_text, source_path): 
(String, Option)) -> Self { + Self { + persisted_query_id: None, + source_text, + headers: None, + variables: None, + source_path, + } + } +} + +impl From<(String, String)> for RawOperation { + fn from((persisted_query_id, source_text): (String, String)) -> Self { + Self { + persisted_query_id: Some(persisted_query_id), + source_text, + headers: None, + variables: None, + source_path: None, + } + } +} + +impl TryFrom<&OperationData> for RawOperation { + type Error = CollectionError; + + fn try_from(operation_data: &OperationData) -> Result { + let variables = if let Some(variables) = operation_data.variables.as_ref() { + if variables.trim().is_empty() { + Some(HashMap::new()) + } else { + Some( + serde_json::from_str::>(variables) + .map_err(|_| CollectionError::InvalidVariables(variables.clone()))?, + ) + } + } else { + None + }; + + let headers = if let Some(headers) = operation_data.headers.as_ref() { + let mut header_map = HeaderMap::new(); + for header in headers { + header_map.insert( + HeaderName::from_str(&header.0).map_err(CollectionError::HeaderName)?, + HeaderValue::from_str(&header.1).map_err(CollectionError::HeaderValue)?, + ); + } + Some(header_map) + } else { + None + }; + + Ok(Self { + persisted_query_id: None, + source_text: operation_data.source_text.clone(), + headers, + variables, + source_path: None, + }) + } +} + +// TODO: This can be greatly simplified by using `serde::serialize_with` on the specific field that does not +// implement `Serialize`. 
+// Custom Serialize implementation for RawOperation +// This is needed because the `http` crate's HeaderMap/HeaderValue/HeaderName don't derive Serialize +impl serde::Serialize for RawOperation { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut state = serializer.serialize_struct("RawOperation", 4)?; + state.serialize_field("source_text", &self.source_text)?; + if let Some(ref id) = self.persisted_query_id { + state.serialize_field("persisted_query_id", id)?; + } + if let Some(ref variables) = self.variables { + state.serialize_field("variables", variables)?; + } + if let Some(ref headers) = self.headers { + state.serialize_field( + "headers", + headers + .iter() + .map(|(name, value)| { + format!("{}: {}", name, value.to_str().unwrap_or_default()) + }) + .collect::>() + .join("\n") + .as_str(), + )?; + } + if let Some(ref path) = self.source_path { + state.serialize_field("source_path", path)?; + } + + state.end() + } +} diff --git a/crates/apollo-mcp-server/src/operations/schema_walker.rs b/crates/apollo-mcp-server/src/operations/schema_walker.rs new file mode 100644 index 00000000..0229d255 --- /dev/null +++ b/crates/apollo-mcp-server/src/operations/schema_walker.rs @@ -0,0 +1,46 @@ +//! JSON Schema generation utilities +//! +//! The types in this module generate JSON schemas for GraphQL types by walking +//! the types recursively. + +use apollo_compiler::{Schema as GraphQLSchema, ast::Type}; +use schemars::Schema; +use serde_json::{Map, Value}; + +use crate::custom_scalar_map::CustomScalarMap; + +mod name; +mod r#type; + +/// Convert a GraphQL type into a JSON Schema. +/// +/// Note: This is recursive, which might cause a stack overflow if the type is +/// sufficiently nested / complex. 
+pub fn type_to_schema( + r#type: &Type, + schema: &GraphQLSchema, + definitions: &mut Map, + custom_scalar_map: Option<&CustomScalarMap>, + description: Option, +) -> Schema { + r#type::Type { + cache: definitions, + custom_scalar_map, + description: &description, + schema, + r#type, + } + .into() +} + +/// Modifies a schema to include an optional description +fn with_desc(mut schema: Schema, description: &Option) -> Schema { + if let Some(desc) = description { + schema + .ensure_object() + .entry("description") + .or_insert(desc.clone().into()); + } + + schema +} diff --git a/crates/apollo-mcp-server/src/operations/schema_walker/name.rs b/crates/apollo-mcp-server/src/operations/schema_walker/name.rs new file mode 100644 index 00000000..c6452bda --- /dev/null +++ b/crates/apollo-mcp-server/src/operations/schema_walker/name.rs @@ -0,0 +1,199 @@ +use apollo_compiler::{Name as GraphQLName, Node, Schema as GraphQLSchema, schema::ExtendedType}; +use schemars::{Schema as JSONSchema, json_schema}; +use serde_json::{Map, Value}; +use tracing::warn; + +use crate::custom_scalar_map::CustomScalarMap; + +use super::{r#type::Type, with_desc}; + +/// A GraphQL Named Walker +pub(super) struct Name<'a> { + /// The definition cache which contains full schemas for nested types + pub(super) cache: &'a mut Map, + + /// Custom scalar map for supplementing information from the GraphQL schema + pub(super) custom_scalar_map: Option<&'a CustomScalarMap>, + + /// The optional description of the named type, from comments in the schema + pub(super) description: &'a Option, + + /// The actual named type to translate into a JSON schema + pub(super) name: &'a GraphQLName, + + /// The original GraphQL schema with all type information + pub(super) schema: &'a GraphQLSchema, +} + +impl From> for JSONSchema { + fn from( + Name { + cache, + custom_scalar_map, + description, + name, + schema, + }: Name, + ) -> Self { + let unknown_type = json_schema!({}); + + let result = match name.as_str() { + // 
Basic types map nicely + "String" | "ID" => json_schema!({"type": "string"}), + "Int" | "Float" => json_schema!({"type": "number"}), + "Boolean" => json_schema!({"type": "boolean"}), + + // If we've already cached it, then return the reference immediately + cached if cache.contains_key(cached) => { + JSONSchema::new_ref(format!("#/definitions/{cached}")) + } + + // Otherwise generate the dependent type + other => match schema.types.get(other) { + // Enums need to collect descriptions per field while also enumerating + // all possible values + Some(ExtendedType::Enum(r#enum)) => { + // Collect all fields such that each field is shown as + // : + let values = r#enum + .values + .iter() + .map(|(name, value)| { + format!( + "{}: {}", + name, + value + .description + .as_ref() + .map(|d| d.to_string()) + .unwrap_or_default() + ) + }) + .collect::>() + .join("\n"); + + // Consolidate all of the values such that we get a high-level + // description (from the schema) followed by its values + let description = format!( + "{}\n\nValues:\n{}", + r#enum + .description + .as_ref() + .map(Node::as_str) + .unwrap_or_default(), + values + ); + + cache.insert( + other.to_string(), + with_desc(json_schema!({ + "type": "string", + "enum": r#enum.values.iter().map(|(_, value)| serde_json::json!(value.value)).collect::>(), + }), + &Some(description), + ).into(), + ); + JSONSchema::new_ref(format!("#/definitions/{other}")) + } + + // Input types need to be traversed over their fields to ensure that they copy over + // nested structure. + Some(ExtendedType::InputObject(input)) => { + // Insert temporary value into map so any recursive references will not try to also create it. 
+ cache.insert(other.to_string(), Default::default()); + + let mut input_schema = with_desc( + json_schema!({"type": "object", "properties": {}}), + &input.description.as_ref().map(Node::to_string), + ); + for (name, field) in input.fields.iter() { + let field_description = field.description.as_ref().map(|n| n.to_string()); + input_schema + .ensure_object() + .entry("properties") + .or_insert(Value::Object(Default::default())) + .as_object_mut() + .get_or_insert(&mut Map::default()) + .insert( + name.to_string(), + JSONSchema::from(Type { + cache, + custom_scalar_map, + description: &field_description, + schema, + r#type: &field.ty, + }) + .into(), + ); + + // Mark any non-nullable fields as being required + if field.is_required() { + input_schema + .ensure_object() + .entry("required") + .or_insert(Value::Array(Default::default())) + .as_array_mut() + .get_or_insert(&mut Vec::default()) + .push(name.to_string().into()); + } + } + + cache.insert(other.to_string(), input_schema.into()); + JSONSchema::new_ref(format!("#/definitions/{other}")) + } + + // Custom scalars need to be opaquely copied over as types with no further processing + Some(ExtendedType::Scalar(scalar)) => { + // The default scalar description should always be from the scalar in the schema itself + let default_scalar_description = + scalar.description.as_ref().map(Node::to_string); + + if let Some(custom_scalar_map) = custom_scalar_map { + if let Some(custom_scalar_schema_object) = custom_scalar_map.get(other) { + // The custom scalar schema might have an override for the description, so we extract it here. 
+ let mut scalar_schema = custom_scalar_schema_object.clone(); + let description = scalar_schema + .ensure_object() + .get("description") + .and_then(Value::as_str) + .map(str::to_string); + + cache.insert( + other.to_string(), + with_desc( + custom_scalar_schema_object.clone(), + // The description could have been overridden by the custom schema, so we prioritize it here + &description.or(default_scalar_description), + ) + .into(), + ); + } else { + warn!(name=?other, "custom scalar missing from custom_scalar_map"); + cache.insert( + other.to_string(), + with_desc(JSONSchema::default(), &default_scalar_description) + .into(), + ); + } + } else { + warn!(name=?other, "custom scalars aren't currently supported without a custom_scalar_map"); + cache.insert( + other.to_string(), + with_desc(JSONSchema::default(), &default_scalar_description).into(), + ); + } + + JSONSchema::new_ref(format!("#/definitions/{other}")) + } + + // Anything else is unhandled + _ => { + warn!(name=?other, "Type not found in schema"); + unknown_type + } + }, + }; + + with_desc(result, description) + } +} diff --git a/crates/apollo-mcp-server/src/operations/schema_walker/type.rs b/crates/apollo-mcp-server/src/operations/schema_walker/type.rs new file mode 100644 index 00000000..e38df4a0 --- /dev/null +++ b/crates/apollo-mcp-server/src/operations/schema_walker/type.rs @@ -0,0 +1,74 @@ +use apollo_compiler::{Schema as GraphQLSchema, ast::Type as GraphQLType}; +use schemars::{Schema as JSONSchema, json_schema}; +use serde_json::{Map, Value}; + +use crate::custom_scalar_map::CustomScalarMap; + +use super::name::Name; + +pub(super) struct Type<'a> { + /// The definition cache which contains full schemas for nested types + pub(super) cache: &'a mut Map, + + /// Custom scalar map for supplementing information from the GraphQL schema + pub(super) custom_scalar_map: Option<&'a CustomScalarMap>, + + /// The optional description of the type, from comments in the schema + pub(super) description: &'a 
Option, + + /// The original GraphQL schema with all type information + pub(super) schema: &'a GraphQLSchema, + + /// The actual type to translate into a JSON schema + pub(super) r#type: &'a GraphQLType, +} + +impl From> for JSONSchema { + fn from( + Type { + cache, + custom_scalar_map, + description, + schema, + r#type, + }: Type, + ) -> Self { + // JSON Schema assumes that all properties are nullable unless there is a + // required field, so we treat cases the same here. + match r#type { + GraphQLType::List(list) | GraphQLType::NonNullList(list) => { + let nested_schema: JSONSchema = Type { + cache, + custom_scalar_map, + description, + schema, + r#type: list, + } + .into(); + + // Arrays, however, do need to specify that fields can be null + let nested_schema = if list.is_non_null() { + nested_schema + } else { + json_schema!({"oneOf": [ + nested_schema, + {"type": "null"}, + ]}) + }; + + json_schema!({ + "type": "array", + "items": nested_schema, + }) + } + + GraphQLType::Named(name) | GraphQLType::NonNullNamed(name) => JSONSchema::from(Name { + cache, + custom_scalar_map, + description, + name, + schema, + }), + } + } +} diff --git a/crates/apollo-mcp-server/src/schema_tree_shake.rs b/crates/apollo-mcp-server/src/schema_tree_shake.rs index 5d2fa7f7..d2896b3a 100644 --- a/crates/apollo-mcp-server/src/schema_tree_shake.rs +++ b/crates/apollo-mcp-server/src/schema_tree_shake.rs @@ -532,16 +532,15 @@ fn retain_argument_descriptions( ) { let operation_argument_name = operation_arguments.get(arg.name.as_str()); - if let Some(op_arg_name) = operation_argument_name { - if let Some(description) = arg.description.as_deref() { - if !description.trim().is_empty() { - let descriptions = tree_shaker - .arguments_descriptions - .entry(op_arg_name.to_string()) - .or_default(); - descriptions.push(description.trim().to_string()) - } - } + if let Some(op_arg_name) = operation_argument_name + && let Some(description) = arg.description.as_deref() + && 
!description.trim().is_empty() + { + let descriptions = tree_shaker + .arguments_descriptions + .entry(op_arg_name.to_string()) + .or_default(); + descriptions.push(description.trim().to_string()) } } diff --git a/crates/apollo-mcp-server/src/server.rs b/crates/apollo-mcp-server/src/server.rs index 96c0d772..6ef0153c 100644 --- a/crates/apollo-mcp-server/src/server.rs +++ b/crates/apollo-mcp-server/src/server.rs @@ -36,6 +36,7 @@ pub struct Server { mutation_mode: MutationMode, disable_type_description: bool, disable_schema_description: bool, + disable_auth_token_passthrough: bool, search_leaf_depth: usize, index_memory_bytes: usize, health_check: HealthCheckConfig, @@ -112,6 +113,7 @@ impl Server { mutation_mode: MutationMode, disable_type_description: bool, disable_schema_description: bool, + disable_auth_token_passthrough: bool, search_leaf_depth: usize, index_memory_bytes: usize, health_check: HealthCheckConfig, @@ -138,6 +140,7 @@ impl Server { mutation_mode, disable_type_description, disable_schema_description, + disable_auth_token_passthrough, search_leaf_depth, index_memory_bytes, health_check, diff --git a/crates/apollo-mcp-server/src/server/states.rs b/crates/apollo-mcp-server/src/server/states.rs index 81211cda..e154f010 100644 --- a/crates/apollo-mcp-server/src/server/states.rs +++ b/crates/apollo-mcp-server/src/server/states.rs @@ -44,6 +44,7 @@ struct Config { mutation_mode: MutationMode, disable_type_description: bool, disable_schema_description: bool, + disable_auth_token_passthrough: bool, search_leaf_depth: usize, index_memory_bytes: usize, health_check: HealthCheckConfig, @@ -76,6 +77,7 @@ impl StateMachine { mutation_mode: server.mutation_mode, disable_type_description: server.disable_type_description, disable_schema_description: server.disable_schema_description, + disable_auth_token_passthrough: server.disable_auth_token_passthrough, search_leaf_depth: server.search_leaf_depth, index_memory_bytes: server.index_memory_bytes, health_check: 
server.health_check, @@ -154,7 +156,7 @@ impl StateMachine { match Supergraph::new(&schema_state.sdl) { Ok(supergraph) => Ok(supergraph .to_api_schema(ApiSchemaOptions::default()) - .map_err(ServerError::Federation)? + .map_err(|e| ServerError::Federation(Box::new(e)))? .schema() .clone()), Err(_) => Schema::parse_and_validate(schema_state.sdl, "schema.graphql") diff --git a/crates/apollo-mcp-server/src/server/states/running.rs b/crates/apollo-mcp-server/src/server/states/running.rs index b1b69495..b1cc4e90 100644 --- a/crates/apollo-mcp-server/src/server/states/running.rs +++ b/crates/apollo-mcp-server/src/server/states/running.rs @@ -52,6 +52,7 @@ pub(super) struct Running { pub(super) mutation_mode: MutationMode, pub(super) disable_type_description: bool, pub(super) disable_schema_description: bool, + pub(super) disable_auth_token_passthrough: bool, pub(super) health_check: Option, } @@ -211,7 +212,9 @@ impl ServerHandler for Running { let mut headers = self.headers.clone(); if let Some(axum_parts) = context.extensions.get::() { // Optionally extract the validated token and propagate it to upstream servers if present - if let Some(token) = axum_parts.extensions.get::() { + if !self.disable_auth_token_passthrough + && let Some(token) = axum_parts.extensions.get::() + { headers.typed_insert(token.deref().clone()); } @@ -242,7 +245,9 @@ impl ServerHandler for Running { let mut headers = self.headers.clone(); if let Some(axum_parts) = context.extensions.get::() { // Optionally extract the validated token and propagate it to upstream servers if present - if let Some(token) = axum_parts.extensions.get::() { + if !self.disable_auth_token_passthrough + && let Some(token) = axum_parts.extensions.get::() + { headers.typed_insert(token.deref().clone()); } @@ -302,7 +307,12 @@ impl ServerHandler for Running { ServerInfo { server_info: Implementation { name: "Apollo MCP Server".to_string(), + icons: None, + title: Some("Apollo MCP Server".to_string()), version: 
env!("CARGO_PKG_VERSION").to_string(), + website_url: Some( + "https://www.apollographql.com/docs/apollo-mcp-server".to_string(), + ), }, capabilities: ServerCapabilities::builder() .enable_tools() @@ -355,6 +365,7 @@ mod tests { mutation_mode: MutationMode::None, disable_type_description: false, disable_schema_description: false, + disable_auth_token_passthrough: false, health_check: None, }; diff --git a/crates/apollo-mcp-server/src/server/states/starting.rs b/crates/apollo-mcp-server/src/server/states/starting.rs index a23b137b..4ef0dac4 100644 --- a/crates/apollo-mcp-server/src/server/states/starting.rs +++ b/crates/apollo-mcp-server/src/server/states/starting.rs @@ -148,6 +148,7 @@ impl Starting { mutation_mode: self.config.mutation_mode, disable_type_description: self.config.disable_type_description, disable_schema_description: self.config.disable_schema_description, + disable_auth_token_passthrough: self.config.disable_auth_token_passthrough, health_check: health_check.clone(), }; @@ -245,9 +246,14 @@ impl Starting { } Transport::Stdio => { info!("Starting MCP server in stdio mode"); - let service = running.clone().serve(stdio()).await.inspect_err(|e| { - error!("serving error: {:?}", e); - })?; + let service = running + .clone() + .serve(stdio()) + .await + .inspect_err(|e| { + error!("serving error: {:?}", e); + }) + .map_err(Box::new)?; service.waiting().await.map_err(ServerError::StartupError)?; } } diff --git a/crates/apollo-schema-index/Cargo.toml b/crates/apollo-schema-index/Cargo.toml index e2be15b6..86f8389f 100644 --- a/crates/apollo-schema-index/Cargo.toml +++ b/crates/apollo-schema-index/Cargo.toml @@ -1,10 +1,12 @@ [package] name = "apollo-schema-index" -edition = "2024" authors.workspace = true +edition.workspace = true +license-file.workspace = true +repository.workspace = true +rust-version.workspace = true version.workspace = true -license-file = "../LICENSE" -repository = "https://github.com/apollographql/apollo-mcp-server" + description = 
"GraphQL schema indexing" [dependencies] diff --git a/crates/apollo-schema-index/src/traverse.rs b/crates/apollo-schema-index/src/traverse.rs index fa0137e9..03139345 100644 --- a/crates/apollo-schema-index/src/traverse.rs +++ b/crates/apollo-schema-index/src/traverse.rs @@ -53,67 +53,65 @@ impl SchemaExt for Schema { ); let cloned = current_path.clone(); - if let Some(extended_type) = self.types.get(named_type) { - if !extended_type.is_built_in() { - if traverse_children { - match extended_type { - ExtendedType::Object(obj) => { - stack.extend(obj.fields.values().map(|field| { - let field_type = field.ty.inner_named_type(); - let field_args = field - .arguments - .iter() - .map(|arg| arg.ty.inner_named_type().clone()) - .collect::>(); - ( - field_type, - current_path.clone().add_child( - Some(field.name.clone()), - field_args, - field_type.clone(), - ), - ) - })); - } - ExtendedType::Interface(interface) => { - stack.extend(interface.fields.values().map(|field| { - let field_type = field.ty.inner_named_type(); - let field_args = field - .arguments - .iter() - .map(|arg| arg.ty.inner_named_type().clone()) - .collect::>(); + if let Some(extended_type) = self.types.get(named_type) + && !extended_type.is_built_in() + { + if traverse_children { + match extended_type { + ExtendedType::Object(obj) => { + stack.extend(obj.fields.values().map(|field| { + let field_type = field.ty.inner_named_type(); + let field_args = field + .arguments + .iter() + .map(|arg| arg.ty.inner_named_type().clone()) + .collect::>(); + ( + field_type, + current_path.clone().add_child( + Some(field.name.clone()), + field_args, + field_type.clone(), + ), + ) + })); + } + ExtendedType::Interface(interface) => { + stack.extend(interface.fields.values().map(|field| { + let field_type = field.ty.inner_named_type(); + let field_args = field + .arguments + .iter() + .map(|arg| arg.ty.inner_named_type().clone()) + .collect::>(); + ( + field_type, + current_path.clone().add_child( + 
Some(field.name.clone()), + field_args, + field_type.clone(), + ), + ) + })); + } + ExtendedType::Union(union) => { + stack.extend(union.members.iter().map(|member| &member.name).map( + |next_type| { ( - field_type, + next_type, current_path.clone().add_child( - Some(field.name.clone()), - field_args, - field_type.clone(), + None, + vec![], + next_type.clone(), ), ) - })); - } - ExtendedType::Union(union) => { - stack.extend( - union.members.iter().map(|member| &member.name).map( - |next_type| { - ( - next_type, - current_path.clone().add_child( - None, - vec![], - next_type.clone(), - ), - ) - }, - ), - ); - } - _ => {} + }, + )); } + _ => {} } - return Some((extended_type, cloned)); } + return Some((extended_type, cloned)); } } None diff --git a/docs/source/config-file.mdx b/docs/source/config-file.mdx index ba7c45dc..294bf697 100644 --- a/docs/source/config-file.mdx +++ b/docs/source/config-file.mdx @@ -165,13 +165,14 @@ The available fields depend on the value of the nested `type` key: These fields are under the top-level `transport` key, nested under the `auth` key. Learn more about [authorization and authentication](/apollo-mcp-server/auth). -| Option | Type | Default | Description | -| :----------------------- | :------------- | :------ | :------------------------------------------------------------------------------------------------- | -| `servers` | `List` | | List of upstream delegated OAuth servers (must support OIDC metadata discovery endpoint) | -| `audiences` | `List` | | List of accepted audiences from upstream signed JWTs | -| `resource` | `string` | | The externally available URL pointing to this MCP server. Can be `localhost` when testing locally. 
| -| `resource_documentation` | `string` | | Optional link to more documentation relating to this MCP server | -| `scopes` | `List` | | List of queryable OAuth scopes from the upstream OAuth servers | +| Option | Type | Default | Description | +| :-------------------------------- | :------------- | :------ | :------------------------------------------------------------------------------------------------- | +| `servers` | `List` | | List of upstream delegated OAuth servers (must support OIDC metadata discovery endpoint) | +| `audiences` | `List` | | List of accepted audiences from upstream signed JWTs | +| `resource` | `string` | | The externally available URL pointing to this MCP server. Can be `localhost` when testing locally. | +| `resource_documentation` | `string` | | Optional link to more documentation relating to this MCP server | +| `scopes` | `List` | | List of queryable OAuth scopes from the upstream OAuth servers | +| `disable_auth_token_passthrough` | `bool` | `false` | Optional flag to disable passing validated Authorization header to downstream API | Below is an example configuration using `StreamableHTTP` transport with authentication: diff --git a/docs/source/index.mdx b/docs/source/index.mdx index 4dd1b288..14eb3477 100644 --- a/docs/source/index.mdx +++ b/docs/source/index.mdx @@ -3,6 +3,7 @@ title: Apollo MCP Server subtitle: Enable graph-based API orchestration with AI redirectFrom: - /apollo-mcp-server/user-guide + - /apollo-mcp-server/guides --- diff --git a/docs/source/install.mdx b/docs/source/install.mdx index 9ec643e1..5e4114a0 100644 --- a/docs/source/install.mdx +++ b/docs/source/install.mdx @@ -26,14 +26,14 @@ To download a **specific version** of Apollo MCP Server (recommended for CI envi ```bash # Note the `v` prefixing the version number -docker image pull ghcr.io/apollographql/apollo-mcp-server:v0.7.5 +docker image pull ghcr.io/apollographql/apollo-mcp-server:v0.8.0 ``` To download a specific version of Apollo MCP Server that is a 
release candidate: ```bash # Note the `v` prefixing the version number and the `-rc` suffix -docker image pull ghcr.io/apollographql/apollo-mcp-server:v0.7.5-rc.1 +docker image pull ghcr.io/apollographql/apollo-mcp-server:v0.8.0-rc.1 ``` @@ -65,7 +65,7 @@ To install or upgrade to a **specific version** of Apollo MCP Server (recommende ```bash # Note the `v` prefixing the version number -curl -sSL https://mcp.apollo.dev/download/nix/v0.7.5 | sh +curl -sSL https://mcp.apollo.dev/download/nix/v0.8.0 | sh ``` If your machine doesn't have the `curl` command, you can get the latest version from the [`curl` downloads page](https://curl.se/download.html). @@ -82,5 +82,5 @@ To install or upgrade to a **specific version** of Apollo MCP Server (recommende ```bash # Note the `v` prefixing the version number -iwr 'https://mcp.apollo.dev/download/win/v0.7.5' | iex +iwr 'https://mcp.apollo.dev/download/win/v0.8.0' | iex ``` diff --git a/flake.lock b/flake.lock index 5e82287f..7e811193 100644 --- a/flake.lock +++ b/flake.lock @@ -3,11 +3,11 @@ "cache-nix-action": { "flake": false, "locked": { - "lastModified": 1746350578, - "narHash": "sha256-66auSJUldF+QLnMZEvOR9y9+P6doadeHmYl5UDFqVic=", + "lastModified": 1754213534, + "narHash": "sha256-4QgmQ8UAecAuu84hh5dYni1ahlvXvu2UdCDme6Jnh68=", "owner": "nix-community", "repo": "cache-nix-action", - "rev": "76f6697d63b7378f7161d52f3d81784130ecd90d", + "rev": "e2cf51da82e145785f5db595f553f7cbc2ca54df", "type": "github" }, "original": { @@ -18,11 +18,11 @@ }, "crane": { "locked": { - "lastModified": 1751562746, - "narHash": "sha256-smpugNIkmDeicNz301Ll1bD7nFOty97T79m4GUMUczA=", + "lastModified": 1755993354, + "narHash": "sha256-FCRRAzSaL/+umLIm3RU3O/+fJ2ssaPHseI2SSFL8yZU=", "owner": "ipetkov", "repo": "crane", - "rev": "aed2020fd3dc26e1e857d4107a5a67a33ab6c1fd", + "rev": "25bd41b24426c7734278c2ff02e53258851db914", "type": "github" }, "original": { @@ -82,11 +82,11 @@ ] }, "locked": { - "lastModified": 1751510438, - "narHash": 
"sha256-m8PjOoyyCR4nhqtHEBP1tB/jF+gJYYguSZmUmVTEAQE=", + "lastModified": 1756089517, + "narHash": "sha256-KGinVKturJFPrRebgvyUB1BUNqf1y9FN+tSJaTPlnFE=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "7f415261f298656f8164bd636c0dc05af4e95b6b", + "rev": "44774c8c83cd392c50914f86e1ff75ef8619f1cd", "type": "github" }, "original": { @@ -112,11 +112,11 @@ }, "unstable": { "locked": { - "lastModified": 1751498133, - "narHash": "sha256-QWJ+NQbMU+NcU2xiyo7SNox1fAuwksGlQhpzBl76g1I=", + "lastModified": 1756128520, + "narHash": "sha256-R94HxJBi+RK1iCm8Y4Q9pdrHZl0GZoDPIaYwjxRNPh4=", "owner": "nixos", "repo": "nixpkgs", - "rev": "d55716bb59b91ae9d1ced4b1ccdea7a442ecbfdb", + "rev": "c53baa6685261e5253a1c355a1b322f82674a824", "type": "github" }, "original": { diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 03648c85..c8ad2110 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "stable" +channel = "1.89.0" profile = "default" components = ["rust-analyzer", "rust-src"] diff --git a/scripts/nix/install.sh b/scripts/nix/install.sh index 40767447..de8fa9d0 100755 --- a/scripts/nix/install.sh +++ b/scripts/nix/install.sh @@ -14,7 +14,7 @@ BINARY_DOWNLOAD_PREFIX="${APOLLO_MCP_SERVER_BINARY_DOWNLOAD_PREFIX:="https://git # Apollo MCP Server version defined in apollo-mcp-server's Cargo.toml # Note: Change this line manually during the release steps. -PACKAGE_VERSION="v0.7.5" +PACKAGE_VERSION="v0.8.0" download_binary_and_run_installer() { downloader --check diff --git a/scripts/windows/install.ps1 b/scripts/windows/install.ps1 index d8007236..05072c3e 100644 --- a/scripts/windows/install.ps1 +++ b/scripts/windows/install.ps1 @@ -8,7 +8,7 @@ # Apollo MCP Server version defined in apollo-mcp-server's Cargo.toml # Note: Change this line manually during the release steps. 
-$package_version = 'v0.7.5' +$package_version = 'v0.8.0' function Install-Binary($apollo_mcp_server_install_args) { $old_erroractionpreference = $ErrorActionPreference